author    Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-02-15 16:53:17 +0000
committer Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-02-15 16:53:17 +0000
commit    9b18d9c656aed3af44c0f946665e1d1fa44657fb (patch)
tree      a465938ae07904733beafdaa830866dbe8cbc7ac
parent    3465004edbe6af3357185093fbf16830afe5c1a9 (diff)
parent    7ba826e50dff1878e6ecc6b9af44097c040c8968 (diff)
download  abseil-cpp-android-games-sdk-release.tar.gz

Snap for 8185358 from 7ba826e50dff1878e6ecc6b9af44097c040c8968 to android-games-sdk-release

Change-Id: I2efaa50ea1cd0ee24f6e7315c880f804715b2986
-rw-r--r--  Android.bp | 198
-rw-r--r--  CMake/AbseilDll.cmake | 19
-rw-r--r--  CMake/AbseilHelpers.cmake | 5
-rw-r--r--  CMakeLists.txt | 19
-rw-r--r--  METADATA | 18
-rw-r--r--  MODULE_LICENSE_APACHE2 | 0
l---------  NOTICE | 1
-rw-r--r--  OWNERS | 2
-rw-r--r--  absl/algorithm/container.h | 180
-rw-r--r--  absl/base/attributes.h | 67
-rw-r--r--  absl/base/config.h | 58
-rw-r--r--  absl/base/dynamic_annotations.h | 2
-rw-r--r--  absl/base/internal/sysinfo.cc | 67
-rw-r--r--  absl/base/internal/thread_identity.cc | 8
-rw-r--r--  absl/base/internal/thread_identity.h | 17
-rw-r--r--  absl/base/options.h | 4
-rw-r--r--  absl/container/btree_test.cc | 41
-rw-r--r--  absl/container/flat_hash_map_test.cc | 26
-rw-r--r--  absl/container/internal/btree.h | 4
-rw-r--r--  absl/container/internal/hash_generator_testing.h | 21
-rw-r--r--  absl/container/internal/raw_hash_set.h | 4
-rw-r--r--  absl/container/internal/unordered_map_constructor_test.h | 38
-rw-r--r--  absl/copts/GENERATED_AbseilCopts.cmake | 1
-rw-r--r--  absl/copts/GENERATED_copts.bzl | 1
-rw-r--r--  absl/copts/copts.py | 1
-rwxr-xr-x  absl/copts/generate_copts.py | 2
-rw-r--r--  absl/debugging/failure_signal_handler.cc | 1
-rw-r--r--  absl/debugging/failure_signal_handler.h | 2
-rw-r--r--  absl/debugging/symbolize_elf.inc | 10
-rw-r--r--  absl/debugging/symbolize_test.cc | 43
-rw-r--r--  absl/flags/flag.h | 3
-rw-r--r--  absl/hash/internal/wyhash.h | 2
-rw-r--r--  absl/memory/memory.h | 2
-rw-r--r--  absl/meta/type_traits.h | 23
-rw-r--r--  absl/meta/type_traits_test.cc | 28
-rw-r--r--  absl/numeric/int128.h | 10
-rw-r--r--  absl/random/discrete_distribution_test.cc | 7
-rw-r--r--  absl/random/internal/pool_urbg.cc | 7
-rw-r--r--  absl/status/internal/status_internal.h | 4
-rw-r--r--  absl/status/status.cc | 22
-rw-r--r--  absl/status/status.h | 4
-rw-r--r--  absl/status/status_test.cc | 8
-rw-r--r--  absl/strings/BUILD.bazel | 295
-rw-r--r--  absl/strings/CMakeLists.txt | 340
-rw-r--r--  absl/strings/charconv.cc | 6
-rw-r--r--  absl/strings/cord.cc | 563
-rw-r--r--  absl/strings/cord.h | 186
-rw-r--r--  absl/strings/cord_ring_reader_test.cc | 13
-rw-r--r--  absl/strings/cord_ring_test.cc | 301
-rw-r--r--  absl/strings/cord_test.cc | 73
-rw-r--r--  absl/strings/cord_test_helpers.h | 62
-rw-r--r--  absl/strings/cordz_test.cc | 421
-rw-r--r--  absl/strings/cordz_test_helpers.h | 151
-rw-r--r--  absl/strings/internal/charconv_parse.cc | 2
-rw-r--r--  absl/strings/internal/cord_internal.h | 26
-rw-r--r--  absl/strings/internal/cord_rep_ring.cc | 7
-rw-r--r--  absl/strings/internal/cord_rep_ring.h | 31
-rw-r--r--  absl/strings/internal/cord_rep_ring_reader.h | 4
-rw-r--r--  absl/strings/internal/cordz_functions.cc | 104
-rw-r--r--  absl/strings/internal/cordz_functions.h | 85
-rw-r--r--  absl/strings/internal/cordz_functions_test.cc | 131
-rw-r--r--  absl/strings/internal/cordz_handle.cc | 139
-rw-r--r--  absl/strings/internal/cordz_handle.h | 131
-rw-r--r--  absl/strings/internal/cordz_handle_test.cc | 265
-rw-r--r--  absl/strings/internal/cordz_info.cc | 426
-rw-r--r--  absl/strings/internal/cordz_info.h | 270
-rw-r--r--  absl/strings/internal/cordz_info_statistics_test.cc | 508
-rw-r--r--  absl/strings/internal/cordz_info_test.cc | 311
-rw-r--r--  absl/strings/internal/cordz_sample_token.cc | 64
-rw-r--r--  absl/strings/internal/cordz_sample_token.h | 97
-rw-r--r--  absl/strings/internal/cordz_sample_token_test.cc | 208
-rw-r--r--  absl/strings/internal/cordz_statistics.h | 84
-rw-r--r--  absl/strings/internal/cordz_update_scope.h | 71
-rw-r--r--  absl/strings/internal/cordz_update_scope_test.cc | 49
-rw-r--r--  absl/strings/internal/cordz_update_tracker.h | 119
-rw-r--r--  absl/strings/internal/cordz_update_tracker_test.cc | 143
-rw-r--r--  absl/strings/internal/str_format/arg.h | 8
-rw-r--r--  absl/strings/internal/str_format/convert_test.cc | 3
-rw-r--r--  absl/strings/internal/str_split_internal.h | 68
-rw-r--r--  absl/strings/str_split_test.cc | 22
-rw-r--r--  absl/strings/string_view.h | 15
-rw-r--r--  absl/synchronization/BUILD.bazel | 15
-rw-r--r--  absl/synchronization/blocking_counter_benchmark.cc | 83
-rw-r--r--  absl/synchronization/internal/per_thread_sem_test.cc | 2
-rw-r--r--  absl/synchronization/internal/waiter.cc | 2
-rw-r--r--  absl/time/civil_time.cc | 4
-rw-r--r--  absl/time/duration_test.cc | 4
-rw-r--r--  absl/time/internal/cctz/src/time_zone_fixed.cc | 2
-rw-r--r--  absl/time/internal/cctz/src/time_zone_lookup_test.cc | 12
-rw-r--r--  absl/time/time.h | 2
-rw-r--r--  absl/types/span.h | 4
-rwxr-xr-x  ci/macos_xcode_bazel.sh | 2
-rwxr-xr-x  create_lts.py | 15
93 files changed, 5912 insertions, 1017 deletions
diff --git a/Android.bp b/Android.bp
deleted file mode 100644
index 05de6919..00000000
--- a/Android.bp
+++ /dev/null
@@ -1,198 +0,0 @@
-package {
- default_applicable_licenses: ["external_abseil-cpp_license"],
-}
-
-// Added automatically by a large-scale-change that took the approach of
-// 'apply every license found to every target'. While this makes sure we respect
-// every license restriction, it may not be entirely correct.
-//
-// e.g. GPL in an MIT project might only apply to the contrib/ directory.
-//
-// Please consider splitting the single license below into multiple licenses,
-// taking care not to lose any license_kind information, and overriding the
-// default license using the 'licenses: [...]' property on targets as needed.
-//
-// For unused files, consider creating a 'fileGroup' with "//visibility:private"
-// to attach the license to, and including a comment whether the files may be
-// used in the current project.
-// See: http://go/android-license-faq
-license {
- name: "external_abseil-cpp_license",
- visibility: [":__subpackages__"],
- license_kinds: [
- "SPDX-license-identifier-Apache-2.0",
- "legacy_unencumbered",
- ],
- license_text: [
- "LICENSE",
- ],
-}
-
-cc_library_headers {
- name: "libabsl_headers",
- device_supported: false,
- host_supported: true,
- export_include_dirs: ["."],
-}
-
-cc_library_host_static {
- name: "libabsl_base",
- srcs: [
- "absl/base/internal/cycleclock.cc",
- "absl/base/internal/exponential_biased.cc",
- "absl/base/internal/low_level_alloc.cc",
- "absl/base/internal/periodic_sampler.cc",
- "absl/base/internal/raw_logging.cc",
- "absl/base/internal/spinlock.cc",
- "absl/base/internal/spinlock_wait.cc",
- "absl/base/internal/strerror.cc",
- "absl/base/internal/sysinfo.cc",
- "absl/base/internal/thread_identity.cc",
- "absl/base/internal/throw_delegate.cc",
- "absl/base/internal/unscaledcycleclock.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_container",
- srcs: [
- "absl/container/internal/test_instance_tracker.cc",
- "absl/container/internal/hash_generator_testing.cc",
- "absl/container/internal/hashtablez_sampler.cc",
- "absl/container/internal/hashtablez_sampler_force_weak_definition.cc",
- "absl/container/internal/raw_hash_set.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_debugging",
- srcs: [
- "absl/debugging/failure_signal_handler.cc",
- "absl/debugging/internal/address_is_readable.cc",
- "absl/debugging/internal/demangle.cc",
- "absl/debugging/internal/elf_mem_image.cc",
- "absl/debugging/internal/examine_stack.cc",
- "absl/debugging/internal/stack_consumption.cc",
- "absl/debugging/internal/vdso_support.cc",
- "absl/debugging/leak_check.cc",
- "absl/debugging/stacktrace.cc",
- "absl/debugging/symbolize.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_flags",
- srcs: [
- "absl/flags/commandlineflag.cc",
- "absl/flags/usage_config.cc",
- "absl/flags/marshalling.cc",
- "absl/flags/usage.cc",
- "absl/flags/flag.cc",
- "absl/flags/parse.cc",
- "absl/flags/internal/commandlineflag.cc",
- "absl/flags/internal/flag.cc",
- "absl/flags/internal/private_handle_accessor.cc",
- "absl/flags/internal/program_name.cc",
- "absl/flags/internal/usage.cc",
- "absl/flags/flag_test_defs.cc",
- "absl/flags/reflection.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_hash",
- srcs: [
- "absl/hash/internal/city.cc",
- "absl/hash/internal/hash.cc",
- "absl/hash/internal/wyhash.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_numeric",
- srcs: ["absl/numeric/int128.cc"],
-}
-
-cc_library_host_static {
- name: "libabsl_status",
- srcs: [
- "absl/status/status.cc",
- "absl/status/status_payload_printer.cc",
- "absl/status/statusor.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_strings",
- srcs: [
- "absl/strings/ascii.cc",
- "absl/strings/charconv.cc",
- "absl/strings/cord.cc",
- "absl/strings/escaping.cc",
- "absl/strings/internal/charconv_bigint.cc",
- "absl/strings/internal/charconv_parse.cc",
- "absl/strings/internal/cord_internal.cc",
- "absl/strings/internal/cord_rep_ring.cc",
- "absl/strings/internal/escaping.cc",
- "absl/strings/internal/memutil.cc",
- "absl/strings/internal/ostringstream.cc",
- "absl/strings/internal/str_format/arg.cc",
- "absl/strings/internal/str_format/bind.cc",
- "absl/strings/internal/str_format/extension.cc",
- "absl/strings/internal/str_format/float_conversion.cc",
- "absl/strings/internal/str_format/output.cc",
- "absl/strings/internal/str_format/parser.cc",
- "absl/strings/internal/utf8.cc",
- "absl/strings/match.cc",
- "absl/strings/numbers.cc",
- "absl/strings/str_cat.cc",
- "absl/strings/str_replace.cc",
- "absl/strings/str_split.cc",
- "absl/strings/string_view.cc",
- "absl/strings/substitute.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_synchronization",
- srcs: [
- "absl/synchronization/barrier.cc",
- "absl/synchronization/blocking_counter.cc",
- "absl/synchronization/internal/create_thread_identity.cc",
- "absl/synchronization/internal/per_thread_sem.cc",
- "absl/synchronization/internal/waiter.cc",
- "absl/synchronization/internal/graphcycles.cc",
- "absl/synchronization/notification.cc",
- "absl/synchronization/mutex.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_time",
- srcs: [
- "absl/time/civil_time.cc",
- "absl/time/clock.cc",
- "absl/time/duration.cc",
- "absl/time/format.cc",
- "absl/time/internal/cctz/src/civil_time_detail.cc",
- "absl/time/internal/cctz/src/time_zone_fixed.cc",
- "absl/time/internal/cctz/src/time_zone_format.cc",
- "absl/time/internal/cctz/src/time_zone_if.cc",
- "absl/time/internal/cctz/src/time_zone_impl.cc",
- "absl/time/internal/cctz/src/time_zone_info.cc",
- "absl/time/internal/cctz/src/time_zone_libc.cc",
- "absl/time/internal/cctz/src/time_zone_lookup.cc",
- "absl/time/internal/cctz/src/time_zone_posix.cc",
- "absl/time/internal/cctz/src/zone_info_source.cc",
- "absl/time/time.cc",
- ],
-}
-
-cc_library_host_static {
- name: "libabsl_types",
- srcs: [
- "absl/types/bad_any_cast.cc",
- "absl/types/bad_optional_access.cc",
- "absl/types/bad_variant_access.cc",
- ],
-}
diff --git a/CMake/AbseilDll.cmake b/CMake/AbseilDll.cmake
index 253c73ff..8ee4120f 100644
--- a/CMake/AbseilDll.cmake
+++ b/CMake/AbseilDll.cmake
@@ -197,16 +197,27 @@ set(ABSL_INTERNAL_DLL_FILES
"strings/cord.h"
"strings/escaping.cc"
"strings/escaping.h"
+ "strings/internal/charconv_bigint.cc"
+ "strings/internal/charconv_bigint.h"
+ "strings/internal/charconv_parse.cc"
+ "strings/internal/charconv_parse.h"
"strings/internal/cord_internal.cc"
"strings/internal/cord_internal.h"
"strings/internal/cord_rep_flat.h"
"strings/internal/cord_rep_ring.cc"
"strings/internal/cord_rep_ring.h"
"strings/internal/cord_rep_ring_reader.h"
- "strings/internal/charconv_bigint.cc"
- "strings/internal/charconv_bigint.h"
- "strings/internal/charconv_parse.cc"
- "strings/internal/charconv_parse.h"
+ "strings/internal/cordz_functions.cc"
+ "strings/internal/cordz_functions.h"
+ "strings/internal/cordz_handle.cc"
+ "strings/internal/cordz_handle.h"
+ "strings/internal/cordz_info.cc"
+ "strings/internal/cordz_info.h"
+ "strings/internal/cordz_sample_token.cc"
+ "strings/internal/cordz_sample_token.h"
+ "strings/internal/cordz_statistics.h"
+ "strings/internal/cordz_update_scope.h"
+ "strings/internal/cordz_update_tracker.h"
"strings/internal/stl_type_traits.h"
"strings/internal/string_constant.h"
"strings/match.cc"
diff --git a/CMake/AbseilHelpers.cmake b/CMake/AbseilHelpers.cmake
index 54fb8df3..1a80b5b4 100644
--- a/CMake/AbseilHelpers.cmake
+++ b/CMake/AbseilHelpers.cmake
@@ -141,7 +141,8 @@ function(absl_cc_library)
endif()
# Generate a pkg-config file for every library:
- if(_build_type STREQUAL "static" OR _build_type STREQUAL "shared")
+ if((_build_type STREQUAL "static" OR _build_type STREQUAL "shared")
+ AND ABSL_ENABLE_INSTALL)
if(NOT ABSL_CC_LIB_TESTONLY)
if(absl_VERSION)
set(PC_VERSION "${absl_VERSION}")
@@ -263,7 +264,7 @@ Cflags: -I\${includedir}${PC_CFLAGS}\n")
if(ABSL_ENABLE_INSTALL)
set_target_properties(${_NAME} PROPERTIES
OUTPUT_NAME "absl_${_NAME}"
- SOVERSION "2103.0.1"
+ SOVERSION 0
)
endif()
else()
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 3a73f707..d0c6e608 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -41,11 +41,16 @@ if (POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif (POLICY CMP0077)
+# Allow the user to specify the MSVC runtime
+if (POLICY CMP0091)
+ cmake_policy(SET CMP0091 NEW)
+endif (POLICY CMP0091)
+
# Set BUILD_TESTING to OFF by default.
# This must come before the project() and include(CTest) lines.
OPTION(BUILD_TESTING "Build tests" OFF)
-project(absl LANGUAGES CXX VERSION 20210324)
+project(absl LANGUAGES CXX)
include(CTest)
# Output directory is correct by default for most build setups. However, when
@@ -144,7 +149,17 @@ endif()
add_subdirectory(absl)
if(ABSL_ENABLE_INSTALL)
-
+ # absl:lts-remove-begin(system installation is supported for LTS releases)
+ # We don't support system-wide installation
+ list(APPEND SYSTEM_INSTALL_DIRS "/usr/local" "/usr" "/opt/" "/opt/local" "c:/Program Files/${PROJECT_NAME}")
+ if(NOT DEFINED CMAKE_INSTALL_PREFIX OR CMAKE_INSTALL_PREFIX IN_LIST SYSTEM_INSTALL_DIRS)
+ message(WARNING "\
+ The default and system-level install directories are unsupported except in LTS \
+ releases of Abseil. Please set CMAKE_INSTALL_PREFIX to install Abseil in your \
+ source or build tree directly.\
+ ")
+ endif()
+ # absl:lts-remove-end
# install as a subdirectory only
install(EXPORT ${PROJECT_NAME}Targets
diff --git a/METADATA b/METADATA
deleted file mode 100644
index 23f8fb26..00000000
--- a/METADATA
+++ /dev/null
@@ -1,18 +0,0 @@
-name: "extern/abseil-cpp"
-description:
- "An open-source collection of C++ code designed to augment the C++ standard "
- "library"
-
-third_party {
- url {
- type: HOMEPAGE
- value: "https://abseil.io"
- }
- url {
- type: GIT
- value: "https://github.com/abseil/abseil-cpp"
- }
- version: "20210324.2"
- last_upgrade_date { year: 2021 month: 07 day: 23 }
-}
-
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29b..00000000
--- a/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/NOTICE b/NOTICE
deleted file mode 120000
index 7a694c96..00000000
--- a/NOTICE
+++ /dev/null
@@ -1 +0,0 @@
-LICENSE
\ No newline at end of file
diff --git a/OWNERS b/OWNERS
deleted file mode 100644
index cd410bc6..00000000
--- a/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-dwillemsen@google.com
-enh@google.com
\ No newline at end of file
diff --git a/absl/algorithm/container.h b/absl/algorithm/container.h
index 6398438f..1652e7b0 100644
--- a/absl/algorithm/container.h
+++ b/absl/algorithm/container.h
@@ -905,11 +905,11 @@ void c_sort(C& c) {
// Overload of c_sort() for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-void c_sort(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+void c_sort(C& c, LessThan&& comp) {
std::sort(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_stable_sort()
@@ -925,11 +925,11 @@ void c_stable_sort(C& c) {
// Overload of c_stable_sort() for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-void c_stable_sort(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+void c_stable_sort(C& c, LessThan&& comp) {
std::stable_sort(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_sorted()
@@ -944,11 +944,11 @@ bool c_is_sorted(const C& c) {
// c_is_sorted() overload for performing a `comp` comparison other than the
// default `operator<`.
-template <typename C, typename Compare>
-bool c_is_sorted(const C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_is_sorted(const C& c, LessThan&& comp) {
return std::is_sorted(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_partial_sort()
@@ -966,14 +966,14 @@ void c_partial_sort(
// Overload of c_partial_sort() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
void c_partial_sort(
RandomAccessContainer& sequence,
container_algorithm_internal::ContainerIter<RandomAccessContainer> middle,
- Compare&& comp) {
+ LessThan&& comp) {
std::partial_sort(container_algorithm_internal::c_begin(sequence), middle,
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_partial_sort_copy()
@@ -994,15 +994,15 @@ c_partial_sort_copy(const C& sequence, RandomAccessContainer& result) {
// Overload of c_partial_sort_copy() for performing a `comp` comparison other
// than the default `operator<`.
-template <typename C, typename RandomAccessContainer, typename Compare>
+template <typename C, typename RandomAccessContainer, typename LessThan>
container_algorithm_internal::ContainerIter<RandomAccessContainer>
c_partial_sort_copy(const C& sequence, RandomAccessContainer& result,
- Compare&& comp) {
+ LessThan&& comp) {
return std::partial_sort_copy(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
container_algorithm_internal::c_begin(result),
container_algorithm_internal::c_end(result),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_sorted_until()
@@ -1018,12 +1018,12 @@ container_algorithm_internal::ContainerIter<C> c_is_sorted_until(C& c) {
// Overload of c_is_sorted_until() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
container_algorithm_internal::ContainerIter<C> c_is_sorted_until(
- C& c, Compare&& comp) {
+ C& c, LessThan&& comp) {
return std::is_sorted_until(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_nth_element()
@@ -1043,14 +1043,14 @@ void c_nth_element(
// Overload of c_nth_element() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
void c_nth_element(
RandomAccessContainer& sequence,
container_algorithm_internal::ContainerIter<RandomAccessContainer> nth,
- Compare&& comp) {
+ LessThan&& comp) {
std::nth_element(container_algorithm_internal::c_begin(sequence), nth,
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1072,12 +1072,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_lower_bound(
// Overload of c_lower_bound() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_lower_bound(
- Sequence& sequence, T&& value, Compare&& comp) {
+ Sequence& sequence, T&& value, LessThan&& comp) {
return std::lower_bound(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_upper_bound()
@@ -1095,12 +1095,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_upper_bound(
// Overload of c_upper_bound() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_upper_bound(
- Sequence& sequence, T&& value, Compare&& comp) {
+ Sequence& sequence, T&& value, LessThan&& comp) {
return std::upper_bound(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_equal_range()
@@ -1118,12 +1118,12 @@ c_equal_range(Sequence& sequence, T&& value) {
// Overload of c_equal_range() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
+template <typename Sequence, typename T, typename LessThan>
container_algorithm_internal::ContainerIterPairType<Sequence, Sequence>
-c_equal_range(Sequence& sequence, T&& value, Compare&& comp) {
+c_equal_range(Sequence& sequence, T&& value, LessThan&& comp) {
return std::equal_range(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<T>(value), std::forward<Compare>(comp));
+ std::forward<T>(value), std::forward<LessThan>(comp));
}
// c_binary_search()
@@ -1140,12 +1140,12 @@ bool c_binary_search(Sequence&& sequence, T&& value) {
// Overload of c_binary_search() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename Sequence, typename T, typename Compare>
-bool c_binary_search(Sequence&& sequence, T&& value, Compare&& comp) {
+template <typename Sequence, typename T, typename LessThan>
+bool c_binary_search(Sequence&& sequence, T&& value, LessThan&& comp) {
return std::binary_search(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
std::forward<T>(value),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1166,14 +1166,14 @@ OutputIterator c_merge(const C1& c1, const C2& c2, OutputIterator result) {
// Overload of c_merge() for performing a `comp` comparison other than
// the default `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare>
+template <typename C1, typename C2, typename OutputIterator, typename LessThan>
OutputIterator c_merge(const C1& c1, const C2& c2, OutputIterator result,
- Compare&& comp) {
+ LessThan&& comp) {
return std::merge(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), result,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_inplace_merge()
@@ -1189,13 +1189,13 @@ void c_inplace_merge(C& c,
// Overload of c_inplace_merge() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
void c_inplace_merge(C& c,
container_algorithm_internal::ContainerIter<C> middle,
- Compare&& comp) {
+ LessThan&& comp) {
std::inplace_merge(container_algorithm_internal::c_begin(c), middle,
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_includes()
@@ -1213,13 +1213,13 @@ bool c_includes(const C1& c1, const C2& c2) {
// Overload of c_includes() for performing a merge using a `comp` other than
// `operator<`.
-template <typename C1, typename C2, typename Compare>
-bool c_includes(const C1& c1, const C2& c2, Compare&& comp) {
+template <typename C1, typename C2, typename LessThan>
+bool c_includes(const C1& c1, const C2& c2, LessThan&& comp) {
return std::includes(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_union()
@@ -1243,7 +1243,7 @@ OutputIterator c_set_union(const C1& c1, const C2& c2, OutputIterator output) {
// Overload of c_set_union() for performing a merge using a `comp` other than
// `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1251,12 +1251,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_union(const C1& c1, const C2& c2, OutputIterator output,
- Compare&& comp) {
+ LessThan&& comp) {
return std::set_union(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_intersection()
@@ -1280,7 +1280,7 @@ OutputIterator c_set_intersection(const C1& c1, const C2& c2,
// Overload of c_set_intersection() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1288,12 +1288,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_intersection(const C1& c1, const C2& c2,
- OutputIterator output, Compare&& comp) {
+ OutputIterator output, LessThan&& comp) {
return std::set_intersection(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_difference()
@@ -1318,7 +1318,7 @@ OutputIterator c_set_difference(const C1& c1, const C2& c2,
// Overload of c_set_difference() for performing a merge using a `comp` other
// than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1326,12 +1326,12 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
!container_algorithm_internal::IsUnorderedContainer<C2>::value,
void>::type>
OutputIterator c_set_difference(const C1& c1, const C2& c2,
- OutputIterator output, Compare&& comp) {
+ OutputIterator output, LessThan&& comp) {
return std::set_difference(container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_set_symmetric_difference()
@@ -1357,7 +1357,7 @@ OutputIterator c_set_symmetric_difference(const C1& c1, const C2& c2,
// Overload of c_set_symmetric_difference() for performing a merge using a
// `comp` other than `operator<`.
-template <typename C1, typename C2, typename OutputIterator, typename Compare,
+template <typename C1, typename C2, typename OutputIterator, typename LessThan,
typename = typename std::enable_if<
!container_algorithm_internal::IsUnorderedContainer<C1>::value,
void>::type,
@@ -1366,13 +1366,13 @@ template <typename C1, typename C2, typename OutputIterator, typename Compare,
void>::type>
OutputIterator c_set_symmetric_difference(const C1& c1, const C2& c2,
OutputIterator output,
- Compare&& comp) {
+ LessThan&& comp) {
return std::set_symmetric_difference(
container_algorithm_internal::c_begin(c1),
container_algorithm_internal::c_end(c1),
container_algorithm_internal::c_begin(c2),
container_algorithm_internal::c_end(c2), output,
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1391,11 +1391,11 @@ void c_push_heap(RandomAccessContainer& sequence) {
// Overload of c_push_heap() for performing a push operation on a heap using a
// `comp` other than `operator<`.
-template <typename RandomAccessContainer, typename Compare>
-void c_push_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_push_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::push_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_pop_heap()
@@ -1410,11 +1410,11 @@ void c_pop_heap(RandomAccessContainer& sequence) {
// Overload of c_pop_heap() for performing a pop operation on a heap using a
// `comp` other than `operator<`.
-template <typename RandomAccessContainer, typename Compare>
-void c_pop_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_pop_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::pop_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_make_heap()
@@ -1429,11 +1429,11 @@ void c_make_heap(RandomAccessContainer& sequence) {
// Overload of c_make_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-void c_make_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_make_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::make_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_sort_heap()
@@ -1448,11 +1448,11 @@ void c_sort_heap(RandomAccessContainer& sequence) {
// Overload of c_sort_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-void c_sort_heap(RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+void c_sort_heap(RandomAccessContainer& sequence, LessThan&& comp) {
std::sort_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_heap()
@@ -1467,11 +1467,11 @@ bool c_is_heap(const RandomAccessContainer& sequence) {
// Overload of c_is_heap() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
-bool c_is_heap(const RandomAccessContainer& sequence, Compare&& comp) {
+template <typename RandomAccessContainer, typename LessThan>
+bool c_is_heap(const RandomAccessContainer& sequence, LessThan&& comp) {
return std::is_heap(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_is_heap_until()
@@ -1487,12 +1487,12 @@ c_is_heap_until(RandomAccessContainer& sequence) {
// Overload of c_is_heap_until() for performing heap comparisons using a
// `comp` other than `operator<`
-template <typename RandomAccessContainer, typename Compare>
+template <typename RandomAccessContainer, typename LessThan>
container_algorithm_internal::ContainerIter<RandomAccessContainer>
-c_is_heap_until(RandomAccessContainer& sequence, Compare&& comp) {
+c_is_heap_until(RandomAccessContainer& sequence, LessThan&& comp) {
return std::is_heap_until(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1513,12 +1513,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_min_element(
// Overload of c_min_element() for performing a `comp` comparison other than
// `operator<`.
-template <typename Sequence, typename Compare>
+template <typename Sequence, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_min_element(
- Sequence& sequence, Compare&& comp) {
+ Sequence& sequence, LessThan&& comp) {
return std::min_element(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_max_element()
@@ -1535,12 +1535,12 @@ container_algorithm_internal::ContainerIter<Sequence> c_max_element(
// Overload of c_max_element() for performing a `comp` comparison other than
// `operator<`.
-template <typename Sequence, typename Compare>
+template <typename Sequence, typename LessThan>
container_algorithm_internal::ContainerIter<Sequence> c_max_element(
- Sequence& sequence, Compare&& comp) {
+ Sequence& sequence, LessThan&& comp) {
return std::max_element(container_algorithm_internal::c_begin(sequence),
container_algorithm_internal::c_end(sequence),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_minmax_element()
@@ -1558,12 +1558,12 @@ c_minmax_element(C& c) {
// Overload of c_minmax_element() for performing `comp` comparisons other than
// `operator<`.
-template <typename C, typename Compare>
+template <typename C, typename LessThan>
container_algorithm_internal::ContainerIterPairType<C, C>
-c_minmax_element(C& c, Compare&& comp) {
+c_minmax_element(C& c, LessThan&& comp) {
return std::minmax_element(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
@@ -1588,15 +1588,15 @@ bool c_lexicographical_compare(Sequence1&& sequence1, Sequence2&& sequence2) {
// Overload of c_lexicographical_compare() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename Sequence1, typename Sequence2, typename Compare>
+template <typename Sequence1, typename Sequence2, typename LessThan>
bool c_lexicographical_compare(Sequence1&& sequence1, Sequence2&& sequence2,
- Compare&& comp) {
+ LessThan&& comp) {
return std::lexicographical_compare(
container_algorithm_internal::c_begin(sequence1),
container_algorithm_internal::c_end(sequence1),
container_algorithm_internal::c_begin(sequence2),
container_algorithm_internal::c_end(sequence2),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_next_permutation()
@@ -1612,11 +1612,11 @@ bool c_next_permutation(C& c) {
// Overload of c_next_permutation() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename C, typename Compare>
-bool c_next_permutation(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_next_permutation(C& c, LessThan&& comp) {
return std::next_permutation(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
// c_prev_permutation()
@@ -1632,11 +1632,11 @@ bool c_prev_permutation(C& c) {
// Overload of c_prev_permutation() for performing a lexicographical
// comparison using a `comp` operator instead of `operator<`.
-template <typename C, typename Compare>
-bool c_prev_permutation(C& c, Compare&& comp) {
+template <typename C, typename LessThan>
+bool c_prev_permutation(C& c, LessThan&& comp) {
return std::prev_permutation(container_algorithm_internal::c_begin(c),
container_algorithm_internal::c_end(c),
- std::forward<Compare>(comp));
+ std::forward<LessThan>(comp));
}
//------------------------------------------------------------------------------
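Usage sketch for the comparator overloads above (illustration only, not part of the commit): absl::c_sort and the other overloads accept any callable that behaves like a strict-weak-ordering "less than", so the Compare -> LessThan rename is documentation only and does not change which comparators compile.

#include <vector>
#include "absl/algorithm/container.h"

void SortDescending(std::vector<int>& v) {
  // The comp argument may be a lambda, functor, or function pointer; here a
  // reversed comparison yields descending order.
  absl::c_sort(v, [](int a, int b) { return a > b; });
}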
diff --git a/absl/base/attributes.h b/absl/base/attributes.h
index cf2cb550..52139556 100644
--- a/absl/base/attributes.h
+++ b/absl/base/attributes.h
@@ -131,14 +131,14 @@
// ABSL_ATTRIBUTE_WEAK
//
// Tags a function as weak for the purposes of compilation and linking.
-// Weak attributes currently do not work properly in LLVM's Windows backend,
-// so disable them there. See https://bugs.llvm.org/show_bug.cgi?id=37598
+// Weak attributes did not work properly in LLVM's Windows backend before
+// 9.0.0, so disable them there. See https://bugs.llvm.org/show_bug.cgi?id=37598
// for further information.
// The MinGW compiler doesn't complain about the weak attribute until the link
// step, presumably because Windows doesn't use ELF binaries.
#if (ABSL_HAVE_ATTRIBUTE(weak) || \
(defined(__GNUC__) && !defined(__clang__))) && \
- !(defined(__llvm__) && defined(_WIN32)) && !defined(__MINGW32__)
+ (!defined(_WIN32) || __clang_major__ < 9) && !defined(__MINGW32__)
#undef ABSL_ATTRIBUTE_WEAK
#define ABSL_ATTRIBUTE_WEAK __attribute__((weak))
#define ABSL_HAVE_ATTRIBUTE_WEAK 1
@@ -281,10 +281,7 @@
// ABSL_ATTRIBUTE_RETURNS_NONNULL
//
// Tells the compiler that a particular function never returns a null pointer.
-#if ABSL_HAVE_ATTRIBUTE(returns_nonnull) || \
- (defined(__GNUC__) && \
- (__GNUC__ > 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)) && \
- !defined(__clang__))
+#if ABSL_HAVE_ATTRIBUTE(returns_nonnull)
#define ABSL_ATTRIBUTE_RETURNS_NONNULL __attribute__((returns_nonnull))
#else
#define ABSL_ATTRIBUTE_RETURNS_NONNULL
@@ -524,6 +521,13 @@
// ABSL_ATTRIBUTE_UNUSED
//
// Prevents the compiler from complaining about variables that appear unused.
+//
+// For code or headers that are assured to only build with C++17 and up, prefer
+// just using the standard '[[maybe_unused]]' directly over this macro.
+//
+// Due to differences in positioning requirements between the old, compiler
+// specific __attribute__ syntax and the now standard [[maybe_unused]], this
+// macro does not attempt to take advantage of '[[maybe_unused]]'.
#if ABSL_HAVE_ATTRIBUTE(unused) || (defined(__GNUC__) && !defined(__clang__))
#undef ABSL_ATTRIBUTE_UNUSED
#define ABSL_ATTRIBUTE_UNUSED __attribute__((__unused__))
@@ -595,31 +599,24 @@
// case 42:
// ...
//
-// Notes: when compiled with clang in C++11 mode, the ABSL_FALLTHROUGH_INTENDED
-// macro is expanded to the [[clang::fallthrough]] attribute, which is analysed
-// when performing switch labels fall-through diagnostic
-// (`-Wimplicit-fallthrough`). See clang documentation on language extensions
-// for details:
+// Notes: When supported, GCC and Clang can issue a warning on switch labels
+// with unannotated fallthrough using the warning `-Wimplicit-fallthrough`. See
+// clang documentation on language extensions for details:
// https://clang.llvm.org/docs/AttributeReference.html#fallthrough-clang-fallthrough
//
-// When used with unsupported compilers, the ABSL_FALLTHROUGH_INTENDED macro
-// has no effect on diagnostics. In any case this macro has no effect on runtime
+// When used with unsupported compilers, the ABSL_FALLTHROUGH_INTENDED macro has
+// no effect on diagnostics. In any case this macro has no effect on runtime
// behavior and performance of code.
#ifdef ABSL_FALLTHROUGH_INTENDED
#error "ABSL_FALLTHROUGH_INTENDED should not be defined."
-#endif
-
-// TODO(zhangxy): Use c++17 standard [[fallthrough]] macro, when supported.
-#if defined(__clang__) && defined(__has_warning)
-#if __has_feature(cxx_attributes) && __has_warning("-Wimplicit-fallthrough")
+#elif ABSL_HAVE_CPP_ATTRIBUTE(fallthrough)
+#define ABSL_FALLTHROUGH_INTENDED [[fallthrough]]
+#elif ABSL_HAVE_CPP_ATTRIBUTE(clang::fallthrough)
#define ABSL_FALLTHROUGH_INTENDED [[clang::fallthrough]]
-#endif
-#elif defined(__GNUC__) && __GNUC__ >= 7
+#elif ABSL_HAVE_CPP_ATTRIBUTE(gnu::fallthrough)
#define ABSL_FALLTHROUGH_INTENDED [[gnu::fallthrough]]
-#endif
-
-#ifndef ABSL_FALLTHROUGH_INTENDED
+#else
#define ABSL_FALLTHROUGH_INTENDED \
do { \
} while (0)
@@ -699,4 +696,26 @@
#define ABSL_ATTRIBUTE_PURE_FUNCTION
#endif
+// ABSL_ATTRIBUTE_LIFETIME_BOUND indicates that a resource owned by a function
+// parameter or implicit object parameter is retained by the return value of the
+// annotated function (or, for a parameter of a constructor, in the value of the
+// constructed object). This attribute causes warnings to be produced if a
+// temporary object does not live long enough.
+//
+// When applied to a reference parameter, the referenced object is assumed to be
+// retained by the return value of the function. When applied to a non-reference
+// parameter (for example, a pointer or a class type), all temporaries
+// referenced by the parameter are assumed to be retained by the return value of
+// the function.
+//
+// See also the upstream documentation:
+// https://clang.llvm.org/docs/AttributeReference.html#lifetimebound
+#if ABSL_HAVE_CPP_ATTRIBUTE(clang::lifetimebound)
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND [[clang::lifetimebound]]
+#elif ABSL_HAVE_ATTRIBUTE(lifetimebound)
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND __attribute__((lifetimebound))
+#else
+#define ABSL_ATTRIBUTE_LIFETIME_BOUND
+#endif
+
#endif // ABSL_BASE_ATTRIBUTES_H_
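Illustration (not part of the commit) of how the two attributes reworked or added above are typically used; the names StringHolder and Classify are invented for this sketch.

#include <string>
#include <utility>
#include "absl/base/attributes.h"

class StringHolder {
 public:
  explicit StringHolder(std::string s) : s_(std::move(s)) {}
  // With ABSL_ATTRIBUTE_LIFETIME_BOUND, compilers that implement
  // [[clang::lifetimebound]] warn when the returned reference would outlive
  // *this, e.g. when it is taken from a temporary StringHolder.
  const std::string& value() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return s_; }

 private:
  std::string s_;
};

int Classify(int x) {
  switch (x) {
    case 0:
      x += 10;
      ABSL_FALLTHROUGH_INTENDED;  // expands to [[fallthrough]] where available
    case 1:
      return x;
    default:
      return -1;
  }
}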
diff --git a/absl/base/config.h b/absl/base/config.h
index 95449969..0524196d 100644
--- a/absl/base/config.h
+++ b/absl/base/config.h
@@ -166,6 +166,22 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#define ABSL_HAVE_FEATURE(f) 0
#endif
+// Portable check for GCC minimum version:
+// https://gcc.gnu.org/onlinedocs/cpp/Common-Predefined-Macros.html
+#if defined(__GNUC__) && defined(__GNUC_MINOR__)
+#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) \
+ (__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y))
+#else
+#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) 0
+#endif
+
+#if defined(__clang__) && defined(__clang_major__) && defined(__clang_minor__)
+#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) \
+ (__clang_major__ > (x) || __clang_major__ == (x) && __clang_minor__ >= (y))
+#else
+#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) 0
+#endif
+
// ABSL_HAVE_TLS is defined to 1 when __thread should be supported.
// We assume __thread is supported on Linux when compiled with Clang or compiled
// against libstdc++ with _GLIBCXX_HAVE_TLS defined.
@@ -183,10 +199,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// gcc >= 4.8.1 using libstdc++, and Visual Studio.
#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE
#error ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE cannot be directly set
-#elif defined(_LIBCPP_VERSION) || \
- (!defined(__clang__) && defined(__GNUC__) && defined(__GLIBCXX__) && \
- (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))) || \
- defined(_MSC_VER)
+#elif defined(_LIBCPP_VERSION) || defined(_MSC_VER) || \
+ (!defined(__clang__) && defined(__GLIBCXX__) && \
+ ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(4, 8))
#define ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE 1
#endif
@@ -205,10 +220,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#error ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE cannot be directly set
#elif defined(ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE)
#error ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE cannot directly set
-#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
- (!defined(__clang__) && defined(__GNUC__) && \
- (__GNUC__ > 7 || (__GNUC__ == 7 && __GNUC_MINOR__ >= 4)) && \
- (defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
+#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
+ (!defined(__clang__) && ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && \
+ (defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
(defined(_MSC_VER) && !defined(__NVCC__))
#define ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE 1
#define ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE 1
@@ -222,7 +236,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#if ABSL_INTERNAL_HAS_KEYWORD(__builtin_LINE) && \
ABSL_INTERNAL_HAS_KEYWORD(__builtin_FILE)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
-#elif defined(__GNUC__) && __GNUC__ >= 5
+#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
#endif
#endif
@@ -319,25 +333,21 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// For further details, consult the compiler's documentation.
#ifdef ABSL_HAVE_EXCEPTIONS
#error ABSL_HAVE_EXCEPTIONS cannot be directly set.
-
-#elif defined(__clang__)
-
-#if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
+#elif ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 6)
// Clang >= 3.6
#if ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // ABSL_HAVE_FEATURE(cxx_exceptions)
-#else
+#elif defined(__clang__)
// Clang < 3.6
// http://releases.llvm.org/3.6.0/tools/clang/docs/ReleaseNotes.html#the-exceptions-macro
#if defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
-#endif // __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
-
// Handle remaining special cases and default to exceptions being supported.
-#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
- !(defined(__GNUC__) && (__GNUC__ >= 5) && !defined(__cpp_exceptions)) && \
+#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
+ !(ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0) && \
+ !defined(__cpp_exceptions)) && \
!(defined(_MSC_VER) && !defined(_CPPUNWIND))
#define ABSL_HAVE_EXCEPTIONS 1
#endif
@@ -690,10 +700,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// a compiler instrumentation module and a run-time library.
#ifdef ABSL_HAVE_MEMORY_SANITIZER
#error "ABSL_HAVE_MEMORY_SANITIZER cannot be directly set."
-#elif defined(MEMORY_SANITIZER)
-// The MEMORY_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_MEMORY_SANITIZER 1
#elif defined(__SANITIZE_MEMORY__)
#define ABSL_HAVE_MEMORY_SANITIZER 1
#elif !defined(__native_client__) && ABSL_HAVE_FEATURE(memory_sanitizer)
@@ -705,10 +711,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// ThreadSanitizer (TSan) is a fast data race detector.
#ifdef ABSL_HAVE_THREAD_SANITIZER
#error "ABSL_HAVE_THREAD_SANITIZER cannot be directly set."
-#elif defined(THREAD_SANITIZER)
-// The THREAD_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_THREAD_SANITIZER 1
#elif defined(__SANITIZE_THREAD__)
#define ABSL_HAVE_THREAD_SANITIZER 1
#elif ABSL_HAVE_FEATURE(thread_sanitizer)
@@ -720,10 +722,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// AddressSanitizer (ASan) is a fast memory error detector.
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
#error "ABSL_HAVE_ADDRESS_SANITIZER cannot be directly set."
-#elif defined(ADDRESS_SANITIZER)
-// The ADDRESS_SANITIZER macro is deprecated but we will continue to honor it
-// for now.
-#define ABSL_HAVE_ADDRESS_SANITIZER 1
#elif defined(__SANITIZE_ADDRESS__)
#define ABSL_HAVE_ADDRESS_SANITIZER 1
#elif ABSL_HAVE_FEATURE(address_sanitizer)
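A small sketch (not part of the commit) of the new version-check helpers: on compilers that do not define the relevant macros they expand to 0, so they can appear in any preprocessor condition without extra defined() guards. The EXAMPLE_* name below is invented for illustration.

#include "absl/base/config.h"

// ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION / ..._MIN_CLANG_VERSION are internal to
// Abseil; they are shown here only to make the rewritten conditions concrete.
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) || \
    ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 6)
#define EXAMPLE_HAS_KNOWN_GOOD_COMPILER 1
#else
#define EXAMPLE_HAS_KNOWN_GOOD_COMPILER 0
#endif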
diff --git a/absl/base/dynamic_annotations.h b/absl/base/dynamic_annotations.h
index 880cbf6e..03d7096b 100644
--- a/absl/base/dynamic_annotations.h
+++ b/absl/base/dynamic_annotations.h
@@ -471,7 +471,7 @@ using absl::base_internal::ValgrindSlowdown;
__sanitizer_annotate_contiguous_container(beg, end, old_mid, new_mid)
#define ABSL_ADDRESS_SANITIZER_REDZONE(name) \
struct { \
- char x[8] __attribute__((aligned(8))); \
+ alignas(8) char x[8]; \
} name
#else
diff --git a/absl/base/internal/sysinfo.cc b/absl/base/internal/sysinfo.cc
index 4a3b2050..08a1e288 100644
--- a/absl/base/internal/sysinfo.cc
+++ b/absl/base/internal/sysinfo.cc
@@ -61,9 +61,76 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace base_internal {
+namespace {
+
+#if defined(_WIN32)
+
+// Returns number of bits set in `bitMask`
+DWORD Win32CountSetBits(ULONG_PTR bitMask) {
+ for (DWORD bitSetCount = 0; ; ++bitSetCount) {
+ if (bitMask == 0) return bitSetCount;
+ bitMask &= bitMask - 1;
+ }
+}
+
+// Returns the number of logical CPUs using GetLogicalProcessorInformation(), or
+// 0 if the number of processors is not available or can not be computed.
+// https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-getlogicalprocessorinformation
+int Win32NumCPUs() {
+#pragma comment(lib, "kernel32.lib")
+ using Info = SYSTEM_LOGICAL_PROCESSOR_INFORMATION;
+
+ DWORD info_size = sizeof(Info);
+ Info* info(static_cast<Info*>(malloc(info_size)));
+ if (info == nullptr) return 0;
+
+ bool success = GetLogicalProcessorInformation(info, &info_size);
+ if (!success && GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+ free(info);
+ info = static_cast<Info*>(malloc(info_size));
+ if (info == nullptr) return 0;
+ success = GetLogicalProcessorInformation(info, &info_size);
+ }
+
+ DWORD logicalProcessorCount = 0;
+ if (success) {
+ Info* ptr = info;
+ DWORD byteOffset = 0;
+ while (byteOffset + sizeof(Info) <= info_size) {
+ switch (ptr->Relationship) {
+ case RelationProcessorCore:
+ logicalProcessorCount += Win32CountSetBits(ptr->ProcessorMask);
+ break;
+
+ case RelationNumaNode:
+ case RelationCache:
+ case RelationProcessorPackage:
+ // Ignore other entries
+ break;
+
+ default:
+ // Ignore unknown entries
+ break;
+ }
+ byteOffset += sizeof(Info);
+ ptr++;
+ }
+ }
+ free(info);
+ return logicalProcessorCount;
+}
+
+#endif
+
+} // namespace
+
+
static int GetNumCPUs() {
#if defined(__myriad2__)
return 1;
+#elif defined(_WIN32)
+ const unsigned hardware_concurrency = Win32NumCPUs();
+ return hardware_concurrency ? hardware_concurrency : 1;
#else
// Other possibilities:
// - Read /sys/devices/system/cpu/online and use cpumask_parse()
diff --git a/absl/base/internal/thread_identity.cc b/absl/base/internal/thread_identity.cc
index 9950e63a..6ea010ed 100644
--- a/absl/base/internal/thread_identity.cc
+++ b/absl/base/internal/thread_identity.cc
@@ -120,10 +120,10 @@ void SetCurrentThreadIdentity(
ABSL_THREAD_IDENTITY_MODE == ABSL_THREAD_IDENTITY_MODE_USE_CPP11
// Please see the comment on `CurrentThreadIdentityIfPresent` in
-// thread_identity.h. When we cannot expose thread_local variables in
-// headers, we opt for the correct-but-slower option of not inlining this
-// function.
-#ifndef ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT
+// thread_identity.h. Because DLLs cannot expose thread_local variables in
+// headers, we opt for the correct-but-slower option of placing the definition
+// of this function only in a translation unit inside DLL.
+#if defined(ABSL_BUILD_DLL) || defined(ABSL_CONSUME_DLL)
ThreadIdentity* CurrentThreadIdentityIfPresent() { return thread_identity_ptr; }
#endif
#endif
diff --git a/absl/base/internal/thread_identity.h b/absl/base/internal/thread_identity.h
index 6e25b92f..9ee651a3 100644
--- a/absl/base/internal/thread_identity.h
+++ b/absl/base/internal/thread_identity.h
@@ -236,18 +236,13 @@ ABSL_CONST_INIT extern thread_local ThreadIdentity* thread_identity_ptr;
#error Thread-local storage not detected on this platform
#endif
-// thread_local variables cannot be in headers exposed by DLLs or in certain
-// build configurations on Apple platforms. However, it is important for
-// performance reasons in general that `CurrentThreadIdentityIfPresent` be
-// inlined. In the other cases we opt to have the function not be inlined. Note
+// thread_local variables cannot be in headers exposed by DLLs. However, it is
+// important for performance reasons in general that
+// `CurrentThreadIdentityIfPresent` be inlined. This is not possible across a
+// DLL boundary so, with DLLs, we opt to have the function not be inlined. Note
// that `CurrentThreadIdentityIfPresent` is declared above so we can exclude
-// this entire inline definition.
-#if !defined(__APPLE__) && !defined(ABSL_BUILD_DLL) && \
- !defined(ABSL_CONSUME_DLL)
-#define ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT 1
-#endif
-
-#ifdef ABSL_INTERNAL_INLINE_CURRENT_THREAD_IDENTITY_IF_PRESENT
+// this entire inline definition when compiling as a DLL.
+#if !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL)
inline ThreadIdentity* CurrentThreadIdentityIfPresent() {
return thread_identity_ptr;
}
diff --git a/absl/base/options.h b/absl/base/options.h
index eca879af..230bf1ee 100644
--- a/absl/base/options.h
+++ b/absl/base/options.h
@@ -205,8 +205,8 @@
// be changed to a new, unique identifier name. In particular "head" is not
// allowed.
-#define ABSL_OPTION_USE_INLINE_NAMESPACE 1
-#define ABSL_OPTION_INLINE_NAMESPACE_NAME lts_20210324
+#define ABSL_OPTION_USE_INLINE_NAMESPACE 0
+#define ABSL_OPTION_INLINE_NAMESPACE_NAME head
// ABSL_OPTION_HARDENED
//
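For context (not part of the commit): with ABSL_OPTION_USE_INLINE_NAMESPACE set to 1, every Abseil symbol is wrapped in an inline namespace with the configured name, which changes mangled names but not source-level spelling; setting it to 0, as this snapshot does, drops the wrapper. A stand-alone sketch of the mechanism, using a placeholder declaration rather than the real Abseil headers:

// Roughly what ABSL_NAMESPACE_BEGIN / ABSL_NAMESPACE_END expand to when the
// inline-namespace option is enabled with the name lts_20210324.
namespace absl {
inline namespace lts_20210324 {
class string_view;  // placeholder declaration for illustration
}  // inline namespace lts_20210324
}  // namespace absl

// Both spellings refer to the same entity while the inline namespace exists;
// with ABSL_OPTION_USE_INLINE_NAMESPACE == 0 only the first form is valid.
using SV1 = absl::string_view;
using SV2 = absl::lts_20210324::string_view;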
diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 74337df2..464dabac 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -2893,6 +2893,47 @@ TEST(Btree, AllocMoveConstructor_DifferentAlloc) {
EXPECT_EQ(bytes_used2, original_bytes_used);
}
+bool IntCmp(const int a, const int b) { return a < b; }
+
+TEST(Btree, SupportsFunctionPtrComparator) {
+ absl::btree_set<int, decltype(IntCmp) *> set(IntCmp);
+ set.insert({1, 2, 3});
+ EXPECT_THAT(set, ElementsAre(1, 2, 3));
+ EXPECT_TRUE(set.key_comp()(1, 2));
+ EXPECT_TRUE(set.value_comp()(1, 2));
+
+ absl::btree_map<int, int, decltype(IntCmp) *> map(&IntCmp);
+ map[1] = 1;
+ EXPECT_THAT(map, ElementsAre(Pair(1, 1)));
+ EXPECT_TRUE(map.key_comp()(1, 2));
+ // TODO(ezb): support value_comp() in this case and uncomment.
+ // EXPECT_TRUE(map.value_comp()(std::make_pair(1, 1), std::make_pair(2, 2)));
+}
+
+template <typename Compare>
+struct TransparentPassThroughComp {
+ using is_transparent = void;
+
+ // This will fail compilation if we attempt a comparison that Compare does not
+ // support, and the failure will happen inside the function implementation so
+ // it can't be avoided by using SFINAE on this comparator.
+ template <typename T, typename U>
+ bool operator()(const T &lhs, const U &rhs) const {
+ return Compare()(lhs, rhs);
+ }
+};
+
+TEST(Btree,
+ SupportsTransparentComparatorThatDoesNotImplementAllVisibleOperators) {
+ absl::btree_set<MultiKey, TransparentPassThroughComp<MultiKeyComp>> set;
+ set.insert(MultiKey{1, 2});
+ EXPECT_TRUE(set.contains(1));
+}
+
+TEST(Btree, ConstructImplicitlyWithUnadaptedComparator) {
+ absl::btree_set<MultiKey, MultiKeyComp> set = {{}, MultiKeyComp{}};
+}
+
} // namespace
} // namespace container_internal
ABSL_NAMESPACE_END
diff --git a/absl/container/flat_hash_map_test.cc b/absl/container/flat_hash_map_test.cc
index 89ec60c9..8dda1d35 100644
--- a/absl/container/flat_hash_map_test.cc
+++ b/absl/container/flat_hash_map_test.cc
@@ -282,6 +282,32 @@ TEST(FlatHashMap, NodeHandleMutableKeyAccess) {
}
#endif
+TEST(FlatHashMap, Reserve) {
+ // Verify that if we reserve(size() + n) then we can perform n insertions
+ // without a rehash, i.e., without invalidating any references.
+ for (size_t trial = 0; trial < 20; ++trial) {
+ for (size_t initial = 3; initial < 100; ++initial) {
+ // Fill in `initial` entries, then erase 2 of them, then reserve space for
+ // two inserts and check for reference stability while doing the inserts.
+ flat_hash_map<size_t, size_t> map;
+ for (size_t i = 0; i < initial; ++i) {
+ map[i] = i;
+ }
+ map.erase(0);
+ map.erase(1);
+ map.reserve(map.size() + 2);
+ size_t& a2 = map[2];
+ // In the event of a failure, asan will complain in one of these two
+ // assignments.
+ map[initial] = a2;
+ map[initial + 1] = a2;
+ // Fail even when not under asan:
+ size_t& a2new = map[2];
+ EXPECT_EQ(&a2, &a2new);
+ }
+ }
+}
+
} // namespace
} // namespace container_internal
ABSL_NAMESPACE_END
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index 0bb38366..d372a1d6 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -484,8 +484,8 @@ class btree_node {
std::is_same<std::greater<key_type>,
key_compare>::value)>;
- // This class is organized by gtl::Layout as if it had the following
- // structure:
+ // This class is organized by absl::container_internal::Layout as if it had
+ // the following structure:
// // A pointer to the node's parent.
// btree_node *parent;
//
diff --git a/absl/container/internal/hash_generator_testing.h b/absl/container/internal/hash_generator_testing.h
index 6869fe45..f1f555a5 100644
--- a/absl/container/internal/hash_generator_testing.h
+++ b/absl/container/internal/hash_generator_testing.h
@@ -21,11 +21,13 @@
#include <stdint.h>
#include <algorithm>
+#include <cassert>
#include <iosfwd>
#include <random>
#include <tuple>
#include <type_traits>
#include <utility>
+#include <vector>
#include "absl/container/internal/hash_policy_testing.h"
#include "absl/memory/memory.h"
@@ -153,6 +155,25 @@ using GeneratedType = decltype(
typename Container::value_type,
typename Container::key_type>::type>&>()());
+// Naive wrapper that performs a linear search of previous values.
+// Beware this is O(N^2) overall, which is reasonable for smaller kMaxValues.
+template <class T, size_t kMaxValues = 64, class E = void>
+struct UniqueGenerator {
+ Generator<T, E> gen;
+ std::vector<T> values;
+
+ T operator()() {
+ assert(values.size() < kMaxValues);
+ for (;;) {
+ T value = gen();
+ if (std::find(values.begin(), values.end(), value) == values.end()) {
+ values.push_back(value);
+ return value;
+ }
+ }
+ }
+};
+
} // namespace hash_internal
} // namespace container_internal
ABSL_NAMESPACE_END
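Editor's note: a brief usage sketch of the UniqueGenerator added above, mirroring how the constructor tests later in this patch consume it; the element type int and the count of 10 are illustrative assumptions.

    #include <algorithm>
    #include <iterator>
    #include <vector>

    #include "absl/container/internal/hash_generator_testing.h"

    // Produces 10 values that are guaranteed distinct from each other: the
    // generator linearly scans everything it has handed out so far and retries
    // on collision (up to kMaxValues values).
    std::vector<int> TenDistinctInts() {
      std::vector<int> out;
      std::generate_n(
          std::back_inserter(out), 10,
          absl::container_internal::hash_internal::UniqueGenerator<int>());
      return out;
    }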
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 8615de8b..b23e0078 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -1323,8 +1323,8 @@ class raw_hash_set {
}
void reserve(size_t n) {
- size_t m = GrowthToLowerboundCapacity(n);
- if (m > capacity_) {
+ if (n > size() + growth_left()) {
+ size_t m = GrowthToLowerboundCapacity(n);
resize(NormalizeCapacity(m));
}
}
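Editor's note: a sketch of the guarantee the revised reserve() is meant to uphold, matching the FlatHashMap.Reserve test earlier in this patch: reserving size() + n lets the next n insertions proceed without rehashing, so existing references stay valid. The concrete map contents below are illustrative only.

    #include "absl/container/flat_hash_map.h"

    void ReserveThenInsertWithoutRehash() {
      absl::flat_hash_map<int, int> m = {{1, 1}, {2, 2}, {3, 3}};
      m.reserve(m.size() + 2);  // ensures room for two more insertions
      int& ref = m[1];          // reference into the table
      m[100] = 100;             // no rehash happens here...
      m[101] = 101;             // ...so `ref` is not invalidated
      ref = 42;                 // still safe to use
    }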
diff --git a/absl/container/internal/unordered_map_constructor_test.h b/absl/container/internal/unordered_map_constructor_test.h
index 3f90ad7c..c1d20f3c 100644
--- a/absl/container/internal/unordered_map_constructor_test.h
+++ b/absl/container/internal/unordered_map_constructor_test.h
@@ -179,7 +179,7 @@ TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashEqualAlloc) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, equal, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.key_eq(), equal);
@@ -198,7 +198,7 @@ void InputIteratorBucketAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, alloc);
EXPECT_EQ(m.get_allocator(), alloc);
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
@@ -221,7 +221,7 @@ void InputIteratorBucketHashAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
- hash_internal::Generator<T>());
+ hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.get_allocator(), alloc);
@@ -241,8 +241,9 @@ TYPED_TEST_P(ConstructorTest, CopyConstructor) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m);
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@@ -262,8 +263,9 @@ void CopyConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m, A(11));
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@@ -285,8 +287,9 @@ TYPED_TEST_P(ConstructorTest, MoveConstructor) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t));
EXPECT_EQ(m.hash_function(), n.hash_function());
@@ -307,8 +310,9 @@ void MoveConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
- for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
+ for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t), A(1));
EXPECT_EQ(m.hash_function(), n.hash_function());
@@ -325,7 +329,7 @@ TYPED_TEST_P(ConstructorTest, MoveConstructorAlloc) {
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashEqualAlloc) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
using H = typename TypeParam::hasher;
using E = typename TypeParam::key_equal;
@@ -348,7 +352,7 @@ template <typename TypeParam>
void InitializerListBucketAllocTest(std::true_type) {
using T = hash_internal::GeneratedType<TypeParam>;
using A = typename TypeParam::allocator_type;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
A alloc(0);
TypeParam m(values, 123, alloc);
@@ -371,7 +375,7 @@ void InitializerListBucketHashAllocTest(std::true_type) {
using A = typename TypeParam::allocator_type;
H hasher;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values, 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
@@ -392,7 +396,7 @@ TYPED_TEST_P(ConstructorTest, Assignment) {
H hasher;
E equal;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam n;
n = m;
@@ -412,7 +416,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
H hasher;
E equal;
A alloc(0);
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam t(m);
TypeParam n;
@@ -424,7 +428,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@@ -433,7 +437,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam n({gen()});
n = m;
@@ -442,7 +446,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam t(m);
TypeParam n({gen()});
@@ -452,7 +456,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@@ -461,7 +465,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
using T = hash_internal::GeneratedType<TypeParam>;
- hash_internal::Generator<T> gen;
+ hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values);
m = *&m; // Avoid -Wself-assign
diff --git a/absl/copts/GENERATED_AbseilCopts.cmake b/absl/copts/GENERATED_AbseilCopts.cmake
index 51742c9b..18a1e5c3 100644
--- a/absl/copts/GENERATED_AbseilCopts.cmake
+++ b/absl/copts/GENERATED_AbseilCopts.cmake
@@ -71,6 +71,7 @@ list(APPEND ABSL_LLVM_FLAGS
"-Wformat-security"
"-Wgnu-redeclared-enum"
"-Winfinite-recursion"
+ "-Winvalid-constexpr"
"-Wliteral-conversion"
"-Wmissing-declarations"
"-Woverlength-strings"
diff --git a/absl/copts/GENERATED_copts.bzl b/absl/copts/GENERATED_copts.bzl
index 6707488f..d2bd5608 100644
--- a/absl/copts/GENERATED_copts.bzl
+++ b/absl/copts/GENERATED_copts.bzl
@@ -72,6 +72,7 @@ ABSL_LLVM_FLAGS = [
"-Wformat-security",
"-Wgnu-redeclared-enum",
"-Winfinite-recursion",
+ "-Winvalid-constexpr",
"-Wliteral-conversion",
"-Wmissing-declarations",
"-Woverlength-strings",
diff --git a/absl/copts/copts.py b/absl/copts/copts.py
index cf52981c..ce30df89 100644
--- a/absl/copts/copts.py
+++ b/absl/copts/copts.py
@@ -87,6 +87,7 @@ COPT_VARS = {
"-Wformat-security",
"-Wgnu-redeclared-enum",
"-Winfinite-recursion",
+ "-Winvalid-constexpr",
"-Wliteral-conversion",
"-Wmissing-declarations",
"-Woverlength-strings",
diff --git a/absl/copts/generate_copts.py b/absl/copts/generate_copts.py
index 0e5dc9fa..34be2fc2 100755
--- a/absl/copts/generate_copts.py
+++ b/absl/copts/generate_copts.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
"""Generate Abseil compile compile option configs.
Usage: <path_to_absl>/copts/generate_copts.py
diff --git a/absl/debugging/failure_signal_handler.cc b/absl/debugging/failure_signal_handler.cc
index 3ddebd74..689e5979 100644
--- a/absl/debugging/failure_signal_handler.cc
+++ b/absl/debugging/failure_signal_handler.cc
@@ -367,6 +367,7 @@ static void AbslFailureSignalHandler(int signo, siginfo_t*, void* ucontext) {
// goes after this point.
if (fsh_options.writerfn != nullptr) {
WriteFailureInfo(signo, ucontext, my_cpu, fsh_options.writerfn);
+ fsh_options.writerfn(nullptr);
}
if (fsh_options.call_previous_handler) {
diff --git a/absl/debugging/failure_signal_handler.h b/absl/debugging/failure_signal_handler.h
index 0c0f585d..500115c0 100644
--- a/absl/debugging/failure_signal_handler.h
+++ b/absl/debugging/failure_signal_handler.h
@@ -90,7 +90,7 @@ struct FailureSignalHandlerOptions {
// If non-null, indicates a pointer to a callback function that will be called
// upon failure, with a string argument containing failure data. This function
// may be used as a hook to write failure data to a secondary location, such
- // as a log file. This function may also be called with null data, as a hint
+ // as a log file. This function will also be called with null data, as a hint
// to flush any buffered data before the program may be terminated. Consider
// flushing any buffered data in all calls to this function.
//
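Editor's note: a hedged sketch of a writerfn that honors the flush hint documented above; the choice of stderr as the sink and the helper names are assumptions for illustration.

    #include <cstdio>

    #include "absl/debugging/failure_signal_handler.h"

    // Writes failure data to stderr. A null `data` pointer is the hint, now
    // guaranteed by the handler, to flush buffered output before termination.
    void FailureWriter(const char* data) {
      if (data == nullptr) {
        std::fflush(stderr);
        return;
      }
      std::fputs(data, stderr);
    }

    void InstallHandlerWithWriter() {
      absl::FailureSignalHandlerOptions options;
      options.writerfn = FailureWriter;
      absl::InstallFailureSignalHandler(options);
    }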
diff --git a/absl/debugging/symbolize_elf.inc b/absl/debugging/symbolize_elf.inc
index f4d5727b..87dbd078 100644
--- a/absl/debugging/symbolize_elf.inc
+++ b/absl/debugging/symbolize_elf.inc
@@ -701,6 +701,16 @@ static ABSL_ATTRIBUTE_NOINLINE FindSymbolResult FindSymbol(
const char *start_address =
ComputeOffset(original_start_address, relocation);
+#ifdef __arm__
+ // ARM functions are always aligned to multiples of two bytes; the
+ // lowest-order bit in start_address is ignored by the CPU and indicates
+ // whether the function contains ARM (0) or Thumb (1) code. We don't care
+ // about what encoding is being used; we just want the real start address
+ // of the function.
+ start_address = reinterpret_cast<const char *>(
+ reinterpret_cast<uintptr_t>(start_address) & ~1);
+#endif
+
if (deref_function_descriptor_pointer &&
InSection(original_start_address, opd)) {
// The opd section is mapped into memory. Just dereference
diff --git a/absl/debugging/symbolize_test.cc b/absl/debugging/symbolize_test.cc
index a2dd4956..35de02e2 100644
--- a/absl/debugging/symbolize_test.cc
+++ b/absl/debugging/symbolize_test.cc
@@ -477,6 +477,46 @@ void ABSL_ATTRIBUTE_NOINLINE TestWithReturnAddress() {
#endif
}
+#if defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+// Test that we correctly identify bounds of Thumb functions on ARM.
+//
+// Thumb functions have the lowest-order bit set in their addresses in the ELF
+// symbol table. This requires some extra logic to properly compute function
+// bounds. To test this logic, nudge a Thumb function right up against an ARM
+// function and try to symbolize the ARM function.
+//
+// A naive implementation will simply use the Thumb function's entry point as
+// written in the symbol table and will therefore treat the Thumb function as
+// extending one byte further in the instruction stream than it actually does.
+// When asked to symbolize the start of the ARM function, it will identify an
+// overlap between the Thumb and ARM functions, and it will return the name of
+// the Thumb function.
+//
+// A correct implementation, on the other hand, will null out the lowest-order
+// bit in the Thumb function's entry point. It will correctly compute the end of
+// the Thumb function, it will find no overlap between the Thumb and ARM
+// functions, and it will return the name of the ARM function.
+
+__attribute__((target("thumb"))) int ArmThumbOverlapThumb(int x) {
+ return x * x * x;
+}
+
+__attribute__((target("arm"))) int ArmThumbOverlapArm(int x) {
+ return x * x * x;
+}
+
+void ABSL_ATTRIBUTE_NOINLINE TestArmThumbOverlap() {
+#if defined(ABSL_HAVE_ATTRIBUTE_NOINLINE)
+ const char *symbol = TrySymbolize((void *)&ArmThumbOverlapArm);
+ ABSL_RAW_CHECK(symbol != nullptr, "TestArmThumbOverlap failed");
+ ABSL_RAW_CHECK(strcmp("ArmThumbOverlapArm()", symbol) == 0,
+ "TestArmThumbOverlap failed");
+ std::cout << "TestArmThumbOverlap passed" << std::endl;
+#endif
+}
+
+#endif // defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+
#elif defined(_WIN32)
#if !defined(ABSL_CONSUME_DLL)
@@ -551,6 +591,9 @@ int main(int argc, char **argv) {
TestWithPCInsideInlineFunction();
TestWithPCInsideNonInlineFunction();
TestWithReturnAddress();
+#if defined(__arm__) && ABSL_HAVE_ATTRIBUTE(target)
+ TestArmThumbOverlap();
+#endif
#endif
return RUN_ALL_TESTS();
diff --git a/absl/flags/flag.h b/absl/flags/flag.h
index f09580b0..14209e7b 100644
--- a/absl/flags/flag.h
+++ b/absl/flags/flag.h
@@ -265,6 +265,8 @@ ABSL_NAMESPACE_END
//
// ABSL_FLAG(T, name, default_value, help).OnUpdate(callback);
//
+// `callback` should be convertible to `void (*)()`.
+//
// After any setting of the flag value, the callback will be called at least
// once. A rapid sequence of changes may be merged together into the same
// callback. No concurrent calls to the callback will be made for the same
@@ -279,7 +281,6 @@ ABSL_NAMESPACE_END
// Note: ABSL_FLAG.OnUpdate() does not have a public definition. Hence, this
// comment serves as its API documentation.
-
// -----------------------------------------------------------------------------
// Implementation details below this section
// -----------------------------------------------------------------------------
diff --git a/absl/hash/internal/wyhash.h b/absl/hash/internal/wyhash.h
index 4aff4e93..2b534b47 100644
--- a/absl/hash/internal/wyhash.h
+++ b/absl/hash/internal/wyhash.h
@@ -36,7 +36,7 @@ namespace hash_internal {
// integers are hashed into the result.
//
// To allow all hashable types (including string_view and Span) to depend on
-// this algoritm, we keep the API low-level, with as few dependencies as
+// this algorithm, we keep the API low-level, with as few dependencies as
// possible.
uint64_t Wyhash(const void* data, size_t len, uint64_t seed,
const uint64_t salt[5]);
diff --git a/absl/memory/memory.h b/absl/memory/memory.h
index 2b5ff623..d6332606 100644
--- a/absl/memory/memory.h
+++ b/absl/memory/memory.h
@@ -420,7 +420,7 @@ struct pointer_traits<T*> {
//
// A C++11 compatible implementation of C++17's std::allocator_traits.
//
-#if __cplusplus >= 201703L
+#if __cplusplus >= 201703L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201703L)
using std::allocator_traits;
#else // __cplusplus >= 201703L
template <typename Alloc>
diff --git a/absl/meta/type_traits.h b/absl/meta/type_traits.h
index d5cb5f3b..e7c12393 100644
--- a/absl/meta/type_traits.h
+++ b/absl/meta/type_traits.h
@@ -499,6 +499,27 @@ struct is_trivially_copy_assignable
#endif // ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE
};
+#if defined(__cpp_lib_remove_cvref) && __cpp_lib_remove_cvref >= 201711L
+template <typename T>
+using remove_cvref = std::remove_cvref<T>;
+
+template <typename T>
+using remove_cvref_t = typename std::remove_cvref<T>::type;
+#else
+// remove_cvref()
+//
+// C++11 compatible implementation of std::remove_cvref which was added in
+// C++20.
+template <typename T>
+struct remove_cvref {
+ using type =
+ typename std::remove_cv<typename std::remove_reference<T>::type>::type;
+};
+
+template <typename T>
+using remove_cvref_t = typename remove_cvref<T>::type;
+#endif
+
namespace type_traits_internal {
// is_trivially_copyable()
//
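Editor's note: a short usage sketch of the remove_cvref alias added above; the sample types are arbitrary.

    #include <type_traits>

    #include "absl/meta/type_traits.h"

    // absl::remove_cvref_t strips the reference and top-level cv-qualifiers in
    // one step, matching C++20's std::remove_cvref_t.
    static_assert(std::is_same<absl::remove_cvref_t<const int&>, int>::value, "");
    static_assert(std::is_same<absl::remove_cvref_t<volatile int&&>, int>::value, "");
    // Pointee qualifiers are not touched.
    static_assert(
        std::is_same<absl::remove_cvref_t<const char*>, const char*>::value, "");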
@@ -613,7 +634,7 @@ using underlying_type_t = typename std::underlying_type<T>::type;
namespace type_traits_internal {
-#if __cplusplus >= 201703L
+#if __cplusplus >= 201703L || (defined(_MSVC_LANG) && _MSVC_LANG >= 201703L)
// std::result_of is deprecated (C++17) or removed (C++20)
template<typename> struct result_of;
template<typename F, typename... Args>
diff --git a/absl/meta/type_traits_test.cc b/absl/meta/type_traits_test.cc
index 1aafd0d4..0ef5b665 100644
--- a/absl/meta/type_traits_test.cc
+++ b/absl/meta/type_traits_test.cc
@@ -942,6 +942,34 @@ TEST(TypeTraitsTest, TestTriviallyCopyable) {
absl::type_traits_internal::is_trivially_copyable<Trivial&>::value);
}
+TEST(TypeTraitsTest, TestRemoveCVRef) {
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int&>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int&&>::type, int>::value));
+ EXPECT_TRUE((
+ std::is_same<typename absl::remove_cvref<const int&>::type, int>::value));
+ EXPECT_TRUE(
+ (std::is_same<typename absl::remove_cvref<int*>::type, int*>::value));
+ // Does not remove const in this case.
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int*>::type,
+ const int*>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int(&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<int(&&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int(&)[2]>::type,
+ int[2]>::value));
+ EXPECT_TRUE((std::is_same<typename absl::remove_cvref<const int(&&)[2]>::type,
+ int[2]>::value));
+}
+
#define ABSL_INTERNAL_EXPECT_ALIAS_EQUIVALENCE(trait_name, ...) \
EXPECT_TRUE((std::is_same<typename std::trait_name<__VA_ARGS__>::type, \
absl::trait_name##_t<__VA_ARGS__>>::value))
diff --git a/absl/numeric/int128.h b/absl/numeric/int128.h
index 0dd814a8..235361a8 100644
--- a/absl/numeric/int128.h
+++ b/absl/numeric/int128.h
@@ -817,27 +817,27 @@ inline uint128 operator-(uint128 val) {
return MakeUint128(hi, lo);
}
-inline bool operator!(uint128 val) {
+constexpr inline bool operator!(uint128 val) {
return !Uint128High64(val) && !Uint128Low64(val);
}
// Logical operators.
-inline uint128 operator~(uint128 val) {
+constexpr inline uint128 operator~(uint128 val) {
return MakeUint128(~Uint128High64(val), ~Uint128Low64(val));
}
-inline uint128 operator|(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator|(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) | Uint128High64(rhs),
Uint128Low64(lhs) | Uint128Low64(rhs));
}
-inline uint128 operator&(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator&(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) & Uint128High64(rhs),
Uint128Low64(lhs) & Uint128Low64(rhs));
}
-inline uint128 operator^(uint128 lhs, uint128 rhs) {
+constexpr inline uint128 operator^(uint128 lhs, uint128 rhs) {
return MakeUint128(Uint128High64(lhs) ^ Uint128High64(rhs),
Uint128Low64(lhs) ^ Uint128Low64(rhs));
}
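Editor's note: with the operators above now constexpr, uint128 bit manipulation can happen at compile time; a small sketch with arbitrary constants.

    #include "absl/numeric/int128.h"

    // MakeUint128, Uint128High64 and Uint128Low64 were already constexpr; the
    // newly constexpr operator& lets the mask be applied in a constant expression.
    constexpr absl::uint128 kValue = absl::MakeUint128(0x1, 0x2);
    constexpr absl::uint128 kMask = absl::MakeUint128(0x0, 0xff);
    constexpr absl::uint128 kMasked = kValue & kMask;
    static_assert(absl::Uint128High64(kMasked) == 0x0, "");
    static_assert(absl::Uint128Low64(kMasked) == 0x2, "");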
diff --git a/absl/random/discrete_distribution_test.cc b/absl/random/discrete_distribution_test.cc
index 6d007006..415b14cc 100644
--- a/absl/random/discrete_distribution_test.cc
+++ b/absl/random/discrete_distribution_test.cc
@@ -99,6 +99,7 @@ TYPED_TEST(DiscreteDistributionTypeTest, Constructor) {
}
TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
+ using testing::_;
using testing::Pair;
{
@@ -111,8 +112,8 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
// Each bucket is p=1/3, so bucket 0 will send half its traffic
// to bucket 2, while the rest will retain all of their traffic.
EXPECT_THAT(q, testing::ElementsAre(Pair(0.5, 2), //
- Pair(1.0, 1), //
- Pair(1.0, 2)));
+ Pair(1.0, _), //
+ Pair(1.0, _)));
}
{
@@ -135,7 +136,7 @@ TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
EXPECT_THAT(q, testing::ElementsAre(Pair(b0, 3), //
Pair(b1, 3), //
- Pair(1.0, 2), //
+ Pair(1.0, _), //
Pair(b3, 2), //
Pair(b1, 3)));
}
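Editor's note: the switch to the `_` wildcard above drops assertions on the alias-bucket index, which is presumably not stable across implementations; below is a self-contained gmock sketch of the same matcher pattern (the vector contents are made up).

    #include <utility>
    #include <vector>

    #include "gmock/gmock.h"
    #include "gtest/gtest.h"

    TEST(AliasTableSketch, SecondElementIgnored) {
      using ::testing::_;
      using ::testing::ElementsAre;
      using ::testing::Pair;
      std::vector<std::pair<double, int>> q = {{0.5, 2}, {1.0, 7}};
      // Pair(1.0, _) matches regardless of which bucket the alias points at.
      EXPECT_THAT(q, ElementsAre(Pair(0.5, 2), Pair(1.0, _)));
    }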
diff --git a/absl/random/internal/pool_urbg.cc b/absl/random/internal/pool_urbg.cc
index 5bee5307..725100a4 100644
--- a/absl/random/internal/pool_urbg.cc
+++ b/absl/random/internal/pool_urbg.cc
@@ -194,11 +194,10 @@ RandenPoolEntry* PoolAlignedAlloc() {
// Not all the platforms that we build for have std::aligned_alloc; however,
// since we never free these objects, we can over-allocate and adjust the
// pointers to the correct alignment.
- void* memory = std::malloc(sizeof(RandenPoolEntry) + kAlignment);
- auto x = reinterpret_cast<intptr_t>(memory);
+ intptr_t x = reinterpret_cast<intptr_t>(
+ new char[sizeof(RandenPoolEntry) + kAlignment]);
auto y = x % kAlignment;
- void* aligned =
- (y == 0) ? memory : reinterpret_cast<void*>(x + kAlignment - y);
+ void* aligned = reinterpret_cast<void*>(y == 0 ? x : (x + kAlignment - y));
return new (aligned) RandenPoolEntry();
}
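Editor's note: the over-allocate-and-round-up pattern used above, shown in isolation; the template parameters are placeholders, and, like pool_urbg.cc, the buffer is intentionally never freed.

    #include <cstddef>
    #include <cstdint>

    // Returns storage suitably aligned for kAlignment by allocating
    // sizeof(T) + kAlignment bytes and advancing the pointer to the next
    // multiple of kAlignment. Usage: new (AlignedLeakyAlloc<T, 64>()) T();
    template <typename T, std::size_t kAlignment>
    void* AlignedLeakyAlloc() {
      auto x = reinterpret_cast<std::intptr_t>(new char[sizeof(T) + kAlignment]);
      auto y = x % kAlignment;
      return reinterpret_cast<void*>(y == 0 ? x : x + kAlignment - y);
    }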
diff --git a/absl/status/internal/status_internal.h b/absl/status/internal/status_internal.h
index 99a2d964..ccafd702 100644
--- a/absl/status/internal/status_internal.h
+++ b/absl/status/internal/status_internal.h
@@ -47,11 +47,11 @@ using Payloads = absl::InlinedVector<Payload, 1>;
// Reference-counted representation of Status data.
struct StatusRep {
- StatusRep(absl::StatusCode code, std::string message,
+ StatusRep(absl::StatusCode code, absl::string_view message,
std::unique_ptr<status_internal::Payloads> payloads)
: ref(int32_t{1}),
code(code),
- message(std::move(message)),
+ message(message),
payloads(std::move(payloads)) {}
std::atomic<int32_t> ref;
diff --git a/absl/status/status.cc b/absl/status/status.cc
index 51a0d268..5a5cd5c2 100644
--- a/absl/status/status.cc
+++ b/absl/status/status.cc
@@ -207,19 +207,10 @@ void Status::UnrefNonInlined(uintptr_t rep) {
}
}
-uintptr_t Status::NewRep(
- absl::StatusCode code, absl::string_view msg,
- std::unique_ptr<status_internal::Payloads> payloads) {
- status_internal::StatusRep* rep = new status_internal::StatusRep(
- code, std::string(msg.data(), msg.size()),
- std::move(payloads));
- return PointerToRep(rep);
-}
-
Status::Status(absl::StatusCode code, absl::string_view msg)
: rep_(CodeToInlinedRep(code)) {
if (code != absl::StatusCode::kOk && !msg.empty()) {
- rep_ = NewRep(code, msg, nullptr);
+ rep_ = PointerToRep(new status_internal::StatusRep(code, msg, nullptr));
}
}
@@ -238,9 +229,9 @@ absl::StatusCode Status::code() const {
void Status::PrepareToModify() {
ABSL_RAW_CHECK(!ok(), "PrepareToModify shouldn't be called on OK status.");
if (IsInlined(rep_)) {
- rep_ =
- NewRep(static_cast<absl::StatusCode>(raw_code()), absl::string_view(),
- nullptr);
+ rep_ = PointerToRep(new status_internal::StatusRep(
+ static_cast<absl::StatusCode>(raw_code()), absl::string_view(),
+ nullptr));
return;
}
@@ -251,8 +242,9 @@ void Status::PrepareToModify() {
if (rep->payloads) {
payloads = absl::make_unique<status_internal::Payloads>(*rep->payloads);
}
- rep_ = NewRep(rep->code, message(),
- std::move(payloads));
+ status_internal::StatusRep* const new_rep = new status_internal::StatusRep(
+ rep->code, message(), std::move(payloads));
+ rep_ = PointerToRep(new_rep);
UnrefNonInlined(rep_i);
}
}
diff --git a/absl/status/status.h b/absl/status/status.h
index 61486fee..2e05f46e 100644
--- a/absl/status/status.h
+++ b/absl/status/status.h
@@ -293,6 +293,8 @@ enum class StatusToStringMode : int {
kWithPayload = 1 << 0,
// ToString will include all the extra data this Status has.
kWithEverything = ~kWithNoExtraData,
+ // Default mode used by ToString. Its exact value might change in the future.
+ kDefault = kWithPayload,
};
// absl::StatusToStringMode is specified as a bitmask type, which means the
@@ -509,7 +511,7 @@ class Status final {
// result, and the payloads to be printed use the status payload printer
// mechanism (which is internal).
std::string ToString(
- StatusToStringMode mode = StatusToStringMode::kWithPayload) const;
+ StatusToStringMode mode = StatusToStringMode::kDefault) const;
// Status::IgnoreError()
//
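Editor's note: how the new kDefault value surfaces at the call site, sketched with an arbitrary error status.

    #include <string>

    #include "absl/status/status.h"

    void ToStringModesSketch() {
      absl::Status s = absl::NotFoundError("no such key");
      // The first two calls are equivalent: kDefault currently aliases kWithPayload.
      std::string a = s.ToString();
      std::string b = s.ToString(absl::StatusToStringMode::kDefault);
      std::string c = s.ToString(absl::StatusToStringMode::kWithEverything);
      (void)a; (void)b; (void)c;
    }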
diff --git a/absl/status/status_test.cc b/absl/status/status_test.cc
index 0e1a43ce..1b038f6d 100644
--- a/absl/status/status_test.cc
+++ b/absl/status/status_test.cc
@@ -36,7 +36,9 @@ TEST(StatusCode, InsertionOperator) {
// its creator, and its classifier.
struct ErrorTest {
absl::StatusCode code;
- using Creator = absl::Status (*)(absl::string_view);
+ using Creator = absl::Status (*)(
+ absl::string_view
+ );
using Classifier = bool (*)(const absl::Status&);
Creator creator;
Classifier classifier;
@@ -78,7 +80,9 @@ TEST(Status, CreateAndClassify) {
// expected error code and message.
std::string message =
absl::StrCat("error code ", test.code, " test message");
- absl::Status status = test.creator(message);
+ absl::Status status = test.creator(
+ message
+ );
EXPECT_EQ(test.code, status.code());
EXPECT_EQ(message, status.message());
diff --git a/absl/strings/BUILD.bazel b/absl/strings/BUILD.bazel
index 123b5efb..1cb5b3e5 100644
--- a/absl/strings/BUILD.bazel
+++ b/absl/strings/BUILD.bazel
@@ -297,6 +297,28 @@ cc_library(
)
cc_library(
+ name = "cordz_update_tracker",
+ hdrs = ["internal/cordz_update_tracker.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = ["//absl/base:config"],
+)
+
+cc_test(
+ name = "cordz_update_tracker_test",
+ srcs = ["internal/cordz_update_tracker_test.cc"],
+ deps = [
+ ":cordz_update_tracker",
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ "//absl/synchronization",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_library(
name = "cord",
srcs = [
"cord.cc",
@@ -307,10 +329,16 @@ cc_library(
copts = ABSL_DEFAULT_COPTS,
deps = [
":cord_internal",
+ ":cordz_functions",
+ ":cordz_info",
+ ":cordz_statistics",
+ ":cordz_update_scope",
+ ":cordz_update_tracker",
":internal",
":str_format",
":strings",
"//absl/base",
+ "//absl/base:config",
"//absl/base:core_headers",
"//absl/base:endian",
"//absl/base:raw_logging_internal",
@@ -323,6 +351,215 @@ cc_library(
)
cc_library(
+ name = "cordz_handle",
+ srcs = ["internal/cordz_handle.cc"],
+ hdrs = ["internal/cordz_handle.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ "//absl/base",
+ "//absl/base:config",
+ "//absl/base:raw_logging_internal",
+ "//absl/synchronization",
+ ],
+)
+
+cc_library(
+ name = "cordz_info",
+ srcs = ["internal/cordz_info.cc"],
+ hdrs = ["internal/cordz_info.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ ":cord_internal",
+ ":cordz_functions",
+ ":cordz_handle",
+ ":cordz_statistics",
+ ":cordz_update_tracker",
+ "//absl/base",
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ "//absl/base:raw_logging_internal",
+ "//absl/container:inlined_vector",
+ "//absl/debugging:stacktrace",
+ "//absl/synchronization",
+ "//absl/types:span",
+ ],
+)
+
+cc_library(
+ name = "cordz_update_scope",
+ hdrs = ["internal/cordz_update_scope.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ ":cord_internal",
+ ":cordz_info",
+ ":cordz_update_tracker",
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ ],
+)
+
+cc_test(
+ name = "cordz_update_scope_test",
+ srcs = ["internal/cordz_update_scope_test.cc"],
+ copts = ABSL_DEFAULT_COPTS,
+ deps = [
+ ":cord_internal",
+ ":cordz_info",
+ ":cordz_test_helpers",
+ ":cordz_update_scope",
+ ":cordz_update_tracker",
+ "//absl/base:config",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_library(
+ name = "cordz_sample_token",
+ srcs = ["internal/cordz_sample_token.cc"],
+ hdrs = ["internal/cordz_sample_token.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ ":cordz_handle",
+ ":cordz_info",
+ "//absl/base:config",
+ ],
+)
+
+cc_library(
+ name = "cordz_functions",
+ srcs = ["internal/cordz_functions.cc"],
+ hdrs = ["internal/cordz_functions.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ "//absl/base:exponential_biased",
+ "//absl/base:raw_logging_internal",
+ ],
+)
+
+cc_library(
+ name = "cordz_statistics",
+ hdrs = ["internal/cordz_statistics.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ visibility = [
+ "//absl:__subpackages__",
+ ],
+ deps = [
+ ":cordz_update_tracker",
+ "//absl/base:config",
+ ],
+)
+
+cc_test(
+ name = "cordz_functions_test",
+ srcs = [
+ "internal/cordz_functions_test.cc",
+ ],
+ deps = [
+ ":cordz_functions",
+ ":cordz_test_helpers",
+ "//absl/base:config",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_test(
+ name = "cordz_handle_test",
+ srcs = [
+ "internal/cordz_handle_test.cc",
+ ],
+ deps = [
+ ":cordz_handle",
+ "//absl/base:config",
+ "//absl/memory",
+ "//absl/random",
+ "//absl/random:distributions",
+ "//absl/synchronization",
+ "//absl/synchronization:thread_pool",
+ "//absl/time",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_test(
+ name = "cordz_info_test",
+ srcs = [
+ "internal/cordz_info_test.cc",
+ ],
+ deps = [
+ ":cord_internal",
+ ":cordz_handle",
+ ":cordz_info",
+ ":cordz_statistics",
+ ":cordz_test_helpers",
+ ":cordz_update_tracker",
+ ":strings",
+ "//absl/base:config",
+ "//absl/debugging:stacktrace",
+ "//absl/debugging:symbolize",
+ "//absl/types:span",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_test(
+ name = "cordz_info_statistics_test",
+ srcs = [
+ "internal/cordz_info_statistics_test.cc",
+ ],
+ deps = [
+ ":cord",
+ ":cord_internal",
+ ":cordz_info",
+ ":cordz_sample_token",
+ ":cordz_statistics",
+ ":cordz_update_scope",
+ ":cordz_update_tracker",
+ "//absl/base:config",
+ "//absl/synchronization",
+ "//absl/synchronization:thread_pool",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_test(
+ name = "cordz_sample_token_test",
+ srcs = [
+ "internal/cordz_sample_token_test.cc",
+ ],
+ deps = [
+ ":cord_internal",
+ ":cordz_handle",
+ ":cordz_info",
+ ":cordz_sample_token",
+ ":cordz_test_helpers",
+ "//absl/base:config",
+ "//absl/memory",
+ "//absl/random",
+ "//absl/synchronization",
+ "//absl/synchronization:thread_pool",
+ "//absl/time",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_library(
name = "cord_test_helpers",
testonly = 1,
hdrs = [
@@ -331,6 +568,28 @@ cc_library(
copts = ABSL_DEFAULT_COPTS,
deps = [
":cord",
+ ":cord_internal",
+ ":strings",
+ "//absl/base:config",
+ ],
+)
+
+cc_library(
+ name = "cordz_test_helpers",
+ testonly = 1,
+ hdrs = ["cordz_test_helpers.h"],
+ copts = ABSL_DEFAULT_COPTS,
+ deps = [
+ ":cord",
+ ":cord_internal",
+ ":cordz_info",
+ ":cordz_sample_token",
+ ":cordz_statistics",
+ ":cordz_update_tracker",
+ ":strings",
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ "@com_google_googletest//:gtest",
],
)
@@ -343,6 +602,8 @@ cc_test(
deps = [
":cord",
":cord_test_helpers",
+ ":cordz_functions",
+ ":cordz_test_helpers",
":str_format",
":strings",
"//absl/base",
@@ -356,6 +617,39 @@ cc_test(
)
cc_test(
+ name = "cordz_test",
+ size = "medium",
+ srcs = ["cordz_test.cc"],
+ copts = ABSL_TEST_COPTS,
+ tags = [
+ "benchmark",
+ "no_test_android_arm",
+ "no_test_android_arm64",
+ "no_test_android_x86",
+ "no_test_darwin_x86_64",
+ "no_test_ios_x86_64",
+ "no_test_loonix",
+ "no_test_msvc_x64",
+ ],
+ visibility = ["//visibility:private"],
+ deps = [
+ ":cord",
+ ":cord_test_helpers",
+ ":cordz_functions",
+ ":cordz_info",
+ ":cordz_sample_token",
+ ":cordz_statistics",
+ ":cordz_test_helpers",
+ ":cordz_update_tracker",
+ ":strings",
+ "//absl/base:config",
+ "//absl/base:core_headers",
+ "//absl/base:raw_logging_internal",
+ "@com_google_googletest//:gtest_main",
+ ],
+)
+
+cc_test(
name = "cord_ring_test",
size = "medium",
srcs = ["cord_ring_test.cc"],
@@ -434,6 +728,7 @@ cc_test(
":strings",
"//absl/base:core_headers",
"//absl/base:dynamic_annotations",
+ "//absl/container:btree",
"//absl/container:flat_hash_map",
"//absl/container:node_hash_map",
"@com_google_googletest//:gtest_main",
diff --git a/absl/strings/CMakeLists.txt b/absl/strings/CMakeLists.txt
index 3b7ae639..d3f1523c 100644
--- a/absl/strings/CMakeLists.txt
+++ b/absl/strings/CMakeLists.txt
@@ -221,9 +221,9 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::strings
- absl::base
absl::core_headers
absl::dynamic_annotations
+ absl::btree
absl::flat_hash_map
absl::node_hash_map
gmock_main
@@ -552,34 +552,295 @@ absl_cc_test(
absl_cc_library(
NAME
- cord
+ cord_internal
HDRS
- "cord.h"
- SRCS
- "cord.cc"
- "internal/cord_internal.cc"
"internal/cord_internal.h"
+ "internal/cord_rep_flat.h"
"internal/cord_rep_ring.h"
- "internal/cord_rep_ring.cc"
"internal/cord_rep_ring_reader.h"
- "internal/cord_rep_flat.h"
+ SRCS
+ "internal/cord_internal.cc"
+ "internal/cord_rep_ring.cc"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
- absl::base
absl::base_internal
absl::compressed_tuple
absl::config
absl::core_headers
absl::endian
+ absl::inlined_vector
+ absl::layout
+ absl::raw_logging_internal
+ absl::strings
+ absl::throw_delegate
+ absl::type_traits
+)
+
+absl_cc_library(
+ NAME
+ cordz_update_tracker
+ HDRS
+ "internal/cordz_update_tracker.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+)
+
+absl_cc_test(
+ NAME
+ cordz_update_tracker_test
+ SRCS
+ "internal/cordz_update_tracker_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::synchronization
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_functions
+ HDRS
+ "internal/cordz_functions.h"
+ SRCS
+ "internal/cordz_functions.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::core_headers
+ absl::exponential_biased
+ absl::raw_logging_internal
+)
+
+absl_cc_test(
+ NAME
+ cordz_functions_test
+ SRCS
+ "internal/cordz_functions_test.cc"
+ DEPS
+ absl::config
+ absl::cordz_functions
+ absl::cordz_test_helpers
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_statistics
+ HDRS
+ "internal/cordz_statistics.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::core_headers
+ absl::cordz_update_tracker
+ absl::synchronization
+)
+
+absl_cc_library(
+ NAME
+ cordz_handle
+ HDRS
+ "internal/cordz_handle.h"
+ SRCS
+ "internal/cordz_handle.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::raw_logging_internal
+ absl::synchronization
+)
+
+absl_cc_test(
+ NAME
+ cordz_handle_test
+ SRCS
+ "internal/cordz_handle_test.cc"
+ DEPS
+ absl::config
+ absl::cordz_handle
+ absl::cordz_test_helpers
+ absl::memory
+ absl::random_random
+ absl::random_distributions
+ absl::synchronization
+ absl::time
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_info
+ HDRS
+ "internal/cordz_info.h"
+ SRCS
+ "internal/cordz_info.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::cord_internal
+ absl::cordz_functions
+ absl::cordz_handle
+ absl::cordz_statistics
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::inlined_vector
+ absl::span
+ absl::raw_logging_internal
+ absl::stacktrace
+ absl::synchronization
+)
+
+absl_cc_test(
+ NAME
+ cordz_info_test
+ SRCS
+ "internal/cordz_info_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_handle
+ absl::cordz_info
+ absl::cordz_statistics
+ absl::cordz_test_helpers
+ absl::cordz_update_tracker
+ absl::span
+ absl::stacktrace
+ absl::symbolize
+ gmock_main
+)
+
+absl_cc_test(
+ NAME
+ cordz_info_statistics_test
+ SRCS
+ "internal/cordz_info_statistics_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::thread_pool
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_sample_token
+ HDRS
+ "internal/cordz_sample_token.h"
+ SRCS
+ "internal/cordz_sample_token.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::cordz_handle
+ absl::cordz_info
+)
+
+absl_cc_test(
+ NAME
+ cordz_sample_token_test
+ SRCS
+ "internal/cordz_sample_token_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_handle
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_test_helpers
+ absl::memory
+ absl::random_random
+ absl::synchronization
+ absl::thread_pool
+ absl::time
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cordz_update_scope
+ HDRS
+ "internal/cordz_update_scope.h"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_update_tracker
+ absl::core_headers
+)
+
+absl_cc_test(
+ NAME
+ cordz_update_scope_test
+ SRCS
+ "internal/cordz_update_scope_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_test_helpers
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::core_headers
+ gmock_main
+)
+
+absl_cc_library(
+ NAME
+ cord
+ HDRS
+ "cord.h"
+ SRCS
+ "cord.cc"
+ COPTS
+ ${ABSL_DEFAULT_COPTS}
+ DEPS
+ absl::base
+ absl::config
+ absl::cord_internal
+ absl::cordz_functions
+ absl::cordz_info
+ absl::cordz_update_scope
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::endian
absl::fixed_array
absl::function_ref
absl::inlined_vector
absl::optional
absl::raw_logging_internal
absl::strings
- absl::strings_internal
- absl::throw_delegate
absl::type_traits
PUBLIC
)
@@ -592,7 +853,30 @@ absl_cc_library(
COPTS
${ABSL_TEST_COPTS}
DEPS
+ absl::config
+ absl::cord
+ absl::cord_internal
+ absl::strings
+ TESTONLY
+)
+
+absl_cc_library(
+ NAME
+ cordz_test_helpers
+ HDRS
+ "cordz_test_helpers.h"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::config
absl::cord
+ absl::cord_internal
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_tracker
+ absl::core_headers
+ absl::strings
TESTONLY
)
@@ -609,6 +893,8 @@ absl_cc_test(
absl::strings
absl::base
absl::config
+ absl::cord_test_helpers
+ absl::cordz_test_helpers
absl::core_headers
absl::endian
absl::raw_logging_internal
@@ -624,12 +910,12 @@ absl_cc_test(
COPTS
${ABSL_TEST_COPTS}
DEPS
- absl::config
- absl::cord
- absl::strings
absl::base
+ absl::config
+ absl::cord_internal
absl::core_headers
absl::raw_logging_internal
+ absl::strings
gmock_main
)
@@ -641,9 +927,33 @@ absl_cc_test(
COPTS
${ABSL_TEST_COPTS}
DEPS
- absl::cord
+ absl::base
+ absl::cord_internal
+ absl::core_headers
absl::strings
+ gmock_main
+)
+
+absl_cc_test(
+ NAME
+ cordz_test
+ SRCS
+ "cordz_test.cc"
+ COPTS
+ ${ABSL_TEST_COPTS}
+ DEPS
+ absl::cord
+ absl::cord_test_helpers
+ absl::cordz_test_helpers
+ absl::cordz_functions
+ absl::cordz_info
+ absl::cordz_sample_token
+ absl::cordz_statistics
+ absl::cordz_update_tracker
absl::base
+ absl::config
absl::core_headers
+ absl::raw_logging_internal
+ absl::strings
gmock_main
)
diff --git a/absl/strings/charconv.cc b/absl/strings/charconv.cc
index b8674c28..fefcfc90 100644
--- a/absl/strings/charconv.cc
+++ b/absl/strings/charconv.cc
@@ -111,7 +111,7 @@ struct FloatTraits<double> {
return sign ? -ldexp(mantissa, exponent) : ldexp(mantissa, exponent);
#else
constexpr uint64_t kMantissaMask =
- (uint64_t(1) << (kTargetMantissaBits - 1)) - 1;
+ (uint64_t{1} << (kTargetMantissaBits - 1)) - 1;
uint64_t dbl = static_cast<uint64_t>(sign) << 63;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -151,7 +151,7 @@ struct FloatTraits<float> {
return sign ? -ldexpf(mantissa, exponent) : ldexpf(mantissa, exponent);
#else
constexpr uint32_t kMantissaMask =
- (uint32_t(1) << (kTargetMantissaBits - 1)) - 1;
+ (uint32_t{1} << (kTargetMantissaBits - 1)) - 1;
uint32_t flt = static_cast<uint32_t>(sign) << 31;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -499,7 +499,7 @@ bool MustRoundUp(uint64_t guess_mantissa, int guess_exponent,
template <typename FloatType>
CalculatedFloat CalculatedFloatFromRawValues(uint64_t mantissa, int exponent) {
CalculatedFloat result;
- if (mantissa == uint64_t(1) << FloatTraits<FloatType>::kTargetMantissaBits) {
+ if (mantissa == uint64_t{1} << FloatTraits<FloatType>::kTargetMantissaBits) {
mantissa >>= 1;
exponent += 1;
}
diff --git a/absl/strings/cord.cc b/absl/strings/cord.cc
index 93533757..5dad781e 100644
--- a/absl/strings/cord.cc
+++ b/absl/strings/cord.cc
@@ -38,6 +38,9 @@
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_flat.h"
#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
#include "absl/strings/internal/resize_uninitialized.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
@@ -53,8 +56,10 @@ using ::absl::cord_internal::CordRepExternal;
using ::absl::cord_internal::CordRepFlat;
using ::absl::cord_internal::CordRepRing;
using ::absl::cord_internal::CordRepSubstring;
-using ::absl::cord_internal::kMinFlatLength;
+using ::absl::cord_internal::CordzUpdateTracker;
+using ::absl::cord_internal::InlineData;
using ::absl::cord_internal::kMaxFlatLength;
+using ::absl::cord_internal::kMinFlatLength;
using ::absl::cord_internal::CONCAT;
using ::absl::cord_internal::EXTERNAL;
@@ -206,7 +211,7 @@ static CordRep* MakeBalancedTree(CordRep** reps, size_t n) {
}
static CordRepFlat* CreateFlat(const char* data, size_t length,
- size_t alloc_hint) {
+ size_t alloc_hint) {
CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
flat->length = length;
memcpy(flat->Data(), data, length);
@@ -230,9 +235,7 @@ static CordRep* RingNewTree(const char* data, size_t length,
// Create a new tree out of the specified array.
// The returned node has a refcount of 1.
-static CordRep* NewTree(const char* data,
- size_t length,
- size_t alloc_hint) {
+static CordRep* NewTree(const char* data, size_t length, size_t alloc_hint) {
if (length == 0) return nullptr;
if (cord_ring_enabled()) {
return RingNewTree(data, length, alloc_hint);
@@ -279,6 +282,35 @@ static CordRep* NewSubstring(CordRep* child, size_t offset, size_t length) {
}
}
+// Creates a CordRep from the provided string. If the string is large enough,
+// and not wasteful, we move the string into an external cord rep, preserving
+// the already allocated string contents.
+// Requires the provided string length to be larger than `kMaxInline`.
+static CordRep* CordRepFromString(std::string&& src) {
+ assert(src.length() > cord_internal::kMaxInline);
+ if (
+ // String is short: copy data to avoid external block overhead.
+ src.size() <= kMaxBytesToCopy ||
+ // String is wasteful: copy data to avoid pinning too much unused memory.
+ src.size() < src.capacity() / 2
+ ) {
+ return NewTree(src.data(), src.size(), 0);
+ }
+
+ struct StringReleaser {
+ void operator()(absl::string_view /* data */) {}
+ std::string data;
+ };
+ const absl::string_view original_data = src;
+ auto* rep =
+ static_cast<::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
+ absl::cord_internal::NewExternalRep(original_data,
+ StringReleaser{std::move(src)}));
+ // Moving src may have invalidated its data pointer, so adjust it.
+ rep->base = rep->template get<0>().data.data();
+ return rep;
+}
+
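Editor's note: what the helper above enables at the public API level, as a hedged sketch: constructing a Cord from a sufficiently large, non-wasteful std::string rvalue adopts the string's buffer in an external rep instead of copying it. The 1 MiB size below is an illustrative value, not a documented threshold.

    #include <string>
    #include <utility>

    #include "absl/strings/cord.h"

    absl::Cord CordFromLargeString() {
      std::string big(1 << 20, 'x');    // comfortably above kMaxBytesToCopy
      absl::Cord cord(std::move(big));  // buffer is typically wrapped, not copied
      return cord;
    }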
// --------------------------------------------------------------------
// Cord::InlineRep functions
@@ -299,20 +331,6 @@ inline char* Cord::InlineRep::set_data(size_t n) {
return data_.as_chars();
}
-inline CordRep* Cord::InlineRep::force_tree(size_t extra_hint) {
- if (data_.is_tree()) {
- return data_.as_tree();
- }
-
- size_t len = inline_size();
- CordRepFlat* result = CordRepFlat::New(len + extra_hint);
- result->length = len;
- static_assert(kMinFlatLength >= sizeof(data_), "");
- memcpy(result->Data(), data_.as_chars(), sizeof(data_));
- set_tree(result);
- return result;
-}
-
inline void Cord::InlineRep::reduce_size(size_t n) {
size_t tag = inline_size();
assert(tag <= kMaxInline);
@@ -334,25 +352,72 @@ static CordRepRing* ForceRing(CordRep* rep, size_t extra) {
return (rep->tag == RING) ? rep->ring() : CordRepRing::Create(rep, extra);
}
-void Cord::InlineRep::AppendTree(CordRep* tree) {
+void Cord::InlineRep::AppendTreeToInlined(CordRep* tree,
+ MethodIdentifier method) {
+ assert(!is_tree());
+ if (!data_.is_empty()) {
+ CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Append(CordRepRing::Create(flat, 1), tree);
+ } else {
+ tree = Concat(flat, tree);
+ }
+ }
+ EmplaceTree(tree, method);
+}
+
+void Cord::InlineRep::AppendTreeToTree(CordRep* tree, MethodIdentifier method) {
+ assert(is_tree());
+ const CordzUpdateScope scope(data_.cordz_info(), method);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Append(ForceRing(data_.as_tree(), 1), tree);
+ } else {
+ tree = Concat(data_.as_tree(), tree);
+ }
+ SetTree(tree, scope);
+}
+
+void Cord::InlineRep::AppendTree(CordRep* tree, MethodIdentifier method) {
if (tree == nullptr) return;
- if (data_.is_empty()) {
- set_tree(tree);
- } else if (cord_ring_enabled()) {
- set_tree(CordRepRing::Append(ForceRing(force_tree(0), 1), tree));
+ if (data_.is_tree()) {
+ AppendTreeToTree(tree, method);
+ } else {
+ AppendTreeToInlined(tree, method);
+ }
+}
+
+void Cord::InlineRep::PrependTreeToInlined(CordRep* tree,
+ MethodIdentifier method) {
+ assert(!is_tree());
+ if (!data_.is_empty()) {
+ CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Prepend(CordRepRing::Create(flat, 1), tree);
+ } else {
+ tree = Concat(tree, flat);
+ }
+ }
+ EmplaceTree(tree, method);
+}
+
+void Cord::InlineRep::PrependTreeToTree(CordRep* tree,
+ MethodIdentifier method) {
+ assert(is_tree());
+ const CordzUpdateScope scope(data_.cordz_info(), method);
+ if (cord_ring_enabled()) {
+ tree = CordRepRing::Prepend(ForceRing(data_.as_tree(), 1), tree);
} else {
- set_tree(Concat(force_tree(0), tree));
+ tree = Concat(tree, data_.as_tree());
}
+ SetTree(tree, scope);
}
-void Cord::InlineRep::PrependTree(CordRep* tree) {
+void Cord::InlineRep::PrependTree(CordRep* tree, MethodIdentifier method) {
assert(tree != nullptr);
- if (data_.is_empty()) {
- set_tree(tree);
- } else if (cord_ring_enabled()) {
- set_tree(CordRepRing::Prepend(ForceRing(force_tree(0), 1), tree));
+ if (data_.is_tree()) {
+ PrependTreeToTree(tree, method);
} else {
- set_tree(Concat(tree, force_tree(0)));
+ PrependTreeToInlined(tree, method);
}
}
@@ -404,76 +469,43 @@ static inline bool PrepareAppendRegion(CordRep* root, char** region,
return true;
}
+template <bool has_length>
void Cord::InlineRep::GetAppendRegion(char** region, size_t* size,
- size_t max_length) {
- if (max_length == 0) {
- *region = nullptr;
- *size = 0;
- return;
- }
-
- // Try to fit in the inline buffer if possible.
- if (!is_tree()) {
- size_t inline_length = inline_size();
- if (max_length <= kMaxInline - inline_length) {
- *region = data_.as_chars() + inline_length;
- *size = max_length;
- set_inline_size(inline_length + max_length);
- return;
- }
- }
-
- CordRep* root = force_tree(max_length);
-
- if (PrepareAppendRegion(root, region, size, max_length)) {
- return;
- }
-
- // Allocate new node.
- CordRepFlat* new_node =
- CordRepFlat::New(std::max(static_cast<size_t>(root->length), max_length));
- new_node->length = std::min(new_node->Capacity(), max_length);
- *region = new_node->Data();
- *size = new_node->length;
-
- if (cord_ring_enabled()) {
- replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
- return;
- }
- replace_tree(Concat(root, new_node));
-}
-
-void Cord::InlineRep::GetAppendRegion(char** region, size_t* size) {
- const size_t max_length = std::numeric_limits<size_t>::max();
-
- // Try to fit in the inline buffer if possible.
- if (!data_.is_tree()) {
- size_t inline_length = inline_size();
- if (inline_length < kMaxInline) {
- *region = data_.as_chars() + inline_length;
- *size = kMaxInline - inline_length;
- set_inline_size(kMaxInline);
+ size_t length) {
+ auto constexpr method = CordzUpdateTracker::kGetAppendRegion;
+
+ CordRep* root = tree();
+ size_t sz = root ? root->length : inline_size();
+ if (root == nullptr) {
+ size_t available = kMaxInline - sz;
+ if (available >= (has_length ? length : 1)) {
+ *region = data_.as_chars() + sz;
+ *size = has_length ? length : available;
+ set_inline_size(has_length ? sz + length : kMaxInline);
return;
}
}
- CordRep* root = force_tree(max_length);
-
- if (PrepareAppendRegion(root, region, size, max_length)) {
+ size_t extra = has_length ? length : (std::max)(sz, kMinFlatLength);
+ CordRep* rep = root ? root : MakeFlatWithExtraCapacity(extra);
+ CordzUpdateScope scope(root ? data_.cordz_info() : nullptr, method);
+ if (PrepareAppendRegion(rep, region, size, length)) {
+ CommitTree(root, rep, scope, method);
return;
}
// Allocate new node.
- CordRepFlat* new_node = CordRepFlat::New(root->length);
- new_node->length = new_node->Capacity();
+ CordRepFlat* new_node = CordRepFlat::New(extra);
+ new_node->length = std::min(new_node->Capacity(), length);
*region = new_node->Data();
*size = new_node->length;
if (cord_ring_enabled()) {
- replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
- return;
+ rep = CordRepRing::Append(ForceRing(rep, 1), new_node);
+ } else {
+ rep = Concat(rep, new_node);
}
- replace_tree(Concat(root, new_node));
+ CommitTree(root, rep, scope, method);
}
// If the rep is a leaf, this will increment the value at total_mem_usage and
@@ -484,68 +516,81 @@ static bool RepMemoryUsageLeaf(const CordRep* rep, size_t* total_mem_usage) {
return true;
}
if (rep->tag == EXTERNAL) {
- *total_mem_usage += sizeof(CordRepConcat) + rep->length;
+    // We don't know anything about the embedded / bound data, but we can safely
+    // assume it is at least the size of a word / pointer to data. In the future we may
+ // choose to use the 'data' byte as a tag to identify the types of some
+ // well-known externals, such as a std::string instance.
+ *total_mem_usage +=
+ sizeof(cord_internal::CordRepExternalImpl<intptr_t>) + rep->length;
return true;
}
return false;
}
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
- ClearSlow();
+ assert(&src != this);
+ assert(is_tree() || src.is_tree());
+ auto constexpr method = CordzUpdateTracker::kAssignCord;
+ if (ABSL_PREDICT_TRUE(!is_tree())) {
+ EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
+ return;
+ }
- data_ = src.data_;
- if (is_tree()) {
- data_.set_profiled(false);
- CordRep::Ref(tree());
+ // See b/187581164: unsample cord if already sampled
+  // TODO(b/117940323): cords that are continuously assigned to would reach
+  // 100% sampling probability. Imagine a cord x in some cache:
+ // cache.SetCord(const Cord& foo) {
+ // x = foo;
+ // }
+ // CordzInfo::MaybeTrackCord does:
+ // x.profiled = foo.profiled | x.profiled | random(cordz_mean_interval)
+  // which means that, in the long run, it converges to 'always sampled'.
+  // The real fix is in CordzMaybeTrackCord, but the below is a low-risk
+  // forward fix for b/187581164 and similar BT benchmark regressions.
+ if (ABSL_PREDICT_FALSE(is_profiled())) {
+ cordz_info()->Untrack();
clear_cordz_info();
}
+
+ CordRep* tree = as_tree();
+ if (CordRep* src_tree = src.tree()) {
+ data_.set_tree(CordRep::Ref(src_tree));
+ CordzInfo::MaybeTrackCord(data_, src.data_, method);
+ } else {
+ CordzInfo::MaybeUntrackCord(data_.cordz_info());
+ data_ = src.data_;
+ }
+ CordRep::Unref(tree);
}
-void Cord::InlineRep::ClearSlow() {
+void Cord::InlineRep::UnrefTree() {
if (is_tree()) {
+ CordzInfo::MaybeUntrackCord(data_.cordz_info());
CordRep::Unref(tree());
}
- ResetToEmpty();
}
// --------------------------------------------------------------------
// Constructors and destructors
-Cord::Cord(absl::string_view src) {
+Cord::Cord(absl::string_view src, MethodIdentifier method)
+ : contents_(InlineData::kDefaultInit) {
const size_t n = src.size();
if (n <= InlineRep::kMaxInline) {
- contents_.set_data(src.data(), n, false);
+ contents_.set_data(src.data(), n, true);
} else {
- contents_.set_tree(NewTree(src.data(), n, 0));
+ CordRep* rep = NewTree(src.data(), n, 0);
+ contents_.EmplaceTree(rep, method);
}
}
template <typename T, Cord::EnableIfString<T>>
-Cord::Cord(T&& src) {
- if (
- // String is short: copy data to avoid external block overhead.
- src.size() <= kMaxBytesToCopy ||
- // String is wasteful: copy data to avoid pinning too much unused memory.
- src.size() < src.capacity() / 2
- ) {
- if (src.size() <= InlineRep::kMaxInline) {
- contents_.set_data(src.data(), src.size(), false);
- } else {
- contents_.set_tree(NewTree(src.data(), src.size(), 0));
- }
+Cord::Cord(T&& src) : contents_(InlineData::kDefaultInit) {
+ if (src.size() <= InlineRep::kMaxInline) {
+ contents_.set_data(src.data(), src.size(), true);
} else {
- struct StringReleaser {
- void operator()(absl::string_view /* data */) {}
- std::string data;
- };
- const absl::string_view original_data = src;
- auto* rep = static_cast<
- ::absl::cord_internal::CordRepExternalImpl<StringReleaser>*>(
- absl::cord_internal::NewExternalRep(
- original_data, StringReleaser{std::forward<T>(src)}));
- // Moving src may have invalidated its data pointer, so adjust it.
- rep->base = rep->template get<0>().data.data();
- contents_.set_tree(rep);
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.EmplaceTree(rep, CordzUpdateTracker::kConstructorString);
}
}
@@ -554,9 +599,9 @@ template Cord::Cord(std::string&& src);
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
- if (CordRep* tree = contents_.tree()) {
- CordRep::Unref(VerifyTree(tree));
- }
+ assert(contents_.is_tree());
+ CordzInfo::MaybeUntrackCord(contents_.cordz_info());
+ CordRep::Unref(VerifyTree(contents_.as_tree()));
}
// --------------------------------------------------------------------
@@ -568,109 +613,117 @@ void Cord::Clear() {
}
}
-Cord& Cord::operator=(absl::string_view src) {
+Cord& Cord::AssignLargeString(std::string&& src) {
+ auto constexpr method = CordzUpdateTracker::kAssignString;
+ assert(src.size() > kMaxBytesToCopy);
+ CordRep* rep = CordRepFromString(std::move(src));
+ if (CordRep* tree = contents_.tree()) {
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ contents_.SetTree(rep, scope);
+ CordRep::Unref(tree);
+ } else {
+ contents_.EmplaceTree(rep, method);
+ }
+ return *this;
+}
+Cord& Cord::operator=(absl::string_view src) {
+ auto constexpr method = CordzUpdateTracker::kAssignString;
const char* data = src.data();
size_t length = src.size();
CordRep* tree = contents_.tree();
if (length <= InlineRep::kMaxInline) {
- // Embed into this->contents_
+ // Embed into this->contents_, which is somewhat subtle:
+ // - MaybeUntrackCord must be called before Unref(tree).
+ // - MaybeUntrackCord must be called before set_data() clobbers cordz_info.
+ // - set_data() must be called before Unref(tree) as it may reference tree.
+ if (tree != nullptr) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
contents_.set_data(data, length, true);
- if (tree) CordRep::Unref(tree);
- return *this;
- }
- if (tree != nullptr && tree->tag >= FLAT &&
- tree->flat()->Capacity() >= length &&
- tree->refcount.IsOne()) {
- // Copy in place if the existing FLAT node is reusable.
- memmove(tree->flat()->Data(), data, length);
- tree->length = length;
- VerifyTree(tree);
+ if (tree != nullptr) CordRep::Unref(tree);
return *this;
}
- contents_.set_tree(NewTree(data, length, 0));
- if (tree) CordRep::Unref(tree);
- return *this;
-}
-
-template <typename T, Cord::EnableIfString<T>>
-Cord& Cord::operator=(T&& src) {
- if (src.size() <= kMaxBytesToCopy) {
- *this = absl::string_view(src);
+ if (tree != nullptr) {
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag >= FLAT && tree->flat()->Capacity() >= length &&
+ tree->refcount.IsOne()) {
+ // Copy in place if the existing FLAT node is reusable.
+ memmove(tree->flat()->Data(), data, length);
+ tree->length = length;
+ VerifyTree(tree);
+ return *this;
+ }
+ contents_.SetTree(NewTree(data, length, 0), scope);
+ CordRep::Unref(tree);
} else {
- *this = Cord(std::forward<T>(src));
+ contents_.EmplaceTree(NewTree(data, length, 0), method);
}
return *this;
}
-template Cord& Cord::operator=(std::string&& src);
-
// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
// we keep it here to make diffs easier.
-void Cord::InlineRep::AppendArray(const char* src_data, size_t src_size) {
- if (src_size == 0) return; // memcpy(_, nullptr, 0) is undefined.
+void Cord::InlineRep::AppendArray(absl::string_view src,
+ MethodIdentifier method) {
+ if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined.
size_t appended = 0;
- CordRep* root = nullptr;
- if (is_tree()) {
- root = data_.as_tree();
+ CordRep* rep = tree();
+ const CordRep* const root = rep;
+ CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
+ if (root != nullptr) {
char* region;
- if (PrepareAppendRegion(root, &region, &appended, src_size)) {
- memcpy(region, src_data, appended);
+ if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
+ memcpy(region, src.data(), appended);
}
} else {
// Try to fit in the inline buffer if possible.
size_t inline_length = inline_size();
- if (src_size <= kMaxInline - inline_length) {
+ if (src.size() <= kMaxInline - inline_length) {
// Append new data to embedded array
- memcpy(data_.as_chars() + inline_length, src_data, src_size);
- set_inline_size(inline_length + src_size);
+ memcpy(data_.as_chars() + inline_length, src.data(), src.size());
+ set_inline_size(inline_length + src.size());
return;
}
- // It is possible that src_data == data_, but when we transition from an
- // InlineRep to a tree we need to assign data_ = root via set_tree. To
- // avoid corrupting the source data before we copy it, delay calling
- // set_tree until after we've copied data.
+ // Note: we don't concern ourselves if src aliases data stored in the
+ // inlined data of 'this', as we update the InlineData only at the end.
// We are going from an inline size to beyond inline size. Make the new size
// either double the inlined size, or the added size + 10%.
- const size_t size1 = inline_length * 2 + src_size;
- const size_t size2 = inline_length + src_size / 10;
- root = CordRepFlat::New(std::max<size_t>(size1, size2));
- appended = std::min(
- src_size, root->flat()->Capacity() - inline_length);
- memcpy(root->flat()->Data(), data_.as_chars(), inline_length);
- memcpy(root->flat()->Data() + inline_length, src_data, appended);
- root->length = inline_length + appended;
- set_tree(root);
- }
-
- src_data += appended;
- src_size -= appended;
- if (src_size == 0) {
+ const size_t size1 = inline_length * 2 + src.size();
+ const size_t size2 = inline_length + src.size() / 10;
+ rep = CordRepFlat::New(std::max<size_t>(size1, size2));
+ appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
+ memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
+ memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
+ rep->length = inline_length + appended;
+ }
+
+ src.remove_prefix(appended);
+ if (src.empty()) {
+ CommitTree(root, rep, scope, method);
return;
}
if (cord_ring_enabled()) {
- absl::string_view data(src_data, src_size);
- root = ForceRing(root, (data.size() - 1) / kMaxFlatLength + 1);
- replace_tree(CordRepRing::Append(root->ring(), data));
- return;
- }
-
- // Use new block(s) for any remaining bytes that were not handled above.
- // Alloc extra memory only if the right child of the root of the new tree is
- // going to be a FLAT node, which will permit further inplace appends.
- size_t length = src_size;
- if (src_size < kMaxFlatLength) {
- // The new length is either
- // - old size + 10%
- // - old_size + src_size
- // This will cause a reasonable conservative step-up in size that is still
- // large enough to avoid excessive amounts of small fragments being added.
- length = std::max<size_t>(root->length / 10, src_size);
+ rep = ForceRing(rep, (src.size() - 1) / kMaxFlatLength + 1);
+ rep = CordRepRing::Append(rep->ring(), src);
+ } else {
+ // Use new block(s) for any remaining bytes that were not handled above.
+ // Alloc extra memory only if the right child of the root of the new tree
+ // is going to be a FLAT node, which will permit further inplace appends.
+ size_t length = src.size();
+ if (src.size() < kMaxFlatLength) {
+ // The new length is either
+ // - old size + 10%
+ // - old_size + src.size()
+ // This will cause a reasonable conservative step-up in size that is
+ // still large enough to avoid excessive amounts of small fragments
+ // being added.
+ length = std::max<size_t>(rep->length / 10, src.size());
+ }
+ rep = Concat(rep, NewTree(src.data(), src.size(), length - src.size()));
}
- set_tree(Concat(root, NewTree(src_data, src_size, length - src_size)));
+ CommitTree(root, rep, scope, method);
}
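A worked instance of the sizing rule used in the inline-to-flat promotion above, with illustrative numbers (15 bytes already inlined, a 100-byte append); the figures are assumptions for the example, not taken from the patch:

    #include <cstddef>

    constexpr size_t kInlined = 15, kAppended = 100;
    constexpr size_t kSize1 = kInlined * 2 + kAppended;   // 2x inlined size + appended bytes: 130
    constexpr size_t kSize2 = kInlined + kAppended / 10;  // inlined size + 10% of appended: 25
    // CordRepFlat::New() is asked for max(kSize1, kSize2) = 130 bytes, so the old
    // inline bytes and (part of) the new data land in a single allocation.
    static_assert((kSize1 > kSize2 ? kSize1 : kSize2) == 130, "requested flat size");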
inline CordRep* Cord::TakeRep() const& {
@@ -685,10 +738,17 @@ inline CordRep* Cord::TakeRep() && {
template <typename C>
inline void Cord::AppendImpl(C&& src) {
+ auto constexpr method = CordzUpdateTracker::kAppendCord;
if (empty()) {
- // In case of an empty destination avoid allocating a new node, do not copy
- // data.
- *this = std::forward<C>(src);
+ // Since destination is empty, we can avoid allocating a node,
+ if (src.contents_.is_tree()) {
+ // by taking the tree directly
+ CordRep* rep = std::forward<C>(src).TakeRep();
+ contents_.EmplaceTree(rep, method);
+ } else {
+ // or copying over inline data
+ contents_.data_ = src.contents_.data_;
+ }
return;
}
@@ -698,12 +758,12 @@ inline void Cord::AppendImpl(C&& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree == nullptr) {
// src has embedded data.
- contents_.AppendArray(src.contents_.data(), src_size);
+ contents_.AppendArray({src.contents_.data(), src_size}, method);
return;
}
if (src_tree->tag >= FLAT) {
// src tree just has one flat node.
- contents_.AppendArray(src_tree->flat()->Data(), src_size);
+ contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
return;
}
if (&src == this) {
@@ -719,7 +779,8 @@ inline void Cord::AppendImpl(C&& src) {
}
// Guaranteed to be a tree (kMaxBytesToCopy > kInlinedSize)
- contents_.AppendTree(std::forward<C>(src).TakeRep());
+ CordRep* rep = std::forward<C>(src).TakeRep();
+ contents_.AppendTree(rep, CordzUpdateTracker::kAppendCord);
}
void Cord::Append(const Cord& src) { AppendImpl(src); }
@@ -731,7 +792,8 @@ void Cord::Append(T&& src) {
if (src.size() <= kMaxBytesToCopy) {
Append(absl::string_view(src));
} else {
- Append(Cord(std::forward<T>(src)));
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.AppendTree(rep, CordzUpdateTracker::kAppendString);
}
}
@@ -741,7 +803,7 @@ void Cord::Prepend(const Cord& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree != nullptr) {
CordRep::Ref(src_tree);
- contents_.PrependTree(src_tree);
+ contents_.PrependTree(src_tree, CordzUpdateTracker::kPrependCord);
return;
}
@@ -764,7 +826,8 @@ void Cord::Prepend(absl::string_view src) {
return;
}
}
- contents_.PrependTree(NewTree(src.data(), src.size(), 0));
+ CordRep* rep = NewTree(src.data(), src.size(), 0);
+ contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
}
template <typename T, Cord::EnableIfString<T>>
@@ -772,7 +835,8 @@ inline void Cord::Prepend(T&& src) {
if (src.size() <= kMaxBytesToCopy) {
Prepend(absl::string_view(src));
} else {
- Prepend(Cord(std::forward<T>(src)));
+ CordRep* rep = CordRepFromString(std::forward<T>(src));
+ contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
}
}
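The Append/Prepend overloads above change only how the operation is attributed (kAppendString, kPrependCord, kPrependString) and how a large std::string is turned into a CordRep before being attached; the observable behavior is unchanged. A minimal usage sketch, for reference:

    void AppendPrependExample() {
      absl::Cord c("fox");
      c.Prepend("quick brown ");
      c.Append(" jumps");
      // c now reads "quick brown fox jumps". A large std::string moved into
      // Append()/Prepend() takes the CordRepFromString path instead of a copy.
    }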
@@ -870,12 +934,17 @@ void Cord::RemovePrefix(size_t n) {
CordRep* tree = contents_.tree();
if (tree == nullptr) {
contents_.remove_prefix(n);
- } else if (tree->tag == RING) {
- contents_.replace_tree(CordRepRing::RemovePrefix(tree->ring(), n));
} else {
- CordRep* newrep = RemovePrefixFrom(tree, n);
- CordRep::Unref(tree);
- contents_.replace_tree(VerifyTree(newrep));
+ auto constexpr method = CordzUpdateTracker::kRemovePrefix;
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag == RING) {
+ tree = CordRepRing::RemovePrefix(tree->ring(), n);
+ } else {
+ CordRep* newrep = RemovePrefixFrom(tree, n);
+ CordRep::Unref(tree);
+ tree = VerifyTree(newrep);
+ }
+ contents_.SetTreeOrEmpty(tree, scope);
}
}
@@ -886,12 +955,17 @@ void Cord::RemoveSuffix(size_t n) {
CordRep* tree = contents_.tree();
if (tree == nullptr) {
contents_.reduce_size(n);
- } else if (tree->tag == RING) {
- contents_.replace_tree(CordRepRing::RemoveSuffix(tree->ring(), n));
} else {
- CordRep* newrep = RemoveSuffixFrom(tree, n);
- CordRep::Unref(tree);
- contents_.replace_tree(VerifyTree(newrep));
+ auto constexpr method = CordzUpdateTracker::kRemoveSuffix;
+ CordzUpdateScope scope(contents_.cordz_info(), method);
+ if (tree->tag == RING) {
+ tree = CordRepRing::RemoveSuffix(tree->ring(), n);
+ } else {
+ CordRep* newrep = RemoveSuffixFrom(tree, n);
+ CordRep::Unref(tree);
+ tree = VerifyTree(newrep);
+ }
+ contents_.SetTreeOrEmpty(tree, scope);
}
}
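RemovePrefix and RemoveSuffix now perform the root swap under a CordzUpdateScope and install the result via SetTreeOrEmpty, since removing everything can leave the cord empty; their public contract is untouched. For reference:

    void RemoveExample() {
      absl::Cord c("hello world");
      c.RemovePrefix(6);  // c == "world"
      c.RemoveSuffix(1);  // c == "worl"
    }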
@@ -951,17 +1025,20 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
size_t length = size();
if (pos > length) pos = length;
if (new_size > length - pos) new_size = length - pos;
+ if (new_size == 0) return sub_cord;
+
CordRep* tree = contents_.tree();
if (tree == nullptr) {
// sub_cord is newly constructed, no need to re-zero-out the tail of
// contents_ memory.
sub_cord.contents_.set_data(contents_.data() + pos, new_size, false);
- } else if (new_size == 0) {
- // We want to return empty subcord, so nothing to do.
- } else if (new_size <= InlineRep::kMaxInline) {
+ return sub_cord;
+ }
+
+ if (new_size <= InlineRep::kMaxInline) {
+ char* dest = sub_cord.contents_.data_.as_chars();
Cord::ChunkIterator it = chunk_begin();
it.AdvanceBytes(pos);
- char* dest = sub_cord.contents_.data_.as_chars();
size_t remaining_size = new_size;
while (remaining_size > it->size()) {
cord_internal::SmallMemmove(dest, it->data(), it->size());
@@ -971,12 +1048,17 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
}
cord_internal::SmallMemmove(dest, it->data(), remaining_size);
sub_cord.contents_.set_inline_size(new_size);
- } else if (tree->tag == RING) {
- tree = CordRepRing::SubRing(CordRep::Ref(tree)->ring(), pos, new_size);
- sub_cord.contents_.set_tree(tree);
+ return sub_cord;
+ }
+
+ if (tree->tag == RING) {
+ CordRepRing* ring = CordRep::Ref(tree)->ring();
+ tree = CordRepRing::SubRing(ring, pos, new_size);
} else {
- sub_cord.contents_.set_tree(NewSubRange(tree, pos, new_size));
+ tree = NewSubRange(tree, pos, new_size);
}
+ sub_cord.contents_.EmplaceTree(tree, contents_.data_,
+ CordzUpdateTracker::kSubCord);
return sub_cord;
}
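The Subcord() restructuring above hoists the new_size == 0 early return and funnels the RING and non-RING tree paths into a single EmplaceTree call that inherits the parent's sampling state (kSubCord). Call sites behave as before, e.g.:

    void SubcordExample() {
      absl::Cord c("The quick brown fox jumps over the lazy dog");
      absl::Cord word = c.Subcord(4, 5);   // "quick"
      absl::Cord none = c.Subcord(10, 0);  // empty cord, returned before any tree work
    }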
@@ -1418,6 +1500,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
"Attempted to iterate past `end()`");
Cord subcord;
+ auto constexpr method = CordzUpdateTracker::kCordReader;
if (n <= InlineRep::kMaxInline) {
// Range to read fits in inline data. Flatten it.
@@ -1440,11 +1523,12 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
if (ring_reader_) {
size_t chunk_size = current_chunk_.size();
if (n <= chunk_size && n <= kMaxBytesToCopy) {
- subcord = Cord(current_chunk_.substr(0, n));
+ subcord = Cord(current_chunk_.substr(0, n), method);
} else {
auto* ring = CordRep::Ref(ring_reader_.ring())->ring();
size_t offset = ring_reader_.length() - bytes_remaining_;
- subcord.contents_.set_tree(CordRepRing::SubRing(ring, offset, n));
+ CordRep* rep = CordRepRing::SubRing(ring, offset, n);
+ subcord.contents_.EmplaceTree(rep, method);
}
if (n < chunk_size) {
bytes_remaining_ -= n;
@@ -1463,7 +1547,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
const char* data = subnode->tag == EXTERNAL ? subnode->external()->base
: subnode->flat()->Data();
subnode = NewSubstring(subnode, current_chunk_.data() - data, n);
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
RemoveChunkPrefix(n);
return subcord;
}
@@ -1506,7 +1590,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
if (node == nullptr) {
// We have reached the end of the Cord.
assert(bytes_remaining_ == 0);
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
return subcord;
}
@@ -1546,7 +1630,7 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
current_chunk_ = absl::string_view(data + offset + n, length - n);
current_leaf_ = node;
bytes_remaining_ -= n;
- subcord.contents_.set_tree(VerifyTree(subnode));
+ subcord.contents_.EmplaceTree(VerifyTree(subnode), method);
return subcord;
}
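ChunkIterator::AdvanceAndReadBytes() backs the public static Cord::AdvanceAndRead(CharIterator*, n_bytes) helper; the hunks above change only how the produced subcord installs its tree (EmplaceTree tagged kCordReader), not what it contains. A usage sketch, assuming the usual CharIterator workflow:

    void ReaderExample() {
      absl::Cord c("0123456789");
      absl::Cord::CharIterator it = c.char_begin();
      absl::Cord head = absl::Cord::AdvanceAndRead(&it, 5);  // head == "01234"
      // `it` now refers to '5'; `head` may share tree nodes with `c`.
    }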
@@ -1653,6 +1737,7 @@ char Cord::operator[](size_t i) const {
}
absl::string_view Cord::FlattenSlowPath() {
+ assert(contents_.is_tree());
size_t total_size = size();
CordRep* new_rep;
char* new_buffer;
@@ -1673,10 +1758,9 @@ absl::string_view Cord::FlattenSlowPath() {
s.size());
});
}
- if (CordRep* tree = contents_.tree()) {
- CordRep::Unref(tree);
- }
- contents_.set_tree(new_rep);
+ CordzUpdateScope scope(contents_.cordz_info(), CordzUpdateTracker::kFlatten);
+ CordRep::Unref(contents_.as_tree());
+ contents_.SetTree(new_rep, scope);
return absl::string_view(new_buffer, total_size);
}
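FlattenSlowPath() is only reached from Flatten() when the cord already holds a tree, which the new assert makes explicit, and the root replacement now happens under a CordzUpdateScope (kFlatten). The public behavior is unchanged:

    void FlattenExample() {
      absl::Cord c;
      c.Append(std::string(2000, 'a'));
      c.Append(std::string(2000, 'b'));
      absl::string_view flat = c.Flatten();  // one contiguous view; may copy once
      // flat.size() == c.size(); subsequent chunk iteration sees a single chunk.
    }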
@@ -1688,6 +1772,8 @@ absl::string_view Cord::FlattenSlowPath() {
} else if (rep->tag == EXTERNAL) {
*fragment = absl::string_view(rep->external()->base, rep->length);
return true;
+ } else if (rep->tag == RING) {
+ return rep->ring()->IsFlat(fragment);
} else if (rep->tag == SUBSTRING) {
CordRep* child = rep->substring()->child;
if (child->tag >= FLAT) {
@@ -1698,6 +1784,9 @@ absl::string_view Cord::FlattenSlowPath() {
*fragment = absl::string_view(
child->external()->base + rep->substring()->start, rep->length);
return true;
+ } else if (child->tag == RING) {
+ return child->ring()->IsFlat(rep->substring()->start, rep->length,
+ fragment);
}
}
return false;
@@ -1786,8 +1875,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
*os << absl::CEscape(std::string(rep->external()->base, rep->length));
*os << "]\n";
} else if (rep->tag >= FLAT) {
- *os << "FLAT cap=" << rep->flat()->Capacity()
- << " [";
+ *os << "FLAT cap=" << rep->flat()->Capacity() << " [";
if (include_data)
*os << absl::CEscape(std::string(rep->flat()->Data(), rep->length));
*os << "]\n";
@@ -1799,7 +1887,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
do {
DumpNode(ring->entry_child(head), include_data, os,
indent + kIndentStep);
- head = ring->advance(head);;
+ head = ring->advance(head);
} while (head != ring->tail());
}
if (stack.empty()) break;
@@ -1845,9 +1933,8 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
worklist.push_back(node->concat()->left);
}
} else if (node->tag >= FLAT) {
- ABSL_INTERNAL_CHECK(
- node->length <= node->flat()->Capacity(),
- ReportError(root, node));
+ ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
+ ReportError(root, node));
} else if (node->tag == EXTERNAL) {
ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
ReportError(root, node));
diff --git a/absl/strings/cord.h b/absl/strings/cord.h
index fa9cb913..e758f1cd 100644
--- a/absl/strings/cord.h
+++ b/absl/strings/cord.h
@@ -70,6 +70,7 @@
#include <string>
#include <type_traits>
+#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/internal/per_thread_tls.h"
#include "absl/base/macros.h"
@@ -80,6 +81,11 @@
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_ring.h"
#include "absl/strings/internal/cord_rep_ring_reader.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
#include "absl/strings/internal/resize_uninitialized.h"
#include "absl/strings/internal/string_constant.h"
#include "absl/strings/string_view.h"
@@ -664,10 +670,24 @@ class Cord {
explicit constexpr Cord(strings_internal::StringConstant<T>);
private:
+ using CordRep = absl::cord_internal::CordRep;
+ using CordRepFlat = absl::cord_internal::CordRepFlat;
+ using CordzInfo = cord_internal::CordzInfo;
+ using CordzUpdateScope = cord_internal::CordzUpdateScope;
+ using CordzUpdateTracker = cord_internal::CordzUpdateTracker;
+ using InlineData = cord_internal::InlineData;
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // Creates a cord instance with `method` representing the originating
+ // public API call causing the cord to be created.
+ explicit Cord(absl::string_view src, MethodIdentifier method);
+
friend class CordTestPeer;
friend bool operator==(const Cord& lhs, const Cord& rhs);
friend bool operator==(const Cord& lhs, absl::string_view rhs);
+ friend const CordzInfo* GetCordzInfoForTesting(const Cord& cord);
+
// Calls the provided function once for each cord chunk, in order. Unlike
// Chunks(), this API will not allocate memory.
void ForEachChunk(absl::FunctionRef<void(absl::string_view)>) const;
@@ -687,6 +707,7 @@ class Cord {
static_assert(kMaxInline >= sizeof(absl::cord_internal::CordRep*), "");
constexpr InlineRep() : data_() {}
+ explicit InlineRep(InlineData::DefaultInitType init) : data_(init) {}
InlineRep(const InlineRep& src);
InlineRep(InlineRep&& src);
InlineRep& operator=(const InlineRep& src);
@@ -704,23 +725,56 @@ class Cord {
// Returns nullptr if holding bytes
absl::cord_internal::CordRep* tree() const;
absl::cord_internal::CordRep* as_tree() const;
- // Discards old pointer, if any
- void set_tree(absl::cord_internal::CordRep* rep);
- // Replaces a tree with a new root. This is faster than set_tree, but it
- // should only be used when it's clear that the old rep was a tree.
- void replace_tree(absl::cord_internal::CordRep* rep);
// Returns non-null iff was holding a pointer
absl::cord_internal::CordRep* clear();
// Converts to pointer if necessary.
- absl::cord_internal::CordRep* force_tree(size_t extra_hint);
void reduce_size(size_t n); // REQUIRES: holding data
void remove_prefix(size_t n); // REQUIRES: holding data
- void AppendArray(const char* src_data, size_t src_size);
+ void AppendArray(absl::string_view src, MethodIdentifier method);
absl::string_view FindFlatStartPiece() const;
- void AppendTree(absl::cord_internal::CordRep* tree);
- void PrependTree(absl::cord_internal::CordRep* tree);
- void GetAppendRegion(char** region, size_t* size, size_t max_length);
- void GetAppendRegion(char** region, size_t* size);
+
+  // Creates a CordRepFlat instance from the current inlined data with `extra`
+ // bytes of desired additional capacity.
+ CordRepFlat* MakeFlatWithExtraCapacity(size_t extra);
+
+ // Sets the tree value for this instance. `rep` must not be null.
+ // Requires the current instance to hold a tree, and a lock to be held on
+ // any CordzInfo referenced by this instance. The latter is enforced through
+ // the CordzUpdateScope argument. If the current instance is sampled, then
+ // the CordzInfo instance is updated to reference the new `rep` value.
+ void SetTree(CordRep* rep, const CordzUpdateScope& scope);
+
+ // Identical to SetTree(), except that `rep` is allowed to be null, in
+ // which case the current instance is reset to an empty value.
+ void SetTreeOrEmpty(CordRep* rep, const CordzUpdateScope& scope);
+
+ // Sets the tree value for this instance, and randomly samples this cord.
+ // This function disregards existing contents in `data_`, and should be
+ // called when a Cord is 'promoted' from an 'uninitialized' or 'inlined'
+ // value to a non-inlined (tree / ring) value.
+ void EmplaceTree(CordRep* rep, MethodIdentifier method);
+
+ // Identical to EmplaceTree, except that it copies the parent stack from
+ // the provided `parent` data if the parent is sampled.
+ void EmplaceTree(CordRep* rep, const InlineData& parent,
+ MethodIdentifier method);
+
+ // Commits the change of a newly created, or updated `rep` root value into
+ // this cord. `old_rep` indicates the old (inlined or tree) value of the
+ // cord, and determines if the commit invokes SetTree() or EmplaceTree().
+ void CommitTree(const CordRep* old_rep, CordRep* rep,
+ const CordzUpdateScope& scope, MethodIdentifier method);
+
+ void AppendTreeToInlined(CordRep* tree, MethodIdentifier method);
+ void AppendTreeToTree(CordRep* tree, MethodIdentifier method);
+ void AppendTree(CordRep* tree, MethodIdentifier method);
+ void PrependTreeToInlined(CordRep* tree, MethodIdentifier method);
+ void PrependTreeToTree(CordRep* tree, MethodIdentifier method);
+ void PrependTree(CordRep* tree, MethodIdentifier method);
+
+ template <bool has_length>
+ void GetAppendRegion(char** region, size_t* size, size_t length);
+
bool IsSame(const InlineRep& other) const {
return memcmp(&data_, &other.data_, sizeof(data_)) == 0;
}
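The comment block above splits responsibilities three ways: SetTree() replaces the root of a cord that is already a tree (under the CordzUpdateScope lock), EmplaceTree() promotes an inlined cord and may start sampling it, and CommitTree() picks between them based on the old root. A self-contained toy sketch of that contract, using stand-in types rather than the real Cord internals:

    #include <cassert>

    struct Node {};  // stand-in for cord_internal::CordRep

    struct ToyRep {
      Node* root = nullptr;  // nullptr means "inlined / empty"
      bool sampled = false;

      void EmplaceTree(Node* rep) {  // promotion: the sampling decision happens here
        assert(rep != nullptr);
        root = rep;
        sampled = MaybeSample();
      }
      void SetTree(Node* rep) {      // replacement: keeps the existing sampling state
        assert(root != nullptr && rep != nullptr);
        root = rep;
      }
      void CommitTree(const Node* old_root, Node* rep) {
        if (old_root != nullptr) SetTree(rep); else EmplaceTree(rep);
      }

     private:
      static bool MaybeSample() { return false; }  // real code: CordzInfo::MaybeTrackCord
    };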
@@ -776,8 +830,8 @@ class Cord {
friend class Cord;
void AssignSlow(const InlineRep& src);
- // Unrefs the tree, stops profiling, and zeroes the contents
- void ClearSlow();
+ // Unrefs the tree and stops profiling.
+ void UnrefTree();
void ResetToEmpty() { data_ = {}; }
@@ -828,6 +882,10 @@ class Cord {
template <typename C>
void AppendImpl(C&& src);
+ // Assigns the value in 'src' to this instance, 'stealing' its contents.
+ // Requires src.length() > kMaxBytesToCopy.
+ Cord& AssignLargeString(std::string&& src);
+
// Helper for AbslHashValue().
template <typename H>
H HashFragmented(H hash_state) const {
@@ -930,8 +988,11 @@ inline CordRep* NewExternalRep(absl::string_view data,
template <typename Releaser>
Cord MakeCordFromExternal(absl::string_view data, Releaser&& releaser) {
Cord cord;
- cord.contents_.set_tree(::absl::cord_internal::NewExternalRep(
- data, std::forward<Releaser>(releaser)));
+ if (auto* rep = ::absl::cord_internal::NewExternalRep(
+ data, std::forward<Releaser>(releaser))) {
+ cord.contents_.EmplaceTree(rep,
+ Cord::MethodIdentifier::kMakeCordFromExternal);
+ }
return cord;
}
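MakeCordFromExternal now guards against NewExternalRep returning null (presumably for empty input, where no node is needed) and attributes the resulting tree as kMakeCordFromExternal. Typical usage is unaffected; for reference:

    void ExternalExample() {
      static const char kBuffer[] = "a long-lived, externally owned buffer";
      absl::Cord c = absl::MakeCordFromExternal(
          absl::string_view(kBuffer, sizeof(kBuffer) - 1),
          [](absl::string_view) { /* nothing to free: static storage */ });
    }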
@@ -939,15 +1000,16 @@ constexpr Cord::InlineRep::InlineRep(cord_internal::InlineData data)
: data_(data) {}
inline Cord::InlineRep::InlineRep(const Cord::InlineRep& src)
- : data_(src.data_) {
- if (is_tree()) {
- data_.clear_cordz_info();
- absl::cord_internal::CordRep::Ref(as_tree());
+ : data_(InlineData::kDefaultInit) {
+ if (CordRep* tree = src.tree()) {
+ EmplaceTree(CordRep::Ref(tree), src.data_,
+ CordzUpdateTracker::kConstructorCord);
+ } else {
+ data_ = src.data_;
}
}
-inline Cord::InlineRep::InlineRep(Cord::InlineRep&& src) {
- data_ = src.data_;
+inline Cord::InlineRep::InlineRep(Cord::InlineRep&& src) : data_(src.data_) {
src.ResetToEmpty();
}
@@ -966,7 +1028,7 @@ inline Cord::InlineRep& Cord::InlineRep::operator=(const Cord::InlineRep& src) {
inline Cord::InlineRep& Cord::InlineRep::operator=(
Cord::InlineRep&& src) noexcept {
if (is_tree()) {
- ClearSlow();
+ UnrefTree();
}
data_ = src.data_;
src.ResetToEmpty();
@@ -1003,31 +1065,62 @@ inline size_t Cord::InlineRep::size() const {
return is_tree() ? as_tree()->length : inline_size();
}
-inline void Cord::InlineRep::set_tree(absl::cord_internal::CordRep* rep) {
- if (rep == nullptr) {
- ResetToEmpty();
+inline cord_internal::CordRepFlat* Cord::InlineRep::MakeFlatWithExtraCapacity(
+ size_t extra) {
+ static_assert(cord_internal::kMinFlatLength >= sizeof(data_), "");
+ size_t len = data_.inline_size();
+ auto* result = CordRepFlat::New(len + extra);
+ result->length = len;
+ memcpy(result->Data(), data_.as_chars(), sizeof(data_));
+ return result;
+}
+
+inline void Cord::InlineRep::EmplaceTree(CordRep* rep,
+ MethodIdentifier method) {
+ assert(rep);
+ data_.make_tree(rep);
+ CordzInfo::MaybeTrackCord(data_, method);
+}
+
+inline void Cord::InlineRep::EmplaceTree(CordRep* rep, const InlineData& parent,
+ MethodIdentifier method) {
+ data_.make_tree(rep);
+ CordzInfo::MaybeTrackCord(data_, parent, method);
+}
+
+inline void Cord::InlineRep::SetTree(CordRep* rep,
+ const CordzUpdateScope& scope) {
+ assert(rep);
+ assert(data_.is_tree());
+ data_.set_tree(rep);
+ scope.SetCordRep(rep);
+}
+
+inline void Cord::InlineRep::SetTreeOrEmpty(CordRep* rep,
+ const CordzUpdateScope& scope) {
+ assert(data_.is_tree());
+ if (rep) {
+ data_.set_tree(rep);
} else {
- if (data_.is_tree()) {
- // `data_` already holds a 'tree' value and an optional cordz_info value.
- // Replace the tree value only, leaving the cordz_info value unchanged.
- data_.set_tree(rep);
- } else {
- // `data_` contains inlined data: initialize data_ to tree value `rep`.
- data_.make_tree(rep);
- }
+ data_ = {};
}
+ scope.SetCordRep(rep);
}
-inline void Cord::InlineRep::replace_tree(absl::cord_internal::CordRep* rep) {
- ABSL_ASSERT(is_tree());
- if (ABSL_PREDICT_FALSE(rep == nullptr)) {
- set_tree(rep);
- return;
+inline void Cord::InlineRep::CommitTree(const CordRep* old_rep, CordRep* rep,
+ const CordzUpdateScope& scope,
+ MethodIdentifier method) {
+ if (old_rep) {
+ SetTree(rep, scope);
+ } else {
+ EmplaceTree(rep, method);
}
- data_.set_tree(rep);
}
inline absl::cord_internal::CordRep* Cord::InlineRep::clear() {
+ if (is_tree()) {
+ CordzInfo::MaybeUntrackCord(cordz_info());
+ }
absl::cord_internal::CordRep* result = tree();
ResetToEmpty();
return result;
@@ -1042,6 +1135,9 @@ inline void Cord::InlineRep::CopyToArray(char* dst) const {
constexpr inline Cord::Cord() noexcept {}
+inline Cord::Cord(absl::string_view src)
+ : Cord(src, CordzUpdateTracker::kConstructorString) {}
+
template <typename T>
constexpr Cord::Cord(strings_internal::StringConstant<T>)
: contents_(strings_internal::StringConstant<T>::value.size() <=
@@ -1057,6 +1153,15 @@ inline Cord& Cord::operator=(const Cord& x) {
return *this;
}
+template <typename T, Cord::EnableIfString<T>>
+Cord& Cord::operator=(T&& src) {
+ if (src.size() <= cord_internal::kMaxBytesToCopy) {
+ return operator=(absl::string_view(src));
+ } else {
+ return AssignLargeString(std::forward<T>(src));
+ }
+}
+
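This is the header-side half of the AssignLargeString() split in cord.cc: assignments at or below the internal kMaxBytesToCopy threshold (511 bytes at the time of this change) are copied as a string_view, while larger strings are moved and their buffer adopted. For example:

    void AssignExample() {
      absl::Cord c;
      c = std::string("short");    // <= kMaxBytesToCopy: copied, stays small
      c = std::string(4096, 'x');  // >  kMaxBytesToCopy: AssignLargeString adopts the buffer
    }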
inline Cord::Cord(const Cord& src) : contents_(src.contents_) {}
inline Cord::Cord(Cord&& src) noexcept : contents_(std::move(src.contents_)) {}
@@ -1071,7 +1176,6 @@ inline Cord& Cord::operator=(Cord&& x) noexcept {
}
extern template Cord::Cord(std::string&& src);
-extern template Cord& Cord::operator=(std::string&& src);
inline size_t Cord::size() const {
// Length is 1st field in str.rep_
@@ -1114,7 +1218,7 @@ inline absl::string_view Cord::Flatten() {
}
inline void Cord::Append(absl::string_view src) {
- contents_.AppendArray(src.data(), src.size());
+ contents_.AppendArray(src, CordzUpdateTracker::kAppendString);
}
extern template void Cord::Append(std::string&& src);
diff --git a/absl/strings/cord_ring_reader_test.cc b/absl/strings/cord_ring_reader_test.cc
index 585616f3..d9a9a76d 100644
--- a/absl/strings/cord_ring_reader_test.cc
+++ b/absl/strings/cord_ring_reader_test.cc
@@ -78,6 +78,7 @@ TEST(CordRingReaderTest, Reset) {
EXPECT_TRUE(static_cast<bool>(reader));
EXPECT_THAT(reader.ring(), Eq(ring));
EXPECT_THAT(reader.index(), Eq(ring->head()));
+ EXPECT_THAT(reader.node(), Eq(ring->entry_child(ring->head())));
EXPECT_THAT(reader.length(), Eq(ring->length));
EXPECT_THAT(reader.consumed(), Eq(flats[0].length()));
EXPECT_THAT(reader.remaining(), Eq(ring->length - reader.consumed()));
@@ -99,11 +100,13 @@ TEST(CordRingReaderTest, Next) {
size_t consumed = reader.consumed();
size_t remaining = reader.remaining();
for (int i = 1; i < flats.size(); ++i) {
+ CordRepRing::index_type index = ring->advance(head, i);
consumed += flats[i].length();
remaining -= flats[i].length();
absl::string_view next = reader.Next();
ASSERT_THAT(next, Eq(flats[i]));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
@@ -125,13 +128,15 @@ TEST(CordRingReaderTest, SeekForward) {
size_t consumed = 0;
   size_t remaining = ring->length;
for (int i = 0; i < flats.size(); ++i) {
+ CordRepRing::index_type index = ring->advance(head, i);
size_t offset = consumed;
consumed += flats[i].length();
remaining -= flats[i].length();
for (int off = 0; off < flats[i].length(); ++off) {
absl::string_view chunk = reader.Seek(offset + off);
ASSERT_THAT(chunk, Eq(flats[i].substr(off)));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
@@ -150,11 +155,13 @@ TEST(CordRingReaderTest, SeekBackward) {
size_t consumed = ring->length;
size_t remaining = 0;
for (int i = flats.size() - 1; i >= 0; --i) {
+ CordRepRing::index_type index = ring->advance(head, i);
size_t offset = consumed - flats[i].length();
for (int off = 0; off < flats[i].length(); ++off) {
absl::string_view chunk = reader.Seek(offset + off);
ASSERT_THAT(chunk, Eq(flats[i].substr(off)));
- ASSERT_THAT(reader.index(), Eq(ring->advance(head, i)));
+ ASSERT_THAT(reader.index(), Eq(index));
+ ASSERT_THAT(reader.node(), Eq(ring->entry_child(index)));
ASSERT_THAT(reader.consumed(), Eq(consumed));
ASSERT_THAT(reader.remaining(), Eq(remaining));
}
diff --git a/absl/strings/cord_ring_test.cc b/absl/strings/cord_ring_test.cc
index 7d75e106..cc8fbaf9 100644
--- a/absl/strings/cord_ring_test.cc
+++ b/absl/strings/cord_ring_test.cc
@@ -31,9 +31,6 @@
extern thread_local bool cord_ring;
-// TOOD(b/177688959): weird things happened with the original test
-#define ASAN_BUG_177688959_FIXED false
-
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace {
@@ -101,15 +98,22 @@ using TestParams = std::vector<TestParam>;
// Matcher validating when mutable copies are required / performed.
MATCHER_P2(EqIfPrivate, param, rep,
absl::StrCat("Equal 0x", absl::Hex(rep), " if private")) {
- return param.refcount_is_one ? arg == rep : arg != rep;
+ return param.refcount_is_one ? arg == rep : true;
}
// Matcher validating when mutable copies are required / performed.
MATCHER_P2(EqIfPrivateAndCapacity, param, rep,
absl::StrCat("Equal 0x", absl::Hex(rep),
" if private and capacity")) {
- return (param.refcount_is_one && param.with_capacity) ? arg == rep
- : arg != rep;
+ return (param.refcount_is_one && param.with_capacity) ? arg == rep : true;
+}
+
+// Matcher validating a shared ring was re-allocated. Should only be used for
+// tests doing exactly one update as subsequent updates could return the
+// original (freed and re-used) pointer.
+MATCHER_P2(NeIfShared, param, rep,
+ absl::StrCat("Not equal 0x", absl::Hex(rep), " if shared")) {
+ return param.refcount_is_one ? true : arg != rep;
}
MATCHER_P2(EqIfInputPrivate, param, rep, "Equal if input is private") {
@@ -340,19 +344,15 @@ std::string TestParamToString(const testing::TestParamInfo<TestParam>& info) {
class CordRingTest : public testing::Test {
public:
~CordRingTest() override {
-#if ASAN_BUG_177688959_FIXED
for (CordRep* rep : unrefs_) {
CordRep::Unref(rep);
}
-#endif
}
template <typename CordRepType>
CordRepType* NeedsUnref(CordRepType* rep) {
assert(rep);
-#if ASAN_BUG_177688959_FIXED
unrefs_.push_back(rep);
-#endif
return rep;
}
@@ -362,26 +362,16 @@ class CordRingTest : public testing::Test {
return NeedsUnref(rep);
}
- void Unref(CordRep* rep) {
-#if !ASAN_BUG_177688959_FIXED
- CordRep::Unref(rep);
-#endif
- }
-
private:
-#if ASAN_BUG_177688959_FIXED
std::vector<CordRep*> unrefs_;
-#endif
};
class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
public:
~CordRingTestWithParam() override {
-#if ASAN_BUG_177688959_FIXED
for (CordRep* rep : unrefs_) {
CordRep::Unref(rep);
}
-#endif
}
CordRepRing* CreateWithCapacity(CordRep* child, size_t extra_capacity) {
@@ -400,9 +390,7 @@ class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
template <typename CordRepType>
CordRepType* NeedsUnref(CordRepType* rep) {
assert(rep);
-#if ASAN_BUG_177688959_FIXED
unrefs_.push_back(rep);
-#endif
return rep;
}
@@ -412,43 +400,23 @@ class CordRingTestWithParam : public testing::TestWithParam<TestParam> {
return NeedsUnref(rep);
}
- void Unref(CordRep* rep) {
-#if !ASAN_BUG_177688959_FIXED
- CordRep::Unref(rep);
-#endif
- }
-
template <typename CordRepType>
CordRepType* RefIfShared(CordRepType* rep) {
return Shared() ? Ref(rep) : rep;
}
- void UnrefIfShared(CordRep* rep) {
- if (Shared()) Unref(rep);
- }
-
template <typename CordRepType>
CordRepType* RefIfInputShared(CordRepType* rep) {
return InputShared() ? Ref(rep) : rep;
}
- void UnrefIfInputShared(CordRep* rep) {
- if (InputShared()) Unref(rep);
- }
-
template <typename CordRepType>
CordRepType* RefIfInputSharedIndirect(CordRepType* rep) {
return InputSharedIndirect() ? Ref(rep) : rep;
}
- void UnrefIfInputSharedIndirect(CordRep* rep) {
- if (InputSharedIndirect()) Unref(rep);
- }
-
private:
-#if ASAN_BUG_177688959_FIXED
std::vector<CordRep*> unrefs_;
-#endif
};
class CordRingCreateTest : public CordRingTestWithParam {
@@ -520,26 +488,26 @@ class CordRingBuildInputTest : public CordRingTestWithParam {
}
};
-INSTANTIATE_TEST_CASE_P(WithParam, CordRingSubTest,
- testing::ValuesIn(CordRingSubTest::CreateTestParams()),
- TestParamToString);
+INSTANTIATE_TEST_SUITE_P(WithParam, CordRingSubTest,
+ testing::ValuesIn(CordRingSubTest::CreateTestParams()),
+ TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingCreateTest,
testing::ValuesIn(CordRingCreateTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingCreateFromTreeTest,
testing::ValuesIn(CordRingCreateFromTreeTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingBuildTest,
testing::ValuesIn(CordRingBuildTest::CreateTestParams()),
TestParamToString);
-INSTANTIATE_TEST_CASE_P(
+INSTANTIATE_TEST_SUITE_P(
WithParam, CordRingBuildInputTest,
testing::ValuesIn(CordRingBuildInputTest::CreateTestParams()),
TestParamToString);
@@ -550,7 +518,6 @@ TEST_P(CordRingCreateTest, CreateFromFlat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1));
- Unref(result);
}
TEST_P(CordRingCreateTest, CreateFromRing) {
@@ -558,9 +525,8 @@ TEST_P(CordRingCreateTest, CreateFromRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Create(ring));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringRing) {
@@ -570,23 +536,20 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringRing) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfInputPrivate(GetParam(), ring));
EXPECT_THAT(ToString(result), string_view(kFox).substr(2, 11));
- UnrefIfInputSharedIndirect(ring);
- UnrefIfInputShared(sub);
- Unref(result);
}
TEST_F(CordRingTest, CreateWithIllegalExtraCapacity) {
- CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
#if defined(ABSL_HAVE_EXCEPTIONS)
+ CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
try {
CordRepRing::Create(flat, CordRepRing::kMaxCapacity);
GTEST_FAIL() << "expected std::length_error exception";
} catch (const std::length_error&) {
}
#elif defined(GTEST_HAS_DEATH_TEST)
+ CordRep* flat = NeedsUnref(MakeFlat("Hello world"));
EXPECT_DEATH(CordRepRing::Create(flat, CordRepRing::kMaxCapacity), ".*");
#endif
- Unref(flat);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfFlat) {
@@ -597,9 +560,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfFlat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(20));
EXPECT_THAT(ToFlats(result), ElementsAre(str1.substr(4, 20)));
- Unref(result);
- UnrefIfInputShared(flat);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingCreateTest, CreateFromExternal) {
@@ -609,8 +569,6 @@ TEST_P(CordRingCreateTest, CreateFromExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1));
- Unref(result);
- UnrefIfInputShared(child);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfExternal) {
@@ -621,9 +579,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(24));
EXPECT_THAT(ToFlats(result), ElementsAre(str1.substr(1, 24)));
- Unref(result);
- UnrefIfInputShared(external);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfLargeExternal) {
@@ -637,9 +592,6 @@ TEST_P(CordRingCreateFromTreeTest, CreateFromSubstringOfLargeExternal) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str.size()));
EXPECT_THAT(ToRawFlats(result), ElementsAre(str));
- Unref(result);
- UnrefIfInputShared(external);
- UnrefIfInputSharedIndirect(child);
}
TEST_P(CordRingBuildInputTest, CreateFromConcat) {
@@ -652,10 +604,6 @@ TEST_P(CordRingBuildInputTest, CreateFromConcat) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(26));
EXPECT_THAT(ToString(result), Eq(kAlphabet));
- UnrefIfInputSharedIndirect(flats[0]);
- UnrefIfInputSharedIndirect(flats[3]);
- UnrefIfInputShared(concat);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, CreateFromSubstringConcat) {
@@ -671,10 +619,6 @@ TEST_P(CordRingBuildInputTest, CreateFromSubstringConcat) {
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result->length, Eq(len));
ASSERT_THAT(ToString(result), string_view(kAlphabet).substr(off, len));
- UnrefIfInputSharedIndirect(flats[0]);
- UnrefIfInputSharedIndirect(flats[3]);
- UnrefIfInputShared(child);
- Unref(result);
}
}
}
@@ -689,7 +633,6 @@ TEST_P(CordRingCreateTest, Properties) {
EXPECT_THAT(result->capacity(), Le(2 * 120 + 1));
EXPECT_THAT(result->entries(), Eq(1));
EXPECT_THAT(result->begin_pos(), Eq(0));
- Unref(result);
}
TEST_P(CordRingCreateTest, EntryForNewFlat) {
@@ -700,7 +643,6 @@ TEST_P(CordRingCreateTest, EntryForNewFlat) {
EXPECT_THAT(result->entry_child(0), Eq(child));
EXPECT_THAT(result->entry_end_pos(0), Eq(str1.length()));
EXPECT_THAT(result->entry_data_offset(0), Eq(0));
- Unref(result);
}
TEST_P(CordRingCreateTest, EntryForNewFlatSubstring) {
@@ -712,7 +654,6 @@ TEST_P(CordRingCreateTest, EntryForNewFlatSubstring) {
EXPECT_THAT(result->entry_child(0), Eq(child));
EXPECT_THAT(result->entry_end_pos(0), Eq(26));
EXPECT_THAT(result->entry_data_offset(0), Eq(10));
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendFlat) {
@@ -722,10 +663,9 @@ TEST_P(CordRingBuildTest, AppendFlat) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, MakeFlat(str2)));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependFlat) {
@@ -735,10 +675,9 @@ TEST_P(CordRingBuildTest, PrependFlat) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, MakeFlat(str2)));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendString) {
@@ -748,10 +687,9 @@ TEST_P(CordRingBuildTest, AppendString) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingExtra) {
@@ -762,8 +700,7 @@ TEST_P(CordRingBuildTest, AppendStringHavingExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
- UnrefIfShared(ring);
- Unref(result);
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
}
TEST_P(CordRingBuildTest, AppendStringHavingPartialExtra) {
@@ -785,13 +722,12 @@ TEST_P(CordRingBuildTest, AppendStringHavingPartialExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str1, str1a), str2a));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre(str1, str2));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingExtraInSubstring) {
@@ -802,14 +738,13 @@ TEST_P(CordRingBuildTest, AppendStringHavingExtraInSubstring) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat("1234", str2)));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre("1234", str2));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendStringHavingSharedExtra) {
@@ -837,10 +772,9 @@ TEST_P(CordRingBuildTest, AppendStringHavingSharedExtra) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre("1234", str2));
- UnrefIfShared(ring);
- Unref(result);
CordRep::Unref(shared_type == 1 ? flat1 : flat);
}
@@ -857,8 +791,6 @@ TEST_P(CordRingBuildTest, AppendStringWithExtra) {
EXPECT_THAT(result->length, Eq(str1.size() + str2.size() + str3.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(str1, StrCat(str2, str3)));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependString) {
@@ -875,8 +807,6 @@ TEST_P(CordRingBuildTest, PrependString) {
}
EXPECT_THAT(result->length, Eq(str1.size() + str2.size()));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependStringHavingExtra) {
@@ -887,14 +817,13 @@ TEST_P(CordRingBuildTest, PrependStringHavingExtra) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, str2));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(result->length, Eq(4 + str2.size()));
if (GetParam().refcount_is_one) {
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str2, "1234")));
} else {
EXPECT_THAT(ToFlats(result), ElementsAre(str2, "1234"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, PrependStringHavingSharedExtra) {
@@ -920,9 +849,8 @@ TEST_P(CordRingBuildTest, PrependStringHavingSharedExtra) {
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result->length, Eq(str1a.size() + str2.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(str2, str1a));
- UnrefIfShared(ring);
- Unref(result);
CordRep::Unref(shared_type == 1 ? flat1 : flat);
}
}
@@ -938,8 +866,6 @@ TEST_P(CordRingBuildTest, PrependStringWithExtra) {
EXPECT_THAT(result->length, Eq(str1.size() + str2.size() + str3.size()));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre(StrCat(str3, str2), str1));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMix) {
@@ -950,12 +876,10 @@ TEST_P(CordRingBuildTest, AppendPrependStringMix) {
result = CordRepRing::Prepend(result, flats[4 - i]);
result = CordRepRing::Append(result, flats[4 + i]);
}
- UnrefIfShared(ring);
NeedsUnref(result);
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
EXPECT_THAT(ToString(result), kFox);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMixWithExtra) {
@@ -976,8 +900,6 @@ TEST_P(CordRingBuildTest, AppendPrependStringMixWithExtra) {
EXPECT_THAT(ToFlats(result), ElementsAre("The quick brown fox ", "jumps ",
"over the lazy dog"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendPrependStringMixWithPrependedExtra) {
@@ -998,8 +920,6 @@ TEST_P(CordRingBuildTest, AppendPrependStringMixWithPrependedExtra) {
EXPECT_THAT(ToFlats(result), ElementsAre("The quick brown fox ", "jumps ",
"over the lazy dog"));
}
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingSubTest, SubRing) {
@@ -1011,16 +931,14 @@ TEST_P(CordRingSubTest, SubRing) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::SubRing(ring, offset, 0);
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size() - offset; ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::SubRing(ring, offset, len));
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
ASSERT_THAT(ToString(result), Eq(all.substr(offset, len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
}
@@ -1039,18 +957,16 @@ TEST_P(CordRingSubTest, SubRingFromLargeExternal) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::SubRing(ring, offset, 0);
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = all.size() - 30; len < all.size() - offset; ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::SubRing(ring, offset, len));
ASSERT_THAT(result, IsValidRingBuffer());
ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
auto str = ToString(result);
ASSERT_THAT(str, SizeIs(len));
ASSERT_THAT(str, Eq(all.substr(offset, len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
}
@@ -1063,16 +979,14 @@ TEST_P(CordRingSubTest, RemovePrefix) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::RemovePrefix(ring, all.size());
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size(); ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::RemovePrefix(ring, len));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToString(result), Eq(all.substr(len)));
- UnrefIfShared(ring);
- Unref(result);
}
}
@@ -1087,7 +1001,6 @@ TEST_P(CordRingSubTest, RemovePrefixFromLargeExternal) {
ElementsAre(
not_a_string_view(external1->base, 1 << 20).remove_prefix(1 << 16),
not_a_string_view(external2->base, 1 << 20)));
- Unref(result);
}
TEST_P(CordRingSubTest, RemoveSuffix) {
@@ -1098,16 +1011,14 @@ TEST_P(CordRingSubTest, RemoveSuffix) {
CordRepRing* ring = RefIfShared(FromFlats(flats, composition));
CordRepRing* result = CordRepRing::RemoveSuffix(ring, all.size());
EXPECT_THAT(result, nullptr);
- UnrefIfShared(ring);
for (size_t len = 1; len < all.size(); ++len) {
ring = RefIfShared(FromFlats(flats, composition));
result = NeedsUnref(CordRepRing::RemoveSuffix(ring, len));
ASSERT_THAT(result, IsValidRingBuffer());
- EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
- EXPECT_THAT(ToString(result), Eq(all.substr(0, all.size() - len)));
- UnrefIfShared(ring);
- Unref(result);
+ ASSERT_THAT(result, EqIfPrivate(GetParam(), ring));
+ ASSERT_THAT(result, NeIfShared(GetParam(), ring));
+ ASSERT_THAT(ToString(result), Eq(all.substr(0, all.size() - len)));
}
}
@@ -1120,9 +1031,8 @@ TEST_P(CordRingSubTest, AppendRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, child));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivate(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithFlatOffset) {
@@ -1135,11 +1045,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithFlatOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "brown ", "fox ", "jumps ",
"over ", "the ", "lazy ", "dog"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithBrokenOffset) {
@@ -1152,11 +1060,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithBrokenOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("Head", "umps ", "over ", "the ", "lazy ", "dog"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingWithFlatLength) {
@@ -1169,11 +1075,9 @@ TEST_P(CordRingBuildInputTest, AppendRingWithFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "The ", "quick ", "brown ",
"fox ", "jumps ", "over ", "the "));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingWithBrokenFlatLength) {
@@ -1186,11 +1090,9 @@ TEST_P(CordRingBuildTest, AppendRingWithBrokenFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "The ", "quick ", "brown ",
"fox ", "jumps ", "ov"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingMiddlePiece) {
@@ -1203,11 +1105,9 @@ TEST_P(CordRingBuildTest, AppendRingMiddlePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("Head", "ck ", "brown ", "fox ", "jum"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildTest, AppendRingSinglePiece) {
@@ -1220,11 +1120,8 @@ TEST_P(CordRingBuildTest, AppendRingSinglePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Head", "row"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfInputShared(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, AppendRingSinglePieceWithPrefix) {
@@ -1241,11 +1138,8 @@ TEST_P(CordRingBuildInputTest, AppendRingSinglePieceWithPrefix) {
CordRepRing* result = NeedsUnref(CordRepRing::Append(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("Prepend", "Head", "row"));
- UnrefIfInputSharedIndirect(child);
- UnrefIfInputShared(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRing) {
@@ -1258,10 +1152,8 @@ TEST_P(CordRingBuildInputTest, PrependRing) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, child));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAreArray(kFoxFlats));
- UnrefIfInputShared(child);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithFlatOffset) {
@@ -1274,12 +1166,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithFlatOffset) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("brown ", "fox ", "jumps ", "over ",
"the ", "lazy ", "dog", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithBrokenOffset) {
@@ -1291,12 +1180,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithBrokenOffset) {
CordRep* stripped = RefIfInputSharedIndirect(RemovePrefix(21, child));
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("umps ", "over ", "the ", "lazy ", "dog", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithFlatLength) {
@@ -1309,12 +1195,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("The ", "quick ", "brown ", "fox ",
"jumps ", "over ", "the ", "Tail"));
- UnrefIfShared(ring);
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingWithBrokenFlatLength) {
@@ -1327,12 +1210,9 @@ TEST_P(CordRingBuildInputTest, PrependRingWithBrokenFlatLength) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("The ", "quick ", "brown ", "fox ",
"jumps ", "ov", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingMiddlePiece) {
@@ -1346,12 +1226,9 @@ TEST_P(CordRingBuildInputTest, PrependRingMiddlePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result),
ElementsAre("ck ", "brown ", "fox ", "jum", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingSinglePiece) {
@@ -1364,11 +1241,8 @@ TEST_P(CordRingBuildInputTest, PrependRingSinglePiece) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("row", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_P(CordRingBuildInputTest, PrependRingSinglePieceWithPrefix) {
@@ -1384,11 +1258,8 @@ TEST_P(CordRingBuildInputTest, PrependRingSinglePieceWithPrefix) {
CordRepRing* result = NeedsUnref(CordRepRing::Prepend(ring, stripped));
ASSERT_THAT(result, IsValidRingBuffer());
EXPECT_THAT(result, EqIfPrivateAndCapacity(GetParam(), ring));
+ EXPECT_THAT(result, NeIfShared(GetParam(), ring));
EXPECT_THAT(ToFlats(result), ElementsAre("row", "Prepend", "Tail"));
- UnrefIfInputShared(child);
- UnrefIfInputSharedIndirect(stripped);
- UnrefIfShared(ring);
- Unref(result);
}
TEST_F(CordRingTest, Find) {
@@ -1406,7 +1277,6 @@ TEST_F(CordRingTest, Find) {
ASSERT_THAT(found.offset, Lt(data.length()));
ASSERT_THAT(data[found.offset], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindWithHint) {
@@ -1442,7 +1312,6 @@ TEST_F(CordRingTest, FindWithHint) {
++flat_pos;
flat_offset += flat.length();
}
- Unref(ring);
}
TEST_F(CordRingTest, FindInLargeRing) {
@@ -1464,7 +1333,6 @@ TEST_F(CordRingTest, FindInLargeRing) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[pos.offset], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTail) {
@@ -1483,7 +1351,6 @@ TEST_F(CordRingTest, FindTail) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTailWithHint) {
@@ -1510,7 +1377,6 @@ TEST_F(CordRingTest, FindTailWithHint) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, FindTailInLargeRing) {
@@ -1532,7 +1398,6 @@ TEST_F(CordRingTest, FindTailInLargeRing) {
ASSERT_THAT(pos.offset, Lt(data.length()));
ASSERT_THAT(data[data.length() - pos.offset - 1], Eq(value[i]));
}
- Unref(ring);
}
TEST_F(CordRingTest, GetCharacter) {
@@ -1544,7 +1409,6 @@ TEST_F(CordRingTest, GetCharacter) {
for (int i = 0; i < value.length(); ++i) {
ASSERT_THAT(result->GetCharacter(i), Eq(value[i]));
}
- Unref(result);
}
TEST_F(CordRingTest, GetCharacterWithSubstring) {
@@ -1556,7 +1420,67 @@ TEST_F(CordRingTest, GetCharacterWithSubstring) {
for (int i = 0; i < value.length(); ++i) {
ASSERT_THAT(result->GetCharacter(i), Eq(value[i]));
}
- Unref(result);
+}
+
+TEST_F(CordRingTest, IsFlatSingleFlat) {
+ for (bool external : {false, true}) {
+ SCOPED_TRACE(external ? "With External" : "With Flat");
+ absl::string_view str = "Hello world";
+ CordRep* rep = external ? MakeExternal(str) : MakeFlat(str);
+ CordRepRing* ring = NeedsUnref(CordRepRing::Create(rep));
+
+ // The ring is a single non-fragmented flat:
+ absl::string_view fragment;
+ EXPECT_TRUE(ring->IsFlat(nullptr));
+ EXPECT_TRUE(ring->IsFlat(&fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+ fragment = "";
+ EXPECT_TRUE(ring->IsFlat(0, 11, nullptr));
+ EXPECT_TRUE(ring->IsFlat(0, 11, &fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+
+  // Arbitrary ranges within the flat must also report true.
+ EXPECT_TRUE(ring->IsFlat(1, 4, &fragment));
+ EXPECT_THAT(fragment, Eq("ello"));
+ EXPECT_TRUE(ring->IsFlat(6, 5, &fragment));
+ EXPECT_THAT(fragment, Eq("world"));
+ }
+}
+
+TEST_F(CordRingTest, IsFlatMultiFlat) {
+ for (bool external : {false, true}) {
+ SCOPED_TRACE(external ? "With External" : "With Flat");
+ absl::string_view str1 = "Hello world";
+ absl::string_view str2 = "Halt and catch fire";
+ CordRep* rep1 = external ? MakeExternal(str1) : MakeFlat(str1);
+ CordRep* rep2 = external ? MakeExternal(str2) : MakeFlat(str2);
+ CordRepRing* ring = CordRepRing::Append(CordRepRing::Create(rep1), rep2);
+ NeedsUnref(ring);
+
+  // The ring is fragmented, so IsFlat() on the entire cord must be false.
+ EXPECT_FALSE(ring->IsFlat(nullptr));
+ absl::string_view fragment = "Don't touch this";
+ EXPECT_FALSE(ring->IsFlat(&fragment));
+ EXPECT_THAT(fragment, Eq("Don't touch this"));
+
+ // Check for ranges exactly within both flats.
+ EXPECT_TRUE(ring->IsFlat(0, 11, &fragment));
+ EXPECT_THAT(fragment, Eq("Hello world"));
+ EXPECT_TRUE(ring->IsFlat(11, 19, &fragment));
+ EXPECT_THAT(fragment, Eq("Halt and catch fire"));
+
+ // Check for arbitrary partial range inside each flat.
+ EXPECT_TRUE(ring->IsFlat(1, 4, &fragment));
+  EXPECT_THAT(fragment, Eq("ello"));
+ EXPECT_TRUE(ring->IsFlat(26, 4, &fragment));
+  EXPECT_THAT(fragment, Eq("fire"));
+
+ // Check ranges spanning across both flats
+ fragment = "Don't touch this";
+ EXPECT_FALSE(ring->IsFlat(1, 18, &fragment));
+ EXPECT_FALSE(ring->IsFlat(10, 2, &fragment));
+ EXPECT_THAT(fragment, Eq("Don't touch this"));
+ }
}
TEST_F(CordRingTest, Dump) {
@@ -1564,7 +1488,6 @@ TEST_F(CordRingTest, Dump) {
auto flats = MakeSpan(kFoxFlats);
CordRepRing* ring = NeedsUnref(FromFlats(flats, kPrepend));
ss << *ring;
- Unref(ring);
}
} // namespace
diff --git a/absl/strings/cord_test.cc b/absl/strings/cord_test.cc
index f9982428..14eca155 100644
--- a/absl/strings/cord_test.cc
+++ b/absl/strings/cord_test.cc
@@ -35,6 +35,7 @@
#include "absl/base/macros.h"
#include "absl/container/fixed_array.h"
#include "absl/strings/cord_test_helpers.h"
+#include "absl/strings/cordz_test_helpers.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
@@ -187,6 +188,19 @@ class CordTestPeer {
static cord_internal::CordzInfo* GetCordzInfo(const Cord& c) {
return c.contents_.cordz_info();
}
+
+ static Cord MakeSubstring(Cord src, size_t offset, size_t length) {
+ ABSL_RAW_CHECK(src.contents_.is_tree(), "Can not be inlined");
+ Cord cord;
+ auto* rep = new cord_internal::CordRepSubstring;
+ rep->tag = cord_internal::SUBSTRING;
+ rep->child = cord_internal::CordRep::Ref(src.contents_.tree());
+ rep->start = offset;
+ rep->length = length;
+ cord.contents_.EmplaceTree(rep,
+ cord_internal::CordzUpdateTracker::kSubCord);
+ return cord;
+ }
};
ABSL_NAMESPACE_END
@@ -227,7 +241,6 @@ TEST(GigabyteCord, FromExternal) {
// caused crashes in production. We grow exponentially so that the code will
// execute in a reasonable amount of time.
absl::Cord c;
- ABSL_RAW_LOG(INFO, "Made a Cord with %zu bytes!", c.size());
c.Append(from);
while (c.size() < max_size) {
c.Append(c);
@@ -466,8 +479,8 @@ TEST(TryFlat, SubstrInlined) {
TEST(TryFlat, SubstrFlat) {
absl::Cord c("longer than 15 bytes");
- c.RemovePrefix(1);
- EXPECT_EQ(c.TryFlat(), "onger than 15 bytes");
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), "onger than 15 bytes");
}
TEST(TryFlat, Concat) {
@@ -482,16 +495,46 @@ TEST(TryFlat, External) {
TEST(TryFlat, SubstrExternal) {
absl::Cord c = absl::MakeCordFromExternal("hell", [](absl::string_view) {});
- c.RemovePrefix(1);
- EXPECT_EQ(c.TryFlat(), "ell");
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), "ell");
}
TEST(TryFlat, SubstrConcat) {
absl::Cord c = absl::MakeFragmentedCord({"hello", " world"});
+ absl::Cord sub = absl::CordTestPeer::MakeSubstring(c, 1, c.size() - 1);
+ EXPECT_EQ(sub.TryFlat(), absl::nullopt);
c.RemovePrefix(1);
EXPECT_EQ(c.TryFlat(), absl::nullopt);
}
+TEST(TryFlat, CommonlyAssumedInvariants) {
+ // The behavior tested below is not part of the API contract of Cord, but it's
+ // something we intend to be true in our current implementation. This test
+ // exists to detect and prevent accidental breakage of the implementation.
+ absl::string_view fragments[] = {"A fragmented test",
+ " cord",
+ " to test subcords",
+ " of ",
+ "a",
+ " cord for",
+ " each chunk "
+ "returned by the ",
+ "iterator"};
+ absl::Cord c = absl::MakeFragmentedCord(fragments);
+ int fragment = 0;
+ int offset = 0;
+ absl::Cord::CharIterator itc = c.char_begin();
+ for (absl::string_view sv : c.Chunks()) {
+ absl::string_view expected = fragments[fragment];
+ absl::Cord subcord1 = c.Subcord(offset, sv.length());
+ absl::Cord subcord2 = absl::Cord::AdvanceAndRead(&itc, sv.size());
+ EXPECT_EQ(subcord1.TryFlat(), expected);
+ EXPECT_EQ(subcord2.TryFlat(), expected);
+ ++fragment;
+ offset += sv.length();
+ }
+}
+
static bool IsFlat(const absl::Cord& c) {
return c.chunk_begin() == c.chunk_end() || ++c.chunk_begin() == c.chunk_end();
}
@@ -1274,6 +1317,26 @@ TEST(Cord, Concat_Append) {
EXPECT_EQ(s2.size(), size + 1);
}
+TEST(Cord, DiabolicalGrowth) {
+  // This test exercises a diabolical Append(<one char>) loop on a cord,
+  // sharing the cord before each Append call, which results in a terribly
+  // fragmented cord.
+ // TODO(b/183983616): Apply some minimum compaction when copying a shared
+ // source cord into a mutable copy for updates in CordRepRing.
+ RandomEngine rng(testing::GTEST_FLAG(random_seed));
+ const std::string expected = RandomLowercaseString(&rng, 5000);
+ absl::Cord cord;
+ for (char c : expected) {
+ absl::Cord shared(cord);
+ cord.Append(absl::string_view(&c, 1));
+ }
+ std::string value;
+ absl::CopyCordToString(cord, &value);
+ EXPECT_EQ(value, expected);
+ ABSL_RAW_LOG(INFO, "Diabolical size allocated = %zu",
+ cord.EstimatedMemoryUsage());
+}
+
TEST(MakeFragmentedCord, MakeFragmentedCordFromInitializerList) {
absl::Cord fragmented =
absl::MakeFragmentedCord({"A ", "fragmented ", "Cord"});
diff --git a/absl/strings/cord_test_helpers.h b/absl/strings/cord_test_helpers.h
index f1036e3b..31a1dc89 100644
--- a/absl/strings/cord_test_helpers.h
+++ b/absl/strings/cord_test_helpers.h
@@ -17,11 +17,73 @@
#ifndef ABSL_STRINGS_CORD_TEST_HELPERS_H_
#define ABSL_STRINGS_CORD_TEST_HELPERS_H_
+#include <cstdint>
+#include <iostream>
+#include <string>
+
+#include "absl/base/config.h"
#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/string_view.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
+// Cord sizes relevant for testing
+enum class TestCordSize {
+ // An empty value
+ kEmpty = 0,
+
+ // An inlined string value
+ kInlined = cord_internal::kMaxInline / 2 + 1,
+
+ // 'Well known' SSO lengths (excluding terminating zero).
+ // libstdcxx has a maximum SSO of 15, libc++ has a maximum SSO of 22.
+ kStringSso1 = 15,
+ kStringSso2 = 22,
+
+ // A string value which is too large to fit in inlined data, but small enough
+ // such that Cord prefers copying the value if possible, i.e.: not stealing
+ // std::string inputs, or referencing existing CordReps on Append, etc.
+ kSmall = cord_internal::kMaxBytesToCopy / 2 + 1,
+
+ // A string value large enough that Cord prefers to reference or steal from
+ // existing inputs rather than copying contents of the input.
+ kMedium = cord_internal::kMaxFlatLength / 2 + 1,
+
+  // A string value large enough to cause it to be stored in multiple flats.
+ kLarge = cord_internal::kMaxFlatLength * 4
+};
+
+// To string helper
+inline absl::string_view ToString(TestCordSize size) {
+ switch (size) {
+ case TestCordSize::kEmpty:
+ return "Empty";
+ case TestCordSize::kInlined:
+ return "Inlined";
+ case TestCordSize::kSmall:
+ return "Small";
+ case TestCordSize::kStringSso1:
+ return "StringSso1";
+ case TestCordSize::kStringSso2:
+ return "StringSso2";
+ case TestCordSize::kMedium:
+ return "Medium";
+ case TestCordSize::kLarge:
+ return "Large";
+ }
+ return "???";
+}
+
+// Returns the length matching the specified size
+inline size_t Length(TestCordSize size) { return static_cast<size_t>(size); }
+
+// Stream output helper
+inline std::ostream& operator<<(std::ostream& stream, TestCordSize size) {
+ return stream << ToString(size);
+}
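
For illustration, these helpers are meant to compose in parameterized tests roughly as below (a sketch only; `SizeParamTest` is a hypothetical fixture name, and the pattern mirrors the cordz_test.cc instantiations later in this change):

#include <string>
#include "gtest/gtest.h"
#include "absl/strings/cord.h"
#include "absl/strings/cord_test_helpers.h"

class SizeParamTest : public testing::TestWithParam<absl::TestCordSize> {};

INSTANTIATE_TEST_SUITE_P(
    WithParam, SizeParamTest,
    testing::Values(absl::TestCordSize::kInlined, absl::TestCordSize::kLarge),
    [](const testing::TestParamInfo<absl::TestCordSize>& info) {
      return std::string(absl::ToString(info.param));  // "Inlined", "Large", ...
    });

TEST_P(SizeParamTest, SizeMatchesRequestedLength) {
  absl::Cord cord(std::string(absl::Length(GetParam()), '.'));
  EXPECT_EQ(cord.size(), absl::Length(GetParam()));
}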
+
// Creates a multi-segment Cord from an iterable container of strings. The
// resulting Cord is guaranteed to have one segment for every string in the
// container. This allows code to be unit tested with multi-segment Cord
diff --git a/absl/strings/cordz_test.cc b/absl/strings/cordz_test.cc
new file mode 100644
index 00000000..0e11f5c8
--- /dev/null
+++ b/absl/strings/cordz_test.cc
@@ -0,0 +1,421 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <cstdint>
+#include <string>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/macros.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/cord_test_helpers.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/string_view.h"
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+using testing::Eq;
+using testing::AnyOf;
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+
+using cord_internal::CordzInfo;
+using cord_internal::CordzSampleToken;
+using cord_internal::CordzStatistics;
+using cord_internal::CordzUpdateTracker;
+using Method = CordzUpdateTracker::MethodIdentifier;
+
+// Do not print cord contents, we only care about 'size' perhaps.
+// Note that this method must be inside the named namespace.
+inline void PrintTo(const Cord& cord, std::ostream* s) {
+ if (s) *s << "Cord[" << cord.size() << "]";
+}
+
+namespace {
+
+auto constexpr kMaxInline = cord_internal::kMaxInline;
+
+// Returns a string_view value of the specified length
+// We do this to avoid 'consuming' large strings in Cord by default.
+absl::string_view MakeString(size_t size) {
+ thread_local std::string str;
+ str = std::string(size, '.');
+ return str;
+}
+
+absl::string_view MakeString(TestCordSize size) {
+ return MakeString(Length(size));
+}
+
+// Returns a cord with a sampled method of kAppendString.
+absl::Cord MakeAppendStringCord(TestCordSize size) {
+ absl::Cord cord;
+ cord.Append(MakeString(size));
+ return cord;
+}
+
+std::string TestParamToString(::testing::TestParamInfo<TestCordSize> size) {
+ return absl::StrCat("On", ToString(size.param), "Cord");
+}
+
+class CordzUpdateTest : public testing::TestWithParam<TestCordSize> {
+ public:
+ Cord& cord() { return cord_; }
+
+ Method InitialOr(Method method) const {
+ return (GetParam() > TestCordSize::kInlined) ? Method::kConstructorString
+ : method;
+ }
+
+ private:
+ CordzSamplingIntervalHelper sample_every_{1};
+ Cord cord_{MakeString(GetParam())};
+};
+
+template <typename T>
+std::string ParamToString(::testing::TestParamInfo<T> param) {
+ return std::string(ToString(param.param));
+}
+
+INSTANTIATE_TEST_SUITE_P(WithParam, CordzUpdateTest,
+ testing::Values(TestCordSize::kEmpty,
+ TestCordSize::kInlined,
+ TestCordSize::kLarge),
+ TestParamToString);
+
+class CordzStringTest : public testing::TestWithParam<TestCordSize> {
+ private:
+ CordzSamplingIntervalHelper sample_every_{1};
+};
+
+INSTANTIATE_TEST_SUITE_P(WithParam, CordzStringTest,
+ testing::Values(TestCordSize::kInlined,
+ TestCordSize::kStringSso1,
+ TestCordSize::kStringSso2,
+ TestCordSize::kSmall,
+ TestCordSize::kLarge),
+ ParamToString<TestCordSize>);
+
+TEST(CordzTest, ConstructSmallArray) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST(CordzTest, ConstructLargeArray) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzStringTest, ConstructString) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ }
+}
+
+TEST(CordzTest, CopyConstruct) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord cord(src);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
+}
+
+TEST(CordzTest, CopyConstructFromSampled) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src(MakeString(TestCordSize::kLarge));
+ Cord cord(src);
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kConstructorString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(1));
+}
+
+TEST(CordzTest, MoveConstruct) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src(MakeString(TestCordSize::kLarge));
+ Cord cord(std::move(src));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzUpdateTest, AssignCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord() = src;
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord())->GetCordzStatistics();
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST_P(CordzUpdateTest, AssignSampledCord) {
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ cord() = src;
+ ASSERT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord())->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignSampledCordToUnsampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzUpdateTest, AssignSampledCordToSampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = MakeAppendStringCord(TestCordSize::kLarge);
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord = src;
+ ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
+}
+
+TEST(CordzTest, AssignInlinedCord) {
+ CordzSampleToken token;
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord(MakeString(TestCordSize::kLarge));
+ const CordzInfo* info = GetCordzInfoForTesting(cord);
+ Cord src = UnsampledCord(MakeString(TestCordSize::kInlined));
+ cord = src;
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+ EXPECT_FALSE(CordzInfoIsListed(info));
+}
+
+TEST(CordzUpdateTest, MoveAssignCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord;
+ Cord src(MakeString(TestCordSize::kLarge));
+ cord = std::move(src);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+}
+
+TEST_P(CordzUpdateTest, AssignLargeArray) {
+ cord() = MakeString(TestCordSize::kSmall);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
+}
+
+TEST_P(CordzUpdateTest, AssignSmallArray) {
+ cord() = MakeString(TestCordSize::kSmall);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
+}
+
+TEST_P(CordzUpdateTest, AssignInlinedArray) {
+ cord() = MakeString(TestCordSize::kInlined);
+ EXPECT_THAT(GetCordzInfoForTesting(cord()), Eq(nullptr));
+}
+
+TEST_P(CordzStringTest, AssignStringToInlined) {
+ Cord cord;
+ cord = std::string(Length(GetParam()), '.');
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAssignString));
+ }
+}
+
+TEST_P(CordzStringTest, AssignStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord = std::string(Length(GetParam()), '.');
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kAssignString, 1));
+ }
+}
+
+TEST_P(CordzUpdateTest, AssignInlinedString) {
+ cord() = std::string(Length(TestCordSize::kInlined), '.');
+ EXPECT_THAT(GetCordzInfoForTesting(cord()), Eq(nullptr));
+}
+
+TEST_P(CordzUpdateTest, AppendCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord().Append(src);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
+}
+
+TEST_P(CordzUpdateTest, MoveAppendCord) {
+ cord().Append(UnsampledCord(MakeString(TestCordSize::kLarge)));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
+}
+
+TEST_P(CordzUpdateTest, AppendSmallArray) {
+ cord().Append(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
+}
+
+TEST_P(CordzUpdateTest, AppendLargeArray) {
+ cord().Append(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
+}
+
+TEST_P(CordzStringTest, AppendStringToEmpty) {
+ Cord cord;
+ cord.Append(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendString));
+ }
+}
+
+TEST_P(CordzStringTest, AppendStringToInlined) {
+ Cord cord(MakeString(TestCordSize::kInlined));
+ cord.Append(std::string(Length(GetParam()), '.'));
+ if (Length(TestCordSize::kInlined) + Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendString));
+ }
+}
+
+TEST_P(CordzStringTest, AppendStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord.Append(std::string(Length(GetParam()), '.'));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kAppendString, 1));
+}
+
+TEST(CordzTest, MakeCordFromExternal) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord = MakeCordFromExternal("Hello world", [](absl::string_view) {});
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kMakeCordFromExternal));
+}
+
+TEST(CordzTest, MakeCordFromEmptyExternal) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord cord = MakeCordFromExternal({}, [](absl::string_view) {});
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST_P(CordzUpdateTest, PrependCord) {
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ cord().Prepend(src);
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependCord)));
+}
+
+TEST_P(CordzUpdateTest, PrependSmallArray) {
+ cord().Prepend(MakeString(TestCordSize::kSmall));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependString)));
+}
+
+TEST_P(CordzUpdateTest, PrependLargeArray) {
+ cord().Prepend(MakeString(TestCordSize::kLarge));
+ EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependString)));
+}
+
+TEST_P(CordzStringTest, PrependStringToEmpty) {
+ Cord cord;
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ if (Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kPrependString));
+ }
+}
+
+TEST_P(CordzStringTest, PrependStringToInlined) {
+ Cord cord(MakeString(TestCordSize::kInlined));
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ if (Length(TestCordSize::kInlined) + Length(GetParam()) > kMaxInline) {
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kPrependString));
+ }
+}
+
+TEST_P(CordzStringTest, PrependStringToCord) {
+ Cord cord(MakeString(TestCordSize::kLarge));
+ cord.Prepend(std::string(Length(GetParam()), '.'));
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kPrependString, 1));
+}
+
+TEST(CordzTest, RemovePrefix) {
+ CordzSamplingIntervalHelper sample_every(1);
+ Cord cord(MakeString(TestCordSize::kLarge));
+
+ // Half the cord
+ cord.RemovePrefix(cord.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemovePrefix, 1));
+
+ // TODO(mvels): RemovePrefix does not reset to inlined, except if empty?
+ cord.RemovePrefix(cord.size() - kMaxInline);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemovePrefix, 2));
+
+ cord.RemovePrefix(cord.size());
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, RemoveSuffix) {
+ CordzSamplingIntervalHelper sample_every(1);
+ Cord cord(MakeString(TestCordSize::kLarge));
+
+ // Half the cord
+ cord.RemoveSuffix(cord.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemoveSuffix, 1));
+
+ // TODO(mvels): RemoveSuffix does not reset to inlined, except if empty?
+ cord.RemoveSuffix(cord.size() - kMaxInline);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
+ EXPECT_THAT(cord, CordzMethodCountEq(Method::kRemoveSuffix, 2));
+
+ cord.RemoveSuffix(cord.size());
+ EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
+}
+
+TEST(CordzTest, SubCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord cord = src.Subcord(10, src.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
+}
+
+TEST(CordzTest, SmallSubCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
+ Cord cord = src.Subcord(10, kMaxInline + 1);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
+}
+
+TEST(CordzTest, SubCordFromSampledCord) {
+ CordzSamplingIntervalHelper sample_every{1};
+ Cord src(MakeString(TestCordSize::kLarge));
+ Cord cord = src.Subcord(10, src.size() / 2);
+ EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
+ CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
+ EXPECT_THAT(stats.parent_method, Eq(Method::kConstructorString));
+ EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(1));
+}
+
+} // namespace
+
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
diff --git a/absl/strings/cordz_test_helpers.h b/absl/strings/cordz_test_helpers.h
new file mode 100644
index 00000000..e410eecf
--- /dev/null
+++ b/absl/strings/cordz_test_helpers.h
@@ -0,0 +1,151 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
+#define ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
+
+#include <utility>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/base/macros.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+
+// Returns the CordzInfo for the cord, or nullptr if the cord is not sampled.
+inline const cord_internal::CordzInfo* GetCordzInfoForTesting(
+ const Cord& cord) {
+ if (!cord.contents_.is_tree()) return nullptr;
+ return cord.contents_.cordz_info();
+}
+
+// Returns true if the provided cordz_info is in the list of sampled cords.
+inline bool CordzInfoIsListed(const cord_internal::CordzInfo* cordz_info,
+ cord_internal::CordzSampleToken token = {}) {
+ for (const cord_internal::CordzInfo& info : token) {
+ if (cordz_info == &info) return true;
+ }
+ return false;
+}
+
+// Matcher on Cord that verifies all of:
+// - the cord is sampled
+// - the CordzInfo of the cord is listed / discoverable.
+// - the reported CordzStatistics match the cord's actual properties
+// - the cord has an (initial) UpdateTracker count of 1 for `method`
+MATCHER_P(HasValidCordzInfoOf, method, "CordzInfo matches cord") {
+ const cord_internal::CordzInfo* cord_info = GetCordzInfoForTesting(arg);
+ if (cord_info == nullptr) {
+ *result_listener << "cord is not sampled";
+ return false;
+ }
+ if (!CordzInfoIsListed(cord_info)) {
+ *result_listener << "cord is sampled, but not listed";
+ return false;
+ }
+ cord_internal::CordzStatistics stat = cord_info->GetCordzStatistics();
+ if (stat.size != arg.size()) {
+ *result_listener << "cordz size " << stat.size
+ << " does not match cord size " << arg.size();
+ return false;
+ }
+ if (stat.update_tracker.Value(method) != 1) {
+ *result_listener << "Expected method count 1 for " << method << ", found "
+ << stat.update_tracker.Value(method);
+ return false;
+ }
+ return true;
+}
+
+// Matcher on Cord that verifies that the cord is sampled and that the CordzInfo
+// update tracker has 'method' with a call count of 'n'
+MATCHER_P2(CordzMethodCountEq, method, n,
+ absl::StrCat("CordzInfo method count equals ", n)) {
+ const cord_internal::CordzInfo* cord_info = GetCordzInfoForTesting(arg);
+ if (cord_info == nullptr) {
+ *result_listener << "cord is not sampled";
+ return false;
+ }
+ cord_internal::CordzStatistics stat = cord_info->GetCordzStatistics();
+ if (stat.update_tracker.Value(method) != n) {
+ *result_listener << "Expected method count " << n << " for " << method
+ << ", found " << stat.update_tracker.Value(method);
+ return false;
+ }
+ return true;
+}
+
+// Cordz will only update with a new rate once the previously scheduled event
+// has fired. When we disable Cordz, a long delay takes place where we won't
+// consider profiling new Cords. CordzSamplingIntervalHelper will burn through
+// that interval and allow for testing that assumes that the average sampling
+// interval is a particular value.
+class CordzSamplingIntervalHelper {
+ public:
+ explicit CordzSamplingIntervalHelper(int32_t interval)
+ : orig_mean_interval_(absl::cord_internal::get_cordz_mean_interval()) {
+ absl::cord_internal::set_cordz_mean_interval(interval);
+ absl::cord_internal::cordz_set_next_sample_for_testing(interval);
+ }
+
+ ~CordzSamplingIntervalHelper() {
+ absl::cord_internal::set_cordz_mean_interval(orig_mean_interval_);
+ absl::cord_internal::cordz_set_next_sample_for_testing(orig_mean_interval_);
+ }
+
+ private:
+ int32_t orig_mean_interval_;
+};
+
+// Wrapper struct managing a small CordRep `rep`
+struct TestCordRep {
+ cord_internal::CordRepFlat* rep;
+
+ TestCordRep() {
+ rep = cord_internal::CordRepFlat::New(100);
+ rep->length = 100;
+ memset(rep->Data(), 1, 100);
+ }
+ ~TestCordRep() { cord_internal::CordRep::Unref(rep); }
+};
+
+// Wrapper struct managing a small CordRep `rep`, and
+// an InlineData `data` initialized with that CordRep.
+struct TestCordData {
+ TestCordRep rep;
+ cord_internal::InlineData data{rep.rep};
+};
+
+// Creates a Cord that is not sampled
+template <typename... Args>
+Cord UnsampledCord(Args... args) {
+ CordzSamplingIntervalHelper never(9999);
+ Cord cord(std::forward<Args>(args)...);
+ ABSL_ASSERT(GetCordzInfoForTesting(cord) == nullptr);
+ return cord;
+}
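
As a usage sketch of the helpers above (assuming ABSL_INTERNAL_CORDZ_ENABLED and the includes used by cordz_test.cc below; the test name is hypothetical):

TEST(CordzHelpersExample, SampledVersusUnsampled) {
  // Sample every cord created while this helper is in scope.
  absl::CordzSamplingIntervalHelper sample_every{1};

  // Cords created via UnsampledCord() carry no CordzInfo...
  absl::Cord unsampled = absl::UnsampledCord(std::string(1000, '.'));
  EXPECT_EQ(absl::GetCordzInfoForTesting(unsampled), nullptr);

  // ...while a cord built under the 1-in-1 interval is sampled and listed.
  absl::Cord sampled(std::string(1000, '.'));
  ASSERT_NE(absl::GetCordzInfoForTesting(sampled), nullptr);
  EXPECT_TRUE(absl::CordzInfoIsListed(absl::GetCordzInfoForTesting(sampled)));
}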
+
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
diff --git a/absl/strings/internal/charconv_parse.cc b/absl/strings/internal/charconv_parse.cc
index 8b11868c..d29acaf4 100644
--- a/absl/strings/internal/charconv_parse.cc
+++ b/absl/strings/internal/charconv_parse.cc
@@ -52,7 +52,7 @@ static_assert(std::numeric_limits<double>::digits == 53, "IEEE double fact");
// The lowest valued 19-digit decimal mantissa we can read still contains
// sufficient information to reconstruct a binary mantissa.
-static_assert(1000000000000000000u > (uint64_t(1) << (53 + 3)), "(b) above");
+static_assert(1000000000000000000u > (uint64_t{1} << (53 + 3)), "(b) above");
// ParseFloat<16> will read the first 15 significant digits of the mantissa.
//
diff --git a/absl/strings/internal/cord_internal.h b/absl/strings/internal/cord_internal.h
index a1ba67fe..813b3f35 100644
--- a/absl/strings/internal/cord_internal.h
+++ b/absl/strings/internal/cord_internal.h
@@ -329,18 +329,17 @@ static constexpr cordz_info_t BigEndianByte(unsigned char value) {
class InlineData {
public:
+ // DefaultInitType forces the use of the default initialization constructor.
+ enum DefaultInitType { kDefaultInit };
+
// kNullCordzInfo holds the big endian representation of intptr_t(1)
// This is the 'null' / initial value of 'cordz_info'. The null value
// is specifically big endian 1 as with 64-bit pointers, the last
// byte of cordz_info overlaps with the last byte holding the tag.
static constexpr cordz_info_t kNullCordzInfo = BigEndianByte(1);
- // kFakeCordzInfo holds a 'fake', non-null cordz-info value we use to
- // emulate the previous 'kProfiled' tag logic in 'set_profiled' until
- // cord code is changed to store cordz_info values in InlineData.
- static constexpr cordz_info_t kFakeCordzInfo = BigEndianByte(9);
-
constexpr InlineData() : as_chars_{0} {}
+ explicit InlineData(DefaultInitType) {}
explicit constexpr InlineData(CordRep* rep) : as_tree_(rep) {}
explicit constexpr InlineData(absl::string_view chars)
: as_chars_{
@@ -367,6 +366,16 @@ class InlineData {
return as_tree_.cordz_info != kNullCordzInfo;
}
+ // Returns true if either of the provided instances hold a cordz_info value.
+ // This method is more efficient than the equivalent `data1.is_profiled() ||
+ // data2.is_profiled()`. Requires both arguments to hold a tree.
+ static bool is_either_profiled(const InlineData& data1,
+ const InlineData& data2) {
+ assert(data1.is_tree() && data2.is_tree());
+ return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
+ kNullCordzInfo;
+ }
+
// Returns the cordz_info sampling instance for this instance, or nullptr
// if the current instance is not sampled and does not have CordzInfo data.
// Requires the current instance to hold a tree value.
@@ -454,13 +463,6 @@ class InlineData {
tag() = static_cast<char>(size << 1);
}
- // Sets or unsets the 'is_profiled' state of this instance.
- // Requires the current instance to hold a tree value.
- void set_profiled(bool profiled) {
- assert(is_tree());
- as_tree_.cordz_info = profiled ? kFakeCordzInfo : kNullCordzInfo;
- }
-
private:
// See cordz_info_t for forced alignment and size of `cordz_info` details.
struct AsTree {
diff --git a/absl/strings/internal/cord_rep_ring.cc b/absl/strings/internal/cord_rep_ring.cc
index 4d31d1d9..09951290 100644
--- a/absl/strings/internal/cord_rep_ring.cc
+++ b/absl/strings/internal/cord_rep_ring.cc
@@ -301,7 +301,7 @@ bool CordRepRing::IsValid(std::ostream& output) const {
if (offset >= child->length || entry_length > child->length - offset) {
output << "entry[" << head << "] has offset " << offset
<< " and entry length " << entry_length
- << " which are outside of the childs length of " << child->length;
+ << " which are outside of the child's length of " << child->length;
return false;
}
@@ -400,10 +400,11 @@ CordRepRing* CordRepRing::Mutable(CordRepRing* rep, size_t extra) {
// Get current number of entries, and check for max capacity.
size_t entries = rep->entries();
- size_t min_extra = (std::max)(extra, rep->capacity() * 2 - entries);
if (!rep->refcount.IsOne()) {
- return Copy(rep, rep->head(), rep->tail(), min_extra);
+ return Copy(rep, rep->head(), rep->tail(), extra);
} else if (entries + extra > rep->capacity()) {
+ const size_t min_grow = rep->capacity() + rep->capacity() / 2;
+ const size_t min_extra = (std::max)(extra, min_grow - entries);
CordRepRing* newrep = CordRepRing::New(entries, min_extra);
newrep->Fill<false>(rep, rep->head(), rep->tail());
CordRepRing::Delete(rep);
diff --git a/absl/strings/internal/cord_rep_ring.h b/absl/strings/internal/cord_rep_ring.h
index c74d3353..830f2b2a 100644
--- a/absl/strings/internal/cord_rep_ring.h
+++ b/absl/strings/internal/cord_rep_ring.h
@@ -237,6 +237,18 @@ class CordRepRing : public CordRep {
// Returns the character at `offset`. Requires that `offset < length`.
char GetCharacter(size_t offset) const;
+ // Returns true if this instance manages a single contiguous buffer, in which
+ // case the (optional) output parameter `fragment` is set. Otherwise, the
+ // function returns false, and `fragment` is left unchanged.
+ bool IsFlat(absl::string_view* fragment) const;
+
+ // Returns true if the data starting at `offset` with length `length` is
+ // managed by this instance inside a single contiguous buffer, in which case
+ // the (optional) output parameter `fragment` is set to the contiguous memory
+ // starting at offset `offset` with length `length`. Otherwise, the function
+ // returns false, and `fragment` is left unchanged.
+ bool IsFlat(size_t offset, size_t length, absl::string_view* fragment) const;
+
// Testing only: set capacity to requested capacity.
void SetCapacityForTesting(size_t capacity);
@@ -576,6 +588,25 @@ inline const CordRepRing* CordRep::ring() const {
return static_cast<const CordRepRing*>(this);
}
+inline bool CordRepRing::IsFlat(absl::string_view* fragment) const {
+ if (entries() == 1) {
+ if (fragment) *fragment = entry_data(head());
+ return true;
+ }
+ return false;
+}
+
+inline bool CordRepRing::IsFlat(size_t offset, size_t length,
+ absl::string_view* fragment) const {
+ const Position pos = Find(offset);
+ const absl::string_view data = entry_data(pos.index);
+ if (data.length() >= length && data.length() - length >= pos.offset) {
+ if (fragment) *fragment = data.substr(pos.offset, length);
+ return true;
+ }
+ return false;
+}
+
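
// Worked example for the range check above (illustration only): with a single
// entry holding "Hello world" (length 11), IsFlat(6, 5, &fragment) resolves
// Find(6) to pos.offset == 6 inside that entry; 11 >= 5 and 11 - 5 == 6 >= 6,
// so the call returns true and sets `fragment` to "world". A range that would
// cross into the next entry fails `data.length() - length >= pos.offset` and
// returns false, leaving `fragment` untouched.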
std::ostream& operator<<(std::ostream& s, const CordRepRing& rep);
#ifdef __clang__
diff --git a/absl/strings/internal/cord_rep_ring_reader.h b/absl/strings/internal/cord_rep_ring_reader.h
index 396c0e2c..7ceeaa00 100644
--- a/absl/strings/internal/cord_rep_ring_reader.h
+++ b/absl/strings/internal/cord_rep_ring_reader.h
@@ -40,6 +40,10 @@ class CordRepRingReader {
// The returned value is undefined if this instance is empty.
CordRepRing::index_type index() const { return index_; }
+ // Returns the current node inside the ring buffer for this instance.
+ // The returned value is undefined if this instance is empty.
+ CordRep* node() const { return ring_->entry_child(index_); }
+
// Returns the length of the referenced ring buffer.
// Requires the current instance to be non empty.
size_t length() const {
diff --git a/absl/strings/internal/cordz_functions.cc b/absl/strings/internal/cordz_functions.cc
new file mode 100644
index 00000000..6ad864f1
--- /dev/null
+++ b/absl/strings/internal/cordz_functions.cc
@@ -0,0 +1,104 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_functions.h"
+
+#include <atomic>
+#include <cmath>
+#include <limits>
+#include <random>
+
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/base/internal/exponential_biased.h"
+#include "absl/base/internal/raw_logging.h"
+
+// TODO(b/162942788): weak 'cordz_disabled' value.
+// A strong version is in the 'cordz_disabled_hack_for_odr' library which can
+// be linked in to disable cordz at compile time.
+extern "C" {
+bool absl_internal_cordz_disabled ABSL_ATTRIBUTE_WEAK = false;
+}
+
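
// For illustration (an assumption based on the weak symbol above, not code
// from this change): a strong definition in another translation unit, such as
// the 'cordz_disabled_hack_for_odr' library referenced in the TODO, would
// override the weak default and disable cordz entirely:
//
//   extern "C" { bool absl_internal_cordz_disabled = true; }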
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+// The average interval until the next sample. A value of 0 disables profiling
+// while a value of 1 will profile all Cords.
+std::atomic<int> g_cordz_mean_interval(50000);
+
+} // namespace
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+ABSL_CONST_INIT thread_local int64_t cordz_next_sample = 0;
+
+// kIntervalIfDisabled is the number of profile-eligible events that need to
+// occur before the code re-checks whether cordz is still disabled.
+constexpr int64_t kIntervalIfDisabled = 1 << 16;
+
+ABSL_ATTRIBUTE_NOINLINE bool cordz_should_profile_slow() {
+ // TODO(b/162942788): check if profiling is disabled at compile time.
+ if (absl_internal_cordz_disabled) {
+ ABSL_RAW_LOG(WARNING, "Cordz info disabled at compile time");
+ // We are permanently disabled: set counter to highest possible value.
+ cordz_next_sample = std::numeric_limits<int64_t>::max();
+ return false;
+ }
+
+ thread_local absl::base_internal::ExponentialBiased
+ exponential_biased_generator;
+ int32_t mean_interval = get_cordz_mean_interval();
+
+ // Check if we disabled profiling. If so, set the next sample to a "large"
+ // number to minimize the overhead of the should_profile codepath.
+ if (mean_interval <= 0) {
+ cordz_next_sample = kIntervalIfDisabled;
+ return false;
+ }
+
+ // Check if we're always sampling.
+ if (mean_interval == 1) {
+ cordz_next_sample = 1;
+ return true;
+ }
+
+ if (cordz_next_sample <= 0) {
+ cordz_next_sample = exponential_biased_generator.GetStride(mean_interval);
+ return true;
+ }
+
+ --cordz_next_sample;
+ return false;
+}
+
+void cordz_set_next_sample_for_testing(int64_t next_sample) {
+ cordz_next_sample = next_sample;
+}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+int32_t get_cordz_mean_interval() {
+ return g_cordz_mean_interval.load(std::memory_order_acquire);
+}
+
+void set_cordz_mean_interval(int32_t mean_interval) {
+ g_cordz_mean_interval.store(mean_interval, std::memory_order_release);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_functions.h b/absl/strings/internal/cordz_functions.h
new file mode 100644
index 00000000..c9ba1450
--- /dev/null
+++ b/absl/strings/internal/cordz_functions.h
@@ -0,0 +1,85 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+#define ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+
+#include <stdint.h>
+
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/base/optimization.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Returns the current sample rate. This represents the average interval
+// between samples.
+int32_t get_cordz_mean_interval();
+
+// Sets the sample rate with the average interval between samples.
+void set_cordz_mean_interval(int32_t mean_interval);
+
+// Enable cordz unless any of the following applies:
+// - no thread local support
+// - MSVC build
+// - Android build
+// - Apple build
+// - DLL build
+// Hashtablez is turned off completely in opensource builds.
+// MSVC's static atomics are dynamically initialized in debug mode, which breaks
+// sampling.
+#if defined(ABSL_HAVE_THREAD_LOCAL) && !defined(_MSC_VER) && \
+ !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL) && \
+ !defined(__ANDROID__) && !defined(__APPLE__)
+#define ABSL_INTERNAL_CORDZ_ENABLED 1
+#endif
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+// cordz_next_sample is the number of events until the next sample event. If
+// the value is 1 or less, the code will check on the next event if cordz is
+// enabled, and if so, will sample the Cord. cordz is only enabled when we can
+// use thread locals.
+ABSL_CONST_INIT extern thread_local int64_t cordz_next_sample;
+
+// Determines whether the next cord should be profiled. If so, the thread local
+// `cordz_next_sample` is reset to the interval until the next sample.
+bool cordz_should_profile_slow();
+
+// Returns true if the next cord should be sampled.
+inline bool cordz_should_profile() {
+ if (ABSL_PREDICT_TRUE(cordz_next_sample > 1)) {
+ cordz_next_sample--;
+ return false;
+ }
+ return cordz_should_profile_slow();
+}
+
+// Sets the interval until the next sample (for testing only)
+void cordz_set_next_sample_for_testing(int64_t next_sample);
+
+#else // ABSL_INTERNAL_CORDZ_ENABLED
+
+inline bool cordz_should_profile() { return false; }
+inline void cordz_set_next_sample_for_testing(int64_t) {}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_FUNCTIONS_H_
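
A small sketch of how the knobs above interact (assumptions: cordz is enabled at compile time, and `CountSamples` is a hypothetical helper, not part of this change):

#include <cstdint>
#include "absl/strings/internal/cordz_functions.h"

// Returns how many of `calls` profile checks fired; roughly calls / mean on
// average, all of them for mean == 1, and none when mean <= 0.
int64_t CountSamples(int32_t mean_interval, int64_t calls) {
  absl::cord_internal::set_cordz_mean_interval(mean_interval);
  absl::cord_internal::cordz_set_next_sample_for_testing(0);  // re-roll now
  int64_t sampled = 0;
  for (int64_t i = 0; i < calls; ++i) {
    if (absl::cord_internal::cordz_should_profile()) ++sampled;
  }
  return sampled;
}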
diff --git a/absl/strings/internal/cordz_functions_test.cc b/absl/strings/internal/cordz_functions_test.cc
new file mode 100644
index 00000000..f2cefae3
--- /dev/null
+++ b/absl/strings/internal/cordz_functions_test.cc
@@ -0,0 +1,131 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_functions.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::Eq;
+using ::testing::Ge;
+using ::testing::Le;
+
+TEST(CordzFunctionsTest, SampleRate) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+ int32_t expected_sample_rate = 123;
+ set_cordz_mean_interval(expected_sample_rate);
+ EXPECT_THAT(get_cordz_mean_interval(), Eq(expected_sample_rate));
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+// Cordz is disabled when we don't have thread_local. All calls to
+// should_profile will return false when cordz is disabled, so we might want to
+// avoid those tests.
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+TEST(CordzFunctionsTest, ShouldProfileDisable) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(0);
+ cordz_set_next_sample_for_testing(0);
+ EXPECT_FALSE(cordz_should_profile());
+ // 1 << 16 is from kIntervalIfDisabled in cordz_functions.cc.
+ EXPECT_THAT(cordz_next_sample, Eq(1 << 16));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+TEST(CordzFunctionsTest, ShouldProfileAlways) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(1);
+ cordz_set_next_sample_for_testing(1);
+ EXPECT_TRUE(cordz_should_profile());
+ EXPECT_THAT(cordz_next_sample, Le(1));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+TEST(CordzFunctionsTest, ShouldProfileRate) {
+ static constexpr int kDesiredMeanInterval = 1000;
+ static constexpr int kSamples = 10000;
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(kDesiredMeanInterval);
+
+ int64_t sum_of_intervals = 0;
+ for (int i = 0; i < kSamples; i++) {
+ // Setting next_sample to 0 will force cordz_should_profile to generate a
+ // new value for next_sample each iteration.
+ cordz_set_next_sample_for_testing(0);
+ cordz_should_profile();
+ sum_of_intervals += cordz_next_sample;
+ }
+
+ // The sum of independent exponential variables is an Erlang distribution,
+ // which is a gamma distribution where the shape parameter is equal to the
+ // number of summands. The distribution used for cordz_should_profile is
+ // actually floor(Exponential(1/mean)) which introduces bias. However, we can
+ // apply the squint-really-hard correction factor. That is, when mean is
+ // large, then if we squint really hard the shape of the distribution between
+ // N and N+1 looks like a uniform distribution. On average, each value for
+ // next_sample will be about 0.5 lower than we would expect from an
+ // exponential distribution. This squint-really-hard correction approach won't
+ // work when mean is smaller than about 10 but works fine when mean is 1000.
+ //
+ // We can use R to calculate a confidence interval. This
+ // shows how to generate a confidence interval with a false positive rate of
+ // one in a billion.
+ //
+ // $ R -q
+ // > mean = 1000
+ // > kSamples = 10000
+ // > errorRate = 1e-9
+ // > correction = -kSamples / 2
+ // > low = qgamma(errorRate/2, kSamples, 1/mean) + correction
+ // > high = qgamma(1 - errorRate/2, kSamples, 1/mean) + correction
+ // > low
+ // [1] 9396115
+ // > high
+ // [1] 10618100
+ EXPECT_THAT(sum_of_intervals, Ge(9396115));
+ EXPECT_THAT(sum_of_intervals, Le(10618100));
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+#else // ABSL_INTERNAL_CORDZ_ENABLED
+
+TEST(CordzFunctionsTest, ShouldProfileDisabled) {
+ int32_t orig_sample_rate = get_cordz_mean_interval();
+
+ set_cordz_mean_interval(1);
+ cordz_set_next_sample_for_testing(0);
+ EXPECT_FALSE(cordz_should_profile());
+
+ set_cordz_mean_interval(orig_sample_rate);
+}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_handle.cc b/absl/strings/internal/cordz_handle.cc
new file mode 100644
index 00000000..a73fefed
--- /dev/null
+++ b/absl/strings/internal/cordz_handle.cc
@@ -0,0 +1,139 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "absl/strings/internal/cordz_handle.h"
+
+#include <atomic>
+
+#include "absl/base/internal/raw_logging.h" // For ABSL_RAW_CHECK
+#include "absl/base/internal/spinlock.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::absl::base_internal::SpinLockHolder;
+
+ABSL_CONST_INIT CordzHandle::Queue CordzHandle::global_queue_(absl::kConstInit);
+
+CordzHandle::CordzHandle(bool is_snapshot) : is_snapshot_(is_snapshot) {
+ if (is_snapshot) {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* dq_tail = queue_->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = this;
+ }
+ queue_->dq_tail.store(this, std::memory_order_release);
+ }
+}
+
+CordzHandle::~CordzHandle() {
+ ODRCheck();
+ if (is_snapshot_) {
+ std::vector<CordzHandle*> to_delete;
+ {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* next = dq_next_;
+ if (dq_prev_ == nullptr) {
+ // We were head of the queue, delete every CordzHandle until we reach
+ // either the end of the list, or a snapshot handle.
+ while (next && !next->is_snapshot_) {
+ to_delete.push_back(next);
+ next = next->dq_next_;
+ }
+ } else {
+ // Another CordzHandle existed before this one, don't delete anything.
+ dq_prev_->dq_next_ = next;
+ }
+ if (next) {
+ next->dq_prev_ = dq_prev_;
+ } else {
+ queue_->dq_tail.store(dq_prev_, std::memory_order_release);
+ }
+ }
+ for (CordzHandle* handle : to_delete) {
+ delete handle;
+ }
+ }
+}
+
+bool CordzHandle::SafeToDelete() const {
+ return is_snapshot_ || queue_->IsEmpty();
+}
+
+void CordzHandle::Delete(CordzHandle* handle) {
+ assert(handle);
+ if (handle) {
+ handle->ODRCheck();
+ Queue* const queue = handle->queue_;
+ if (!handle->SafeToDelete()) {
+ SpinLockHolder lock(&queue->mutex);
+ CordzHandle* dq_tail = queue->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ handle->dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = handle;
+ queue->dq_tail.store(handle, std::memory_order_release);
+ return;
+ }
+ }
+ delete handle;
+ }
+}
+
+std::vector<const CordzHandle*> CordzHandle::DiagnosticsGetDeleteQueue() {
+ std::vector<const CordzHandle*> handles;
+ SpinLockHolder lock(&global_queue_.mutex);
+ CordzHandle* dq_tail = global_queue_.dq_tail.load(std::memory_order_acquire);
+ for (const CordzHandle* p = dq_tail; p; p = p->dq_prev_) {
+ handles.push_back(p);
+ }
+ return handles;
+}
+
+bool CordzHandle::DiagnosticsHandleIsSafeToInspect(
+ const CordzHandle* handle) const {
+ ODRCheck();
+ if (!is_snapshot_) return false;
+ if (handle == nullptr) return true;
+ if (handle->is_snapshot_) return false;
+ bool snapshot_found = false;
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = queue_->dq_tail; p; p = p->dq_prev_) {
+ if (p == handle) return !snapshot_found;
+ if (p == this) snapshot_found = true;
+ }
+ ABSL_ASSERT(snapshot_found); // Assert that 'this' is in delete queue.
+ return true;
+}
+
+std::vector<const CordzHandle*>
+CordzHandle::DiagnosticsGetSafeToInspectDeletedHandles() {
+ ODRCheck();
+ std::vector<const CordzHandle*> handles;
+ if (!is_snapshot()) {
+ return handles;
+ }
+
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = dq_next_; p != nullptr; p = p->dq_next_) {
+ if (!p->is_snapshot()) {
+ handles.push_back(p);
+ }
+ }
+ return handles;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
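
To make the delete-queue behaviour above concrete, here is a minimal sketch (the subclass names are hypothetical; the real snapshot type added by this change is CordzSampleToken):

#include "absl/strings/internal/cordz_handle.h"

namespace example {

struct PlainHandle : absl::cord_internal::CordzHandle {
  PlainHandle() : CordzHandle(/*is_snapshot=*/false) {}
};
struct SnapshotHandle : absl::cord_internal::CordzHandle {
  SnapshotHandle() : CordzHandle(/*is_snapshot=*/true) {}
};

void DeleteQueueSketch() {
  auto* snapshot = new SnapshotHandle;  // enters the global delete queue
  auto* handle = new PlainHandle;
  // With a live snapshot, SafeToDelete() is false, so Delete() parks the
  // handle on the queue instead of freeing it immediately.
  absl::cord_internal::CordzHandle::Delete(handle);
  // Destroying the snapshot (the queue head) sweeps the guarded, non-snapshot
  // handles queued behind it, freeing `handle`.
  delete snapshot;
}

}  // namespace example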
diff --git a/absl/strings/internal/cordz_handle.h b/absl/strings/internal/cordz_handle.h
new file mode 100644
index 00000000..5df53c78
--- /dev/null
+++ b/absl/strings/internal/cordz_handle.h
@@ -0,0 +1,131 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_HANDLE_H_
+#define ABSL_STRINGS_CORDZ_HANDLE_H_
+
+#include <atomic>
+#include <vector>
+
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/synchronization/mutex.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// This base class allows multiple types of object (CordzInfo and
+// CordzSampleToken) to exist simultaneously on the delete queue (pointed to by
+// global_queue_.dq_tail and traversed using dq_prev_ and dq_next_). The
+// delete queue guarantees that once a profiler creates a CordzSampleToken and
+// has gained visibility into a CordzInfo object, that CordzInfo object will not
+// be deleted prematurely. This allows the profiler to inspect all CordzInfo
+// objects that are alive without needing to hold a global lock.
+class CordzHandle {
+ public:
+ CordzHandle() : CordzHandle(false) {}
+
+ bool is_snapshot() const { return is_snapshot_; }
+
+ // Returns true if this instance is safe to be deleted because it is either a
+ // snapshot, which is always safe to delete, or not included in the global
+ // delete queue and thus not included in any snapshot.
+  // Callers are responsible for making sure this instance cannot be newly
+ // discovered by other threads. For example, CordzInfo instances first de-list
+ // themselves from the global CordzInfo list before determining if they are
+ // safe to be deleted directly.
+ // If SafeToDelete returns false, callers MUST use the Delete() method to
+ // safely queue CordzHandle instances for deletion.
+ bool SafeToDelete() const;
+
+ // Deletes the provided instance, or puts it on the delete queue to be deleted
+  // once there are no more sample token (snapshot) instances potentially
+ // referencing the instance. `handle` should not be null.
+ static void Delete(CordzHandle* handle);
+
+ // Returns the current entries in the delete queue in LIFO order.
+ static std::vector<const CordzHandle*> DiagnosticsGetDeleteQueue();
+
+ // Returns true if the provided handle is nullptr or guarded by this handle.
+ // Since the CordzSnapshot token is itself a CordzHandle, this method will
+ // allow tests to check if that token is keeping an arbitrary CordzHandle
+ // alive.
+ bool DiagnosticsHandleIsSafeToInspect(const CordzHandle* handle) const;
+
+ // Returns the current entries in the delete queue, in LIFO order, that are
+ // protected by this. CordzHandle objects are only placed on the delete queue
+ // after CordzHandle::Delete is called with them as an argument. Only
+ // CordzHandle objects that are not also CordzSnapshot objects will be
+ // included in the return vector. For each of the handles in the return
+ // vector, the earliest that their memory can be freed is when this
+ // CordzSnapshot object is deleted.
+ std::vector<const CordzHandle*> DiagnosticsGetSafeToInspectDeletedHandles();
+
+ protected:
+ explicit CordzHandle(bool is_snapshot);
+ virtual ~CordzHandle();
+
+ private:
+ // Global queue data. CordzHandle stores a pointer to the global queue
+ // instance to harden against ODR violations.
+ struct Queue {
+ constexpr explicit Queue(absl::ConstInitType)
+ : mutex(absl::kConstInit,
+ absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ absl::base_internal::SpinLock mutex;
+ std::atomic<CordzHandle*> dq_tail ABSL_GUARDED_BY(mutex){nullptr};
+
+    // Returns true if this delete queue is empty. This method does not acquire
+    // the lock, but does a 'load acquire' observation on the delete queue tail.
+    // It is used inside Delete() to check whether the delete queue has any
+    // entries without holding the lock. The assumption is that the caller is in
+    // the state of 'being deleted', and cannot be newly discovered by a
+    // concurrent 'being constructed' snapshot instance. Practically, this means
+    // that any such discovery ('find', 'first', 'next', etc.) must have proper
+    // 'happens before / after' semantics and atomic fences.
+ bool IsEmpty() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return dq_tail.load(std::memory_order_acquire) == nullptr;
+ }
+ };
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(queue_ == &global_queue_, "ODR violation in Cord");
+#endif
+ }
+
+ ABSL_CONST_INIT static Queue global_queue_;
+ Queue* const queue_ = &global_queue_;
+ const bool is_snapshot_;
+
+ // dq_prev_ and dq_next_ require the global queue mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that queue_ and global_queue_ are one and the same.
+ CordzHandle* dq_prev_ = nullptr;
+ CordzHandle* dq_next_ = nullptr;
+};
+
+class CordzSnapshot : public CordzHandle {
+ public:
+ CordzSnapshot() : CordzHandle(true) {}
+};
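+
+// Illustrative usage sketch (not part of the API) of the snapshot contract
+// described above: a diagnostics thread creates a CordzSnapshot to pin any
+// handles that are queued for deletion, and then inspects only the handles
+// that this snapshot explicitly guards.
+//
+//   CordzSnapshot snapshot;
+//   for (const CordzHandle* handle :
+//        snapshot.DiagnosticsGetSafeToInspectDeletedHandles()) {
+//     // Each `handle` stays alive at least until `snapshot` is destroyed.
+//   }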
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_HANDLE_H_
diff --git a/absl/strings/internal/cordz_handle_test.cc b/absl/strings/internal/cordz_handle_test.cc
new file mode 100644
index 00000000..fd68e06b
--- /dev/null
+++ b/absl/strings/internal/cordz_handle_test.cc
@@ -0,0 +1,265 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "absl/strings/internal/cordz_handle.h"
+
+#include <random>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/memory/memory.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Gt;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+
+// Local less verbose helper
+std::vector<const CordzHandle*> DeleteQueue() {
+ return CordzHandle::DiagnosticsGetDeleteQueue();
+}
+
+struct CordzHandleDeleteTracker : public CordzHandle {
+ bool* deleted;
+ explicit CordzHandleDeleteTracker(bool* deleted) : deleted(deleted) {}
+ ~CordzHandleDeleteTracker() override { *deleted = true; }
+};
+
+TEST(CordzHandleTest, DeleteQueueIsEmpty) {
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzHandleTest, CordzHandleCreateDelete) {
+ bool deleted = false;
+ auto* handle = new CordzHandleDeleteTracker(&deleted);
+ EXPECT_FALSE(handle->is_snapshot());
+ EXPECT_TRUE(handle->SafeToDelete());
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+
+ CordzHandle::Delete(handle);
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+ EXPECT_TRUE(deleted);
+}
+
+TEST(CordzHandleTest, CordzSnapshotCreateDelete) {
+ auto* snapshot = new CordzSnapshot();
+ EXPECT_TRUE(snapshot->is_snapshot());
+ EXPECT_TRUE(snapshot->SafeToDelete());
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot));
+ delete snapshot;
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzHandleTest, CordzHandleCreateDeleteWithSnapshot) {
+ bool deleted = false;
+ auto* snapshot = new CordzSnapshot();
+ auto* handle = new CordzHandleDeleteTracker(&deleted);
+ EXPECT_FALSE(handle->SafeToDelete());
+
+ CordzHandle::Delete(handle);
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle, snapshot));
+ EXPECT_FALSE(deleted);
+ EXPECT_FALSE(handle->SafeToDelete());
+
+ delete snapshot;
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+ EXPECT_TRUE(deleted);
+}
+
+TEST(CordzHandleTest, MultiSnapshot) {
+ bool deleted[3] = {false, false, false};
+
+ CordzSnapshot* snapshot[3];
+ CordzHandleDeleteTracker* handle[3];
+ for (int i = 0; i < 3; ++i) {
+ snapshot[i] = new CordzSnapshot();
+ handle[i] = new CordzHandleDeleteTracker(&deleted[i]);
+ CordzHandle::Delete(handle[i]);
+ }
+
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2], handle[1],
+ snapshot[1], handle[0], snapshot[0]));
+ EXPECT_THAT(deleted, ElementsAre(false, false, false));
+
+ delete snapshot[1];
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2], handle[1],
+ handle[0], snapshot[0]));
+ EXPECT_THAT(deleted, ElementsAre(false, false, false));
+
+ delete snapshot[0];
+ EXPECT_THAT(DeleteQueue(), ElementsAre(handle[2], snapshot[2]));
+ EXPECT_THAT(deleted, ElementsAre(true, true, false));
+
+ delete snapshot[2];
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+  EXPECT_THAT(deleted, ElementsAre(true, true, true));
+}
+
+TEST(CordzHandleTest, DiagnosticsHandleIsSafeToInspect) {
+ CordzSnapshot snapshot1;
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(nullptr));
+
+ auto* handle1 = new CordzHandle();
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+
+ CordzHandle::Delete(handle1);
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+
+ CordzSnapshot snapshot2;
+ auto* handle2 = new CordzHandle();
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle2));
+ EXPECT_FALSE(snapshot2.DiagnosticsHandleIsSafeToInspect(handle1));
+ EXPECT_TRUE(snapshot2.DiagnosticsHandleIsSafeToInspect(handle2));
+
+ CordzHandle::Delete(handle2);
+ EXPECT_TRUE(snapshot1.DiagnosticsHandleIsSafeToInspect(handle1));
+}
+
+TEST(CordzHandleTest, DiagnosticsGetSafeToInspectDeletedHandles) {
+ EXPECT_THAT(DeleteQueue(), IsEmpty());
+
+ auto* handle = new CordzHandle();
+ auto* snapshot1 = new CordzSnapshot();
+
+ // snapshot1 should be able to see handle.
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot1));
+ EXPECT_TRUE(snapshot1->DiagnosticsHandleIsSafeToInspect(handle));
+ EXPECT_THAT(snapshot1->DiagnosticsGetSafeToInspectDeletedHandles(),
+ IsEmpty());
+
+ // This handle will be safe to inspect as long as snapshot1 is alive. However,
+ // since only snapshot1 can prove that it's alive, it will be hidden from
+ // snapshot2.
+ CordzHandle::Delete(handle);
+
+ // This snapshot shouldn't be able to see handle because handle was already
+ // sent to Delete.
+ auto* snapshot2 = new CordzSnapshot();
+
+ // DeleteQueue elements are LIFO order.
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot2, handle, snapshot1));
+
+ EXPECT_TRUE(snapshot1->DiagnosticsHandleIsSafeToInspect(handle));
+ EXPECT_FALSE(snapshot2->DiagnosticsHandleIsSafeToInspect(handle));
+
+ EXPECT_THAT(snapshot1->DiagnosticsGetSafeToInspectDeletedHandles(),
+ ElementsAre(handle));
+ EXPECT_THAT(snapshot2->DiagnosticsGetSafeToInspectDeletedHandles(),
+ IsEmpty());
+
+ CordzHandle::Delete(snapshot1);
+ EXPECT_THAT(DeleteQueue(), ElementsAre(snapshot2));
+
+ CordzHandle::Delete(snapshot2);
+ EXPECT_THAT(DeleteQueue(), IsEmpty());
+}
+
+// Create and delete CordzHandle and CordzSnapshot objects in multiple threads
+// so that tsan has some time to chew on it and look for memory problems.
+TEST(CordzHandleTest, MultiThreaded) {
+ Notification stop;
+ static constexpr int kNumThreads = 4;
+ // Keep the number of handles relatively small so that the test will naturally
+ // transition to an empty delete queue during the test. If there are, say, 100
+ // handles, that will virtually never happen. With 10 handles and around 50k
+ // iterations in each of 4 threads, the delete queue appears to become empty
+ // around 200 times.
+ static constexpr int kNumHandles = 10;
+
+ // Each thread is going to pick a random index and atomically swap its
+ // CordzHandle with one in handles. This way, each thread can avoid
+ // manipulating a CordzHandle that might be operated upon in another thread.
+ std::vector<std::atomic<CordzHandle*>> handles(kNumHandles);
+
+  // Global flag that is set when any thread observed some 'safe to inspect'
+  // handles. On some platforms and in OSS tests, pool threads may be starved,
+  // stalled, or simply get a few unlucky random 'handle' coin tosses, so we
+  // satisfy this test by observing that 'some' thread did something
+  // meaningful, which should minimize the potential for flakes.
+ std::atomic<bool> found_safe_to_inspect(false);
+
+ {
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&stop, &handles, &found_safe_to_inspect]() {
+ std::minstd_rand gen;
+ std::uniform_int_distribution<int> dist_type(0, 2);
+ std::uniform_int_distribution<int> dist_handle(0, kNumHandles - 1);
+
+ while (!stop.HasBeenNotified()) {
+ CordzHandle* handle;
+ switch (dist_type(gen)) {
+ case 0:
+ handle = new CordzHandle();
+ break;
+ case 1:
+ handle = new CordzSnapshot();
+ break;
+ default:
+ handle = nullptr;
+ break;
+ }
+ CordzHandle* old_handle = handles[dist_handle(gen)].exchange(handle);
+ if (old_handle != nullptr) {
+ std::vector<const CordzHandle*> safe_to_inspect =
+ old_handle->DiagnosticsGetSafeToInspectDeletedHandles();
+ for (const CordzHandle* handle : safe_to_inspect) {
+ // We're in a tight loop, so don't generate too many error
+ // messages.
+ ASSERT_FALSE(handle->is_snapshot());
+ }
+ if (!safe_to_inspect.empty()) {
+ found_safe_to_inspect.store(true);
+ }
+ CordzHandle::Delete(old_handle);
+ }
+ }
+
+ // Have each thread attempt to clean up everything. Some thread will be
+ // the last to reach this cleanup code, and it will be guaranteed to
+ // clean up everything because nothing remains to create new handles.
+ for (auto& h : handles) {
+ if (CordzHandle* handle = h.exchange(nullptr)) {
+ CordzHandle::Delete(handle);
+ }
+ }
+ });
+ }
+
+ // The threads will hammer away. Give it a little bit of time for tsan to
+ // spot errors.
+ absl::SleepFor(absl::Seconds(3));
+ stop.Notify();
+ }
+
+ // Confirm that the test did *something*. This check will be satisfied as
+ // long as any thread has deleted a CordzSnapshot object and a non-snapshot
+ // CordzHandle was deleted after the CordzSnapshot was created.
+ // See also comments on `found_safe_to_inspect`
+ EXPECT_TRUE(found_safe_to_inspect.load());
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_info.cc b/absl/strings/internal/cordz_info.cc
new file mode 100644
index 00000000..a6b045ff
--- /dev/null
+++ b/absl/strings/internal/cordz_info.cc
@@ -0,0 +1,426 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_info.h"
+
+#include "absl/base/config.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/container/inlined_vector.h"
+#include "absl/debugging/stacktrace.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/mutex.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::absl::base_internal::SpinLockHolder;
+
+constexpr int CordzInfo::kMaxStackDepth;
+
+ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{absl::kConstInit};
+
+namespace {
+
+// CordRepAnalyzer performs the analysis of a cord.
+//
+// It computes absolute node counts and total memory usage, and an 'estimated
+// fair share memory usage' statistic.
+// Conceptually, it divides the 'memory usage' at each location in the 'cord
+// graph' by the cumulative reference count of that location. The cumulative
+// reference count is the factored total of all edges leading into that node.
+//
+// The top level node is treated specially: we assume the current thread
+// (typically called from the CordzHandler) holds a reference purely to
+// perform a safe analysis and is not part of the application. So we
+// subtract 1 from the reference count of the top node to compute the
+// 'application fair share' excluding the reference of the current thread.
+//
+// An example of fair sharing, and why we multiply reference counts:
+// Assume we have 2 CordReps, both being a Substring referencing a Flat:
+// CordSubstring A (refcount = 5) --> child Flat C (refcount = 2)
+// CordSubstring B (refcount = 9) --> child Flat C (refcount = 2)
+//
+// Flat C has 2 incoming edges from the 2 substrings (refcount = 2) and is not
+// referenced directly anywhere else. Translated into a 'fair share', we then
+// attribute 50% of the memory (memory / refcount = 2) to each incoming edge.
+// Rep A has a refcount of 5, so we attribute each incoming edge 1 / 5th of the
+// memory cost below it, i.e.: the fair share of Rep A of the memory used by C
+// is then 'memory C / (refcount C * refcount A) + (memory A / refcount A)'.
+// It is also easy to see how all incoming edges add up to 100%.
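+//
+// As a concrete (hypothetical) instance of the formula above: if Flat C uses
+// 1000 bytes and Substring A uses 64 bytes, then each of A's 5 incoming edges
+// is attributed 1000 / (2 * 5) + 64 / 5 = 112.8 bytes. Summed over A's 5
+// edges this yields 500 + 64 bytes (half of C plus all of A), and B's 9 edges
+// similarly account for the other half of C plus all of B.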
+class CordRepAnalyzer {
+ public:
+ // Creates an analyzer instance binding to `statistics`.
+ explicit CordRepAnalyzer(CordzStatistics& statistics)
+ : statistics_(statistics) {}
+
+ // Analyzes the memory statistics and node counts for the provided `rep`, and
+ // adds the results to `statistics`. Note that node counts and memory sizes
+ // are not initialized, computed values are added to any existing values.
+ void AnalyzeCordRep(const CordRep* rep) {
+    // Process all linear nodes.
+    // As per the class comments, use refcount - 1 on the top level node, as
+    // the top level node is assumed to be referenced only for analysis
+    // purposes.
+ size_t refcount = rep->refcount.Get();
+ RepRef repref{rep, (refcount > 1) ? refcount - 1 : 1};
+
+ // Process all top level linear nodes (substrings and flats).
+ repref = CountLinearReps(repref, memory_usage_);
+
+    // We should have either a concat or ring node if not null.
+ if (repref.rep != nullptr) {
+ assert(repref.rep->tag == RING || repref.rep->tag == CONCAT);
+ if (repref.rep->tag == RING) {
+ AnalyzeRing(repref);
+ } else if (repref.rep->tag == CONCAT) {
+ AnalyzeConcat(repref);
+ }
+ }
+
+ // Adds values to output
+ statistics_.estimated_memory_usage += memory_usage_.total;
+ statistics_.estimated_fair_share_memory_usage += memory_usage_.fair_share;
+ }
+
+ private:
+ // RepRef identifies a CordRep* inside the Cord tree with its cumulative
+ // refcount including itself. For example, a tree consisting of a substring
+ // with a refcount of 3 and a child flat with a refcount of 4 will have RepRef
+ // refcounts of 3 and 12 respectively.
+ struct RepRef {
+ const CordRep* rep;
+ size_t refcount;
+
+ // Returns a 'child' RepRef which contains the cumulative reference count of
+ // this instance multiplied by the child's reference count.
+ RepRef Child(const CordRep* child) const {
+ return RepRef{child, refcount * child->refcount.Get()};
+ }
+ };
+
+ // Memory usage values
+ struct MemoryUsage {
+ size_t total = 0;
+ size_t fair_share = 0;
+
+    // Adds `size` memory usage to this class, with a cumulative (recursive)
+    // reference count of `refcount`.
+ void Add(size_t size, size_t refcount) {
+ total += size;
+ fair_share += size / refcount;
+ }
+ };
+
+ // Returns `rr` if `rr.rep` is not null and a CONCAT type.
+ // Asserts that `rr.rep` is a concat node or null.
+ static RepRef AssertConcat(RepRef repref) {
+ const CordRep* rep = repref.rep;
+ assert(rep == nullptr || rep->tag == CONCAT);
+ return (rep != nullptr && rep->tag == CONCAT) ? repref : RepRef{nullptr, 0};
+ }
+
+  // Counts a flat of the provided allocated size.
+ void CountFlat(size_t size) {
+ statistics_.node_count++;
+ statistics_.node_counts.flat++;
+ if (size <= 64) {
+ statistics_.node_counts.flat_64++;
+ } else if (size <= 128) {
+ statistics_.node_counts.flat_128++;
+ } else if (size <= 256) {
+ statistics_.node_counts.flat_256++;
+ } else if (size <= 512) {
+ statistics_.node_counts.flat_512++;
+ } else if (size <= 1024) {
+ statistics_.node_counts.flat_1k++;
+ }
+ }
+
+  // Processes 'linear' reps (substring, flat, external) not requiring iteration
+  // or recursion. Returns RepRef{nullptr, 0} if all reps were processed, else
+  // returns the top-most non-linear concat or ring cordrep.
+  // Node counts are updated into `statistics_`, memory usage is updated into
+  // `memory_usage`, which typically references `memory_usage_` except for ring
+  // buffers where we count children unrounded.
+ RepRef CountLinearReps(RepRef rep, MemoryUsage& memory_usage) {
+ // Consume all substrings
+ while (rep.rep->tag == SUBSTRING) {
+ statistics_.node_count++;
+ statistics_.node_counts.substring++;
+ memory_usage.Add(sizeof(CordRepSubstring), rep.refcount);
+ rep = rep.Child(rep.rep->substring()->child);
+ }
+
+ // Consume possible FLAT
+ if (rep.rep->tag >= FLAT) {
+ size_t size = rep.rep->flat()->AllocatedSize();
+ CountFlat(size);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ // Consume possible external
+ if (rep.rep->tag == EXTERNAL) {
+ statistics_.node_count++;
+ statistics_.node_counts.external++;
+ size_t size = rep.rep->length + sizeof(CordRepExternalImpl<intptr_t>);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ return rep;
+ }
+
+ // Analyzes the provided concat node in a flattened recursive way.
+ void AnalyzeConcat(RepRef rep) {
+ absl::InlinedVector<RepRef, 47> pending;
+
+ while (rep.rep != nullptr) {
+ const CordRepConcat* concat = rep.rep->concat();
+ RepRef left = rep.Child(concat->left);
+ RepRef right = rep.Child(concat->right);
+
+ statistics_.node_count++;
+ statistics_.node_counts.concat++;
+ memory_usage_.Add(sizeof(CordRepConcat), rep.refcount);
+
+ right = AssertConcat(CountLinearReps(right, memory_usage_));
+ rep = AssertConcat(CountLinearReps(left, memory_usage_));
+ if (rep.rep != nullptr) {
+ if (right.rep != nullptr) {
+ pending.push_back(right);
+ }
+ } else if (right.rep != nullptr) {
+ rep = right;
+ } else if (!pending.empty()) {
+ rep = pending.back();
+ pending.pop_back();
+ }
+ }
+ }
+
+ // Counts the provided ring buffer child into `child_usage`.
+ void CountRingChild(const CordRep* child, MemoryUsage& child_usage) {
+ RepRef rep{child, static_cast<size_t>(child->refcount.Get())};
+ rep = CountLinearReps(rep, child_usage);
+ assert(rep.rep == nullptr);
+ }
+
+  // Analyzes the provided ring. As ring buffers can have many child nodes, the
+  // effect of rounding errors can become non-trivial, so we compute the totals
+  // first at the ring level (including the children's fair share totals), and
+  // only then divide the ring's total fair share by its cumulative refcount.
+ void AnalyzeRing(RepRef rep) {
+ statistics_.node_count++;
+ statistics_.node_counts.ring++;
+ MemoryUsage ring_usage;
+ const CordRepRing* ring = rep.rep->ring();
+ ring_usage.Add(CordRepRing::AllocSize(ring->capacity()), 1);
+ ring->ForEach([&](CordRepRing::index_type pos) {
+ CountRingChild(ring->entry_child(pos), ring_usage);
+ });
+ memory_usage_.total += ring_usage.total;
+ memory_usage_.fair_share += ring_usage.fair_share / rep.refcount;
+ }
+
+ CordzStatistics& statistics_;
+ MemoryUsage memory_usage_;
+};
+
+} // namespace
+
+CordzInfo* CordzInfo::Head(const CordzSnapshot& snapshot) {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // We can do an 'unsafe' load of 'head', as we are guaranteed that the
+ // instance it points to is kept alive by the provided CordzSnapshot, so we
+ // can simply return the current value using an acquire load.
+ // We do enforce in DEBUG builds that the 'head' value is present in the
+ // delete queue: ODR violations may lead to 'snapshot' and 'global_list_'
+ // being in different libraries / modules.
+ CordzInfo* head = global_list_.head.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(head));
+ return head;
+}
+
+CordzInfo* CordzInfo::Next(const CordzSnapshot& snapshot) const {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // Similar to the 'Head()' function, we do not need a mutex here.
+ CordzInfo* next = ci_next_.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(this));
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(next));
+ return next;
+}
+
+void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(!cord.is_profiled());
+ CordzInfo* cordz_info = new CordzInfo(cord.as_tree(), nullptr, method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(src.is_tree());
+
+  // Unsample the current cord as it is being replaced with 'src', so any
+  // method history is no longer relevant.
+ CordzInfo* cordz_info = cord.cordz_info();
+ if (cordz_info != nullptr) cordz_info->Untrack();
+
+ // Start new cord sample
+ cordz_info = new CordzInfo(cord.as_tree(), src.cordz_info(), method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) {
+ if (src == nullptr) return MethodIdentifier::kUnknown;
+ return src->parent_method_ != MethodIdentifier::kUnknown ? src->parent_method_
+ : src->method_;
+}
+
+int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) {
+ assert(stack);
+ if (src == nullptr) return 0;
+ if (src->parent_stack_depth_) {
+ memcpy(stack, src->parent_stack_, src->parent_stack_depth_ * sizeof(void*));
+ return src->parent_stack_depth_;
+ }
+ memcpy(stack, src->stack_, src->stack_depth_ * sizeof(void*));
+ return src->stack_depth_;
+}
+
+CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method)
+ : rep_(rep),
+ stack_depth_(absl::GetStackTrace(stack_, /*max_depth=*/kMaxStackDepth,
+ /*skip_count=*/1)),
+ parent_stack_depth_(FillParentStack(src, parent_stack_)),
+ method_(method),
+ parent_method_(GetParentMethod(src)),
+ create_time_(absl::Now()) {
+ update_tracker_.LossyAdd(method);
+ if (src) {
+ // Copy parent counters.
+ update_tracker_.LossyAdd(src->update_tracker_);
+ }
+}
+
+CordzInfo::~CordzInfo() {
+ // `rep_` is potentially kept alive if CordzInfo is included
+ // in a collection snapshot (which should be rare).
+ if (ABSL_PREDICT_FALSE(rep_)) {
+ CordRep::Unref(rep_);
+ }
+}
+
+void CordzInfo::Track() {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ if (head != nullptr) {
+ head->ci_prev_.store(this, std::memory_order_release);
+ }
+ ci_next_.store(head, std::memory_order_release);
+ list_->head.store(this, std::memory_order_release);
+}
+
+void CordzInfo::Untrack() {
+ ODRCheck();
+ {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
+ CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
+
+ if (next) {
+ ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
+ next->ci_prev_.store(prev, std::memory_order_release);
+ }
+ if (prev) {
+ ABSL_ASSERT(head != this);
+ ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
+ prev->ci_next_.store(next, std::memory_order_release);
+ } else {
+ ABSL_ASSERT(head == this);
+ list_->head.store(next, std::memory_order_release);
+ }
+ }
+
+  // We can no longer be discovered: perform a fast path check to see whether
+  // we are listed on any delete queue; if not, directly delete this instance.
+ if (SafeToDelete()) {
+ UnsafeSetCordRep(nullptr);
+ delete this;
+ return;
+ }
+
+  // We are likely part of a snapshot; extend the life of the CordRep.
+ {
+ absl::MutexLock lock(&mutex_);
+ if (rep_) CordRep::Ref(rep_);
+ }
+ CordzHandle::Delete(this);
+}
+
+void CordzInfo::Lock(MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_) {
+ mutex_.Lock();
+ update_tracker_.LossyAdd(method);
+ assert(rep_);
+}
+
+void CordzInfo::Unlock() ABSL_UNLOCK_FUNCTION(mutex_) {
+ bool tracked = rep_ != nullptr;
+ mutex_.Unlock();
+ if (!tracked) {
+ Untrack();
+ }
+}
+
+absl::Span<void* const> CordzInfo::GetStack() const {
+ return absl::MakeConstSpan(stack_, stack_depth_);
+}
+
+absl::Span<void* const> CordzInfo::GetParentStack() const {
+ return absl::MakeConstSpan(parent_stack_, parent_stack_depth_);
+}
+
+CordzStatistics CordzInfo::GetCordzStatistics() const {
+ CordzStatistics stats;
+ stats.method = method_;
+ stats.parent_method = parent_method_;
+ stats.update_tracker = update_tracker_;
+ if (CordRep* rep = RefCordRep()) {
+ stats.size = rep->length;
+ CordRepAnalyzer analyzer(stats);
+ analyzer.AnalyzeCordRep(rep);
+ CordRep::Unref(rep);
+ }
+ return stats;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_info.h b/absl/strings/internal/cordz_info.h
new file mode 100644
index 00000000..29237930
--- /dev/null
+++ b/absl/strings/internal/cordz_info.h
@@ -0,0 +1,270 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_INFO_H_
+#define ABSL_STRINGS_CORDZ_INFO_H_
+
+#include <atomic>
+#include <cstdint>
+#include <functional>
+
+#include "absl/base/config.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/internal/spinlock.h"
+#include "absl/base/thread_annotations.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_functions.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/mutex.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzInfo tracks a profiled Cord. Each of these objects can be in two places.
+// If a Cord is alive, the CordzInfo will be in the global_cordz_infos map, and
+// can also be retrieved via the linked list starting with
+// global_cordz_infos_head and continued via the cordz_info_next() method. When
+// a Cord has reached the end of its lifespan, the CordzInfo object will be
+// migrated out of the global_cordz_infos list and the global_cordz_infos_map,
+// and will either be deleted or appended to the global_delete_queue. If it is
+// placed on the global_delete_queue, the CordzInfo object will be cleaned in
+// the destructor of a CordzSampleToken object.
+class ABSL_LOCKABLE CordzInfo : public CordzHandle {
+ public:
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // TrackCord creates a CordzInfo instance which tracks important metrics of
+  // a sampled cord, and stores the created CordzInfo instance into `cord`. All
+ // CordzInfo instances are placed in a global list which is used to discover
+ // and snapshot all actively tracked cords. Callers are responsible for
+  // calling Untrack() before the tracked Cord instance is deleted, or to
+ // stop tracking the sampled Cord. Callers are also responsible for guarding
+ // changes to the 'tree' value of a Cord (InlineData.tree) through the Lock()
+ // and Unlock() calls. Any change resulting in a new tree value for the cord
+ // requires a call to SetCordRep() before the old tree has been unreffed
+ // and/or deleted. `method` identifies the Cord public API method initiating
+ // the cord to be sampled.
+ // Requires `cord` to hold a tree, and `cord.cordz_info()` to be null.
+ static void TrackCord(InlineData& cord, MethodIdentifier method);
+
+ // Identical to TrackCord(), except that this function fills the
+ // `parent_stack` and `parent_method` properties of the returned CordzInfo
+ // instance from the provided `src` instance if `src` is sampled.
+ // This function should be used for sampling 'copy constructed' and 'copy
+  // assigned' cords. This function allows `cord` to be already sampled, in
+ // which case the CordzInfo will be newly created from `src`.
+ static void TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+ // Uses `cordz_should_profile` to randomly pick cords to be sampled, and if
+ // so, invokes `TrackCord` to start sampling `cord`.
+ static void MaybeTrackCord(InlineData& cord, MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+  // `src` identifies a 'parent' cord whose content is copied into the current
+  // cord, typically the input cord for an assign method or copy constructor.
+ // Invokes the corresponding `TrackCord` method if either cord is sampled, or
+ // if `cord` is randomly picked for sampling. Possible scenarios:
+ // * `src` is sampled: `cord` will be set to sampled if not already sampled.
+ // Parent stack and update stats of `src` are copied into `cord`
+ // * `src` is not sampled: `cord` may be randomly picked for sampling.
+ static void MaybeTrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Stops tracking changes for a sampled cord, and deletes the provided info.
+ // This function must be called before the sampled cord instance is deleted,
+ // and before the root cordrep of the sampled cord is unreffed.
+ // This function may extend the lifetime of the cordrep in cases where the
+ // CordInfo instance is being held by a concurrent collection thread.
+ void Untrack();
+
+  // Invokes Untrack() on `info` if `info` is not null.
+ static void MaybeUntrackCord(CordzInfo* info);
+
+ CordzInfo() = delete;
+ CordzInfo(const CordzInfo&) = delete;
+ CordzInfo& operator=(const CordzInfo&) = delete;
+
+ // Retrieves the oldest existing CordzInfo.
+ static CordzInfo* Head(const CordzSnapshot& snapshot)
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ // Retrieves the next oldest existing CordzInfo older than 'this' instance.
+ CordzInfo* Next(const CordzSnapshot& snapshot) const
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ // Locks this instance for the update identified by `method`.
+ // Increases the count for `method` in `update_tracker`.
+ void Lock(MethodIdentifier method) ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_);
+
+ // Unlocks this instance. If the contained `rep` has been set to null
+ // indicating the Cord has been cleared or is otherwise no longer sampled,
+ // then this method will delete this CordzInfo instance.
+ void Unlock() ABSL_UNLOCK_FUNCTION(mutex_);
+
+ // Asserts that this CordzInfo instance is locked.
+ void AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_);
+
+  // Updates the `rep` property of this instance. This method is invoked by
+  // Cord logic each time the root node of a sampled Cord changes, and before
+  // the reference on the old root is released. This guarantees that collection
+ // code can always safely take a reference on the tracked cord.
+ // Requires a lock to be held through the `Lock()` method.
+ // TODO(b/117940323): annotate with ABSL_EXCLUSIVE_LOCKS_REQUIRED once all
+ // Cord code is in a state where this can be proven true by the compiler.
+ void SetCordRep(CordRep* rep);
+
+ // Returns the current `rep` property of this instance with a reference
+ // added, or null if this instance represents a cord that has since been
+ // deleted or untracked.
+ CordRep* RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_);
+
+ // Returns the current value of `rep_` for testing purposes only.
+ CordRep* GetCordRepForTesting() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return rep_;
+ }
+
+  // Returns the stack trace for where the cord was first sampled. Cords are
+  // potentially sampled when they are promoted from an inlined cord to a tree
+  // or ring representation, which is not necessarily the location where the
+  // cord was first created. Some cords are created as inlined cords, and only
+  // become non-inlined cords as data is added. Typically, however, the sampled
+  // location represents reasonably well where the cord is 'created'.
+ absl::Span<void* const> GetStack() const;
+
+ // Returns the stack trace for a sampled cord's 'parent stack trace'. This
+ // value may be set if the cord is sampled (promoted) after being created
+ // from, or being assigned the value of an existing (sampled) cord.
+ absl::Span<void* const> GetParentStack() const;
+
+ // Retrieves the CordzStatistics associated with this Cord. The statistics
+ // are only updated when a Cord goes through a mutation, such as an Append
+ // or RemovePrefix.
+ CordzStatistics GetCordzStatistics() const;
+
+ private:
+ using SpinLock = absl::base_internal::SpinLock;
+ using SpinLockHolder = ::absl::base_internal::SpinLockHolder;
+
+ // Global cordz info list. CordzInfo stores a pointer to the global list
+ // instance to harden against ODR violations.
+ struct List {
+ constexpr explicit List(absl::ConstInitType)
+ : mutex(absl::kConstInit,
+ absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ SpinLock mutex;
+ std::atomic<CordzInfo*> head ABSL_GUARDED_BY(mutex){nullptr};
+ };
+
+ static constexpr int kMaxStackDepth = 64;
+
+ explicit CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method);
+ ~CordzInfo() override;
+
+ // Sets `rep_` without holding a lock.
+ void UnsafeSetCordRep(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ void Track();
+
+  // Returns the parent method from `src`: `method_` if `parent_method_` is
+  // kUnknown, otherwise `parent_method_`.
+ // Returns kUnknown if `src` is null.
+ static MethodIdentifier GetParentMethod(const CordzInfo* src);
+
+  // Fills the provided stack from `src`, copying `parent_stack_` if it is
+  // non-empty and `stack_` otherwise, and returns the number of copied
+  // frames.
+ // Returns 0 if `src` is null.
+ static int FillParentStack(const CordzInfo* src, void** stack);
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(list_ == &global_list_, "ODR violation in Cord");
+#endif
+ }
+
+ ABSL_CONST_INIT static List global_list_;
+ List* const list_ = &global_list_;
+
+ // ci_prev_ and ci_next_ require the global list mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that list_ and global_list_ are one and the same.
+ std::atomic<CordzInfo*> ci_prev_{nullptr};
+ std::atomic<CordzInfo*> ci_next_{nullptr};
+
+ mutable absl::Mutex mutex_;
+ CordRep* rep_ ABSL_GUARDED_BY(mutex_);
+
+ void* stack_[kMaxStackDepth];
+ void* parent_stack_[kMaxStackDepth];
+ const int stack_depth_;
+ const int parent_stack_depth_;
+ const MethodIdentifier method_;
+ const MethodIdentifier parent_method_;
+ CordzUpdateTracker update_tracker_;
+ const absl::Time create_time_;
+};
+
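+// Illustrative sketch (not part of the API) of the update protocol documented
+// on TrackCord() / Lock() / SetCordRep() / Unlock() above, for Cord code that
+// replaces the root tree node of a sampled cord. `old_rep` and `new_rep` are
+// hypothetical CordRep pointers owned by the calling Cord logic.
+//
+//   CordzInfo* info = cord.cordz_info();
+//   info->Lock(CordzUpdateTracker::kAppendString);
+//   info->SetCordRep(new_rep);  // Must happen before unreffing the old tree.
+//   info->Unlock();
+//   CordRep::Unref(old_rep);
+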
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(cordz_should_profile())) {
+ TrackCord(cord, method);
+ }
+}
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, const InlineData& src, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(InlineData::is_either_profiled(cord, src)) ||
+ ABSL_PREDICT_FALSE(cordz_should_profile())) {
+ TrackCord(cord, src, method);
+ }
+}
+
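+// Hypothetical call-site sketch (names are illustrative, not actual Cord
+// internals): a Cord copy constructor that has just adopted the source tree
+// would invoke the two-argument overload so that the parent stack and update
+// counters propagate when either cord is sampled.
+//
+//   CordzInfo::MaybeTrackCord(cord_data, src_data,
+//                             CordzUpdateTracker::kConstructorCord);
+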
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
+ CordzInfo* info) {
+ if (ABSL_PREDICT_FALSE(info)) {
+ info->Untrack();
+ }
+}
+
+inline void CordzInfo::AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_) {
+#ifndef NDEBUG
+ mutex_.AssertHeld();
+#endif
+}
+
+inline void CordzInfo::SetCordRep(CordRep* rep) {
+ AssertHeld();
+ rep_ = rep;
+}
+
+inline void CordzInfo::UnsafeSetCordRep(CordRep* rep) { rep_ = rep; }
+
+inline CordRep* CordzInfo::RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
+ return rep_ ? CordRep::Ref(rep_) : nullptr;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_INFO_H_
diff --git a/absl/strings/internal/cordz_info_statistics_test.cc b/absl/strings/internal/cordz_info_statistics_test.cc
new file mode 100644
index 00000000..9f2842d9
--- /dev/null
+++ b/absl/strings/internal/cordz_info_statistics_test.cc
@@ -0,0 +1,508 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <iostream>
+#include <random>
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/strings/cord.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cord_rep_ring.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_sample_token.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_scope.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Do not print statistics contents, the matcher prints them as needed.
+inline void PrintTo(const CordzStatistics& stats, std::ostream* s) {
+ if (s) *s << "CordzStatistics{...}";
+}
+
+namespace {
+
+// Creates a flat of the specified allocated size
+CordRepFlat* Flat(size_t size) {
+  // Round up to a tag size, as we are going to poke an exact tag size back into
+  // the allocated flat. 'Size returning allocators' could grant us more than we
+  // requested, but we are fine poking the 'requested' size into the tag, even
+  // in the presence of sized deletes, so we need to make sure the size rounds
+  // perfectly to a tag value.
+ assert(size >= kMinFlatSize);
+ size = RoundUpForTag(size);
+ CordRepFlat* flat = CordRepFlat::New(size - kFlatOverhead);
+ flat->tag = AllocatedSizeToTag(size);
+ flat->length = size - kFlatOverhead;
+ return flat;
+}
+
+// Creates an external of the specified length
+CordRepExternal* External(int length = 512) {
+ return static_cast<CordRepExternal*>(
+ NewExternalRep(absl::string_view("", length), [](absl::string_view) {}));
+}
+
+// Creates a substring of the provided rep, with length `rep->length - 1`.
+CordRepSubstring* Substring(CordRep* rep) {
+ auto* substring = new CordRepSubstring;
+ substring->length = rep->length - 1;
+ substring->tag = SUBSTRING;
+ substring->child = rep;
+ return substring;
+}
+
+// Creates a concat on the provided reps
+CordRepConcat* Concat(CordRep* left, CordRep* right) {
+ auto* concat = new CordRepConcat;
+ concat->length = left->length + right->length;
+ concat->tag = CONCAT;
+ concat->left = left;
+ concat->right = right;
+ return concat;
+}
+
+// Reference count helper
+struct RefHelper {
+ std::vector<CordRep*> refs;
+
+ ~RefHelper() {
+ for (CordRep* rep : refs) {
+ CordRep::Unref(rep);
+ }
+ }
+
+ // Invokes CordRep::Unref() on `rep` when this instance is destroyed.
+ template <typename T>
+ T* NeedsUnref(T* rep) {
+ refs.push_back(rep);
+ return rep;
+ }
+
+ // Adds `n` reference counts to `rep` which will be unreffed when this
+ // instance is destroyed.
+ template <typename T>
+ T* Ref(T* rep, size_t n = 1) {
+ while (n--) {
+ NeedsUnref(CordRep::Ref(rep));
+ }
+ return rep;
+ }
+};
+
+// Sizeof helper. Returns the allocated size of `p`, excluding any child
+// elements for substring, concat and ring cord reps.
+template <typename T>
+size_t SizeOf(const T* rep) {
+ return sizeof(T);
+}
+
+template <>
+size_t SizeOf(const CordRepFlat* rep) {
+ return rep->AllocatedSize();
+}
+
+template <>
+size_t SizeOf(const CordRepExternal* rep) {
+ // See cord.cc
+ return sizeof(CordRepExternalImpl<intptr_t>) + rep->length;
+}
+
+template <>
+size_t SizeOf(const CordRepRing* rep) {
+ return CordRepRing::AllocSize(rep->capacity());
+}
+
+// Computes fair share memory used in a naive 'we dare to recurse' way.
+size_t FairShare(CordRep* rep, size_t ref = 1) {
+ size_t self = 0, children = 0;
+ ref *= rep->refcount.Get();
+ if (rep->tag >= FLAT) {
+ self = SizeOf(rep->flat());
+ } else if (rep->tag == EXTERNAL) {
+ self = SizeOf(rep->external());
+ } else if (rep->tag == SUBSTRING) {
+ self = SizeOf(rep->substring());
+ children = FairShare(rep->substring()->child, ref);
+ } else if (rep->tag == RING) {
+ self = SizeOf(rep->ring());
+ rep->ring()->ForEach([&](CordRepRing::index_type i) {
+ self += FairShare(rep->ring()->entry_child(i));
+ });
+ } else if (rep->tag == CONCAT) {
+ self = SizeOf(rep->concat());
+ children = FairShare(rep->concat()->left, ref) +
+ FairShare(rep->concat()->right, ref);
+ } else {
+ assert(false);
+ }
+ return self / ref + children;
+}
+
+// Samples the cord and returns CordzInfo::GetStatistics()
+CordzStatistics SampleCord(CordRep* rep) {
+ InlineData cord(rep);
+ CordzInfo::TrackCord(cord, CordzUpdateTracker::kUnknown);
+ CordzStatistics stats = cord.cordz_info()->GetCordzStatistics();
+ cord.cordz_info()->Untrack();
+ return stats;
+}
+
+MATCHER_P(EqStatistics, stats, "Statistics equal expected values") {
+ bool ok = true;
+
+#define STATS_MATCHER_EXPECT_EQ(member) \
+ if (stats.member != arg.member) { \
+ *result_listener << "\n stats." << #member \
+ << ": actual = " << arg.member << ", expected " \
+ << stats.member; \
+ ok = false; \
+ }
+
+ STATS_MATCHER_EXPECT_EQ(size);
+ STATS_MATCHER_EXPECT_EQ(node_count);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_64);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_128);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_256);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_512);
+ STATS_MATCHER_EXPECT_EQ(node_counts.flat_1k);
+ STATS_MATCHER_EXPECT_EQ(node_counts.external);
+ STATS_MATCHER_EXPECT_EQ(node_counts.concat);
+ STATS_MATCHER_EXPECT_EQ(node_counts.substring);
+ STATS_MATCHER_EXPECT_EQ(node_counts.ring);
+ STATS_MATCHER_EXPECT_EQ(estimated_memory_usage);
+ STATS_MATCHER_EXPECT_EQ(estimated_fair_share_memory_usage);
+
+#undef STATS_MATCHER_EXPECT_EQ
+
+ return ok;
+}
+
+TEST(CordzInfoStatisticsTest, Flat) {
+ RefHelper ref;
+ auto* flat = ref.NeedsUnref(Flat(512));
+
+ CordzStatistics expected;
+ expected.size = flat->length;
+ expected.estimated_memory_usage = SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 1;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_512 = 1;
+
+ EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedFlat) {
+ RefHelper ref;
+ auto* flat = ref.Ref(ref.NeedsUnref(Flat(64)));
+
+ CordzStatistics expected;
+ expected.size = flat->length;
+ expected.estimated_memory_usage = SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = SizeOf(flat) / 2;
+ expected.node_count = 1;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_64 = 1;
+
+ EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, External) {
+ RefHelper ref;
+ auto* external = ref.NeedsUnref(External());
+
+ CordzStatistics expected;
+ expected.size = external->length;
+ expected.estimated_memory_usage = SizeOf(external);
+ expected.estimated_fair_share_memory_usage = SizeOf(external);
+ expected.node_count = 1;
+ expected.node_counts.external = 1;
+
+ EXPECT_THAT(SampleCord(external), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedExternal) {
+ RefHelper ref;
+ auto* external = ref.Ref(ref.NeedsUnref(External()));
+
+ CordzStatistics expected;
+ expected.size = external->length;
+ expected.estimated_memory_usage = SizeOf(external);
+ expected.estimated_fair_share_memory_usage = SizeOf(external) / 2;
+ expected.node_count = 1;
+ expected.node_counts.external = 1;
+
+ EXPECT_THAT(SampleCord(external), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Substring) {
+ RefHelper ref;
+ auto* flat = Flat(1024);
+ auto* substring = ref.NeedsUnref(Substring(flat));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(substring) + SizeOf(flat);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 2;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_1k = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedSubstring) {
+ RefHelper ref;
+ auto* flat = ref.Ref(Flat(511), 2);
+ auto* substring = ref.Ref(ref.NeedsUnref(Substring(flat)));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(flat) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage =
+ SizeOf(substring) / 2 + SizeOf(flat) / 6;
+ expected.node_count = 2;
+ expected.node_counts.flat = 1;
+ expected.node_counts.flat_512 = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Concat) {
+ RefHelper ref;
+ auto* flat1 = Flat(300);
+ auto* flat2 = Flat(2000);
+ auto* concat = ref.NeedsUnref(Concat(flat1, flat2));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage =
+ SizeOf(concat) + SizeOf(flat1) + SizeOf(flat2);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 3;
+ expected.node_counts.flat = 2;
+ expected.node_counts.flat_512 = 1;
+ expected.node_counts.concat = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, DeepConcat) {
+ RefHelper ref;
+ auto* flat1 = Flat(300);
+ auto* flat2 = Flat(2000);
+ auto* flat3 = Flat(400);
+ auto* external = External(3000);
+ auto* substring = Substring(external);
+ auto* concat1 = Concat(flat1, flat2);
+ auto* concat2 = Concat(flat3, substring);
+ auto* concat = ref.NeedsUnref(Concat(concat1, concat2));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage = SizeOf(concat) * 3 + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+
+ expected.node_count = 8;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_512 = 2;
+ expected.node_counts.external = 1;
+ expected.node_counts.concat = 3;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, DeepSharedConcat) {
+ RefHelper ref;
+ auto* flat1 = Flat(40);
+ auto* flat2 = ref.Ref(Flat(2000), 4);
+ auto* flat3 = Flat(70);
+ auto* external = ref.Ref(External(3000));
+ auto* substring = ref.Ref(Substring(external), 3);
+ auto* concat1 = Concat(flat1, flat2);
+ auto* concat2 = Concat(flat3, substring);
+ auto* concat = ref.Ref(ref.NeedsUnref(Concat(concat1, concat2)));
+
+ CordzStatistics expected;
+ expected.size = concat->length;
+ expected.estimated_memory_usage = SizeOf(concat) * 3 + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = FairShare(concat);
+ expected.node_count = 8;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_64 = 1;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.external = 1;
+ expected.node_counts.concat = 3;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, Ring) {
+ RefHelper ref;
+ auto* flat1 = Flat(240);
+ auto* flat2 = Flat(2000);
+ auto* flat3 = Flat(70);
+ auto* external = External(3000);
+ CordRepRing* ring = CordRepRing::Create(flat1);
+ ring = CordRepRing::Append(ring, flat2);
+ ring = CordRepRing::Append(ring, flat3);
+ ring = ref.NeedsUnref(CordRepRing::Append(ring, external));
+
+ CordzStatistics expected;
+ expected.size = ring->length;
+ expected.estimated_memory_usage = SizeOf(ring) + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external);
+ expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
+ expected.node_count = 5;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.flat_256 = 1;
+ expected.node_counts.external = 1;
+ expected.node_counts.ring = 1;
+
+ EXPECT_THAT(SampleCord(ring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, SharedSubstringRing) {
+ RefHelper ref;
+ auto* flat1 = ref.Ref(Flat(240));
+ auto* flat2 = Flat(200);
+ auto* flat3 = Flat(70);
+ auto* external = ref.Ref(External(3000), 5);
+ CordRepRing* ring = CordRepRing::Create(flat1);
+ ring = CordRepRing::Append(ring, flat2);
+ ring = CordRepRing::Append(ring, flat3);
+ ring = ref.Ref(CordRepRing::Append(ring, external), 4);
+ auto* substring = ref.Ref(ref.NeedsUnref(Substring(ring)));
+
+ CordzStatistics expected;
+ expected.size = substring->length;
+ expected.estimated_memory_usage = SizeOf(ring) + SizeOf(flat1) +
+ SizeOf(flat2) + SizeOf(flat3) +
+ SizeOf(external) + SizeOf(substring);
+ expected.estimated_fair_share_memory_usage = FairShare(substring);
+ expected.node_count = 6;
+ expected.node_counts.flat = 3;
+ expected.node_counts.flat_128 = 1;
+ expected.node_counts.flat_256 = 2;
+ expected.node_counts.external = 1;
+ expected.node_counts.ring = 1;
+ expected.node_counts.substring = 1;
+
+ EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
+}
+
+TEST(CordzInfoStatisticsTest, ThreadSafety) {
+ Notification stop;
+ static constexpr int kNumThreads = 8;
+ int64_t sampled_node_count = 0;
+
+ {
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+
+ // Run analyzer thread emulating a CordzHandler collection.
+ pool.Schedule([&]() {
+ while (!stop.HasBeenNotified()) {
+ // Run every 10us (about 100K total collections).
+ absl::SleepFor(absl::Microseconds(10));
+ CordzSampleToken token;
+ for (const CordzInfo& cord_info : token) {
+ CordzStatistics stats = cord_info.GetCordzStatistics();
+ sampled_node_count += stats.node_count;
+ }
+ }
+ });
+
+ // Run 'application threads'
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&]() {
+ // Track 0 - 2 cordz infos at a time, providing permutations of 0, 1
+ // and 2 CordzHandle and CordzInfo queues being active, with plenty of
+ // 'empty to non empty' transitions.
+ InlineData cords[2];
+ std::minstd_rand gen;
+ std::uniform_int_distribution<int> coin_toss(0, 1);
+
+ while (!stop.HasBeenNotified()) {
+ for (InlineData& cord : cords) {
+ // 50/50 flip the state of the cord
+ if (coin_toss(gen) != 0) {
+ if (cord.is_tree()) {
+ // 50/50 simulate delete (untrack) or 'edit to empty'
+ if (coin_toss(gen) != 0) {
+ CordzInfo::MaybeUntrackCord(cord.cordz_info());
+ } else {
+ CordzUpdateScope scope(cord.cordz_info(),
+ CordzUpdateTracker::kUnknown);
+ scope.SetCordRep(nullptr);
+ }
+ CordRep::Unref(cord.as_tree());
+ cord.set_inline_size(0);
+ } else {
+ // 50/50 Ring or Flat coin toss
+ CordRep* rep = Flat(256);
+ rep = (coin_toss(gen) != 0) ? CordRepRing::Create(rep) : rep;
+ cord.make_tree(rep);
+
+ // 50/50 sample
+ if (coin_toss(gen) != 0) {
+ CordzInfo::TrackCord(cord, CordzUpdateTracker::kUnknown);
+ }
+ }
+ }
+ }
+ }
+ for (InlineData& cord : cords) {
+ if (cord.is_tree()) {
+ CordzInfo::MaybeUntrackCord(cord.cordz_info());
+ CordRep::Unref(cord.as_tree());
+ }
+ }
+ });
+ }
+
+ // Run for 1 second to give memory and thread safety analyzers plenty of
+ // time to detect any mishaps or undefined behaviors.
+ absl::SleepFor(absl::Seconds(1));
+ stop.Notify();
+ }
+
+ std::cout << "Sampled " << sampled_node_count << " nodes\n";
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_info_test.cc b/absl/strings/internal/cordz_info_test.cc
new file mode 100644
index 00000000..59a8c525
--- /dev/null
+++ b/absl/strings/internal/cordz_info_test.cc
@@ -0,0 +1,311 @@
+// Copyright 2019 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_info.h"
+
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/debugging/stacktrace.h"
+#include "absl/debugging/symbolize.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_statistics.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+#include "absl/strings/str_cat.h"
+#include "absl/types/span.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::HasSubstr;
+using ::testing::Ne;
+using ::testing::SizeIs;
+
+// Used test values
+auto constexpr kUnknownMethod = CordzUpdateTracker::kUnknown;
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+auto constexpr kChildMethod = CordzUpdateTracker::kConstructorCord;
+auto constexpr kUpdateMethod = CordzUpdateTracker::kAppendString;
+
+// Local less verbose helper
+std::vector<const CordzHandle*> DeleteQueue() {
+ return CordzHandle::DiagnosticsGetDeleteQueue();
+}
+
+std::string FormatStack(absl::Span<void* const> raw_stack) {
+ static constexpr size_t buf_size = 1 << 14;
+ std::unique_ptr<char[]> buf(new char[buf_size]);
+ std::string output;
+ for (void* stackp : raw_stack) {
+ if (absl::Symbolize(stackp, buf.get(), buf_size)) {
+ absl::StrAppend(&output, " ", buf.get(), "\n");
+ }
+ }
+ return output;
+}
+
+TEST(CordzInfoTest, TrackCord) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ ASSERT_THAT(info, Ne(nullptr));
+ EXPECT_FALSE(info->is_snapshot());
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(info));
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(data.rep.rep));
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, MaybeTrackCordOnSampledCord) {
+ TestCordData data1;
+ CordzInfo::TrackCord(data1.data, kTrackCordMethod);
+ CordzInfo* info1 = data1.data.cordz_info();
+ TestCordData data2;
+ CordzInfo::MaybeTrackCord(data2.data, data1.data, kTrackCordMethod);
+ CordzInfo* info2 = data2.data.cordz_info();
+ ASSERT_THAT(info2, Ne(nullptr));
+ EXPECT_THAT(info2->GetCordRepForTesting(), Eq(data2.rep.rep));
+ info2->Untrack();
+ info1->Untrack();
+}
+
+TEST(CordzInfoTest, UntrackCord) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ info->Untrack();
+ EXPECT_THAT(DeleteQueue(), SizeIs(0));
+}
+
+TEST(CordzInfoTest, UntrackCordWithSnapshot) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ CordzSnapshot snapshot;
+ info->Untrack();
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(nullptr));
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(data.rep.rep));
+ EXPECT_THAT(DeleteQueue(), ElementsAre(info, &snapshot));
+}
+
+TEST(CordzInfoTest, SetCordRep) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ TestCordRep rep;
+ info->Lock(CordzUpdateTracker::kAppendCord);
+ info->SetCordRep(rep.rep);
+ info->Unlock();
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(rep.rep));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, SetCordRepNullUntracksCordOnUnlock) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ info->Lock(CordzUpdateTracker::kAppendString);
+ info->SetCordRep(nullptr);
+ EXPECT_THAT(info->GetCordRepForTesting(), Eq(nullptr));
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(info));
+
+ info->Unlock();
+ EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, RefCordRep) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+
+ size_t refcount = data.rep.rep->refcount.Get();
+ EXPECT_THAT(info->RefCordRep(), Eq(data.rep.rep));
+ EXPECT_THAT(data.rep.rep->refcount.Get(), Eq(refcount + 1));
+ CordRep::Unref(data.rep.rep);
+ info->Untrack();
+}
+
+#if GTEST_HAS_DEATH_TEST
+
+TEST(CordzInfoTest, SetCordRepRequiresMutex) {
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ TestCordRep rep;
+ EXPECT_DEBUG_DEATH(info->SetCordRep(rep.rep), ".*");
+ info->Untrack();
+}
+
+#endif // GTEST_HAS_DEATH_TEST
+
+TEST(CordzInfoTest, TrackUntrackHeadFirstV2) {
+ CordzSnapshot snapshot;
+ EXPECT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info1 = data.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ TestCordData data2;
+ CordzInfo::TrackCord(data2.data, kTrackCordMethod);
+ CordzInfo* info2 = data2.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info2->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info1->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, TrackUntrackTailFirstV2) {
+ CordzSnapshot snapshot;
+ EXPECT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+
+ TestCordData data;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info1 = data.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ TestCordData data2;
+ CordzInfo::TrackCord(data2.data, kTrackCordMethod);
+ CordzInfo* info2 = data2.data.cordz_info();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(info1));
+ EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
+
+ info1->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
+ EXPECT_THAT(info2->Next(snapshot), Eq(nullptr));
+
+ info2->Untrack();
+ ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
+}
+
+TEST(CordzInfoTest, StackV2) {
+ TestCordData data;
+ // kMaxStackDepth is intentionally less than 64 (which is the max depth that
+ // Cordz will record) because if the actual stack depth exceeds 64
+ // (which it does on Apple platforms), then the expected_stack will end up
+ // catching a few frames at the end that the actual_stack didn't get, and
+ // it will no longer be a subset. At the time of this writing, 58 is the max
+ // that allows this test to pass (with a minimum OS version of iOS 9), so it
+ // is rounded down to 50 to hopefully avoid running into this in the future
+ // if Apple makes small modifications to its testing stack. 50 is sufficient
+ // to prove that we got a decent stack.
+ static constexpr int kMaxStackDepth = 50;
+ CordzInfo::TrackCord(data.data, kTrackCordMethod);
+ CordzInfo* info = data.data.cordz_info();
+ std::vector<void*> local_stack;
+ local_stack.resize(kMaxStackDepth);
+ // In some environments we don't get stack traces. For example, on Android,
+ // absl::GetStackTrace will return 0, indicating it didn't find any stack. The
+ // resulting formatted stack will be "", but that still equals the stack
+ // recorded in CordzInfo, which is also empty. The skip_count is 1 so that the
+ // line number of the current stack isn't included in the HasSubstr check.
+ local_stack.resize(absl::GetStackTrace(local_stack.data(), kMaxStackDepth,
+ /*skip_count=*/1));
+
+ std::string got_stack = FormatStack(info->GetStack());
+ std::string expected_stack = FormatStack(local_stack);
+ // If TrackCord is inlined, got_stack should match expected_stack. If it isn't
+ // inlined, got_stack should include an additional frame not present in
+ // expected_stack. Either way, expected_stack should be a substring of
+ // got_stack.
+ EXPECT_THAT(got_stack, HasSubstr(expected_stack));
+
+ info->Untrack();
+}
+
+// Local helper functions to get different stacks for child and parent.
+CordzInfo* TrackChildCord(InlineData& data, const InlineData& parent) {
+ CordzInfo::TrackCord(data, parent, kChildMethod);
+ return data.cordz_info();
+}
+CordzInfo* TrackParentCord(InlineData& data) {
+ CordzInfo::TrackCord(data, kTrackCordMethod);
+ return data.cordz_info();
+}
+
+TEST(CordzInfoTest, GetStatistics) {
+ TestCordData data;
+ CordzInfo* info = TrackParentCord(data.data);
+
+ CordzStatistics statistics = info->GetCordzStatistics();
+ EXPECT_THAT(statistics.size, Eq(data.rep.rep->length));
+ EXPECT_THAT(statistics.method, Eq(kTrackCordMethod));
+ EXPECT_THAT(statistics.parent_method, Eq(kUnknownMethod));
+ EXPECT_THAT(statistics.update_tracker.Value(kTrackCordMethod), Eq(1));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, LockCountsMethod) {
+ TestCordData data;
+ CordzInfo* info = TrackParentCord(data.data);
+
+ info->Lock(kUpdateMethod);
+ info->Unlock();
+ info->Lock(kUpdateMethod);
+ info->Unlock();
+
+ CordzStatistics statistics = info->GetCordzStatistics();
+ EXPECT_THAT(statistics.update_tracker.Value(kUpdateMethod), Eq(2));
+
+ info->Untrack();
+}
+
+TEST(CordzInfoTest, FromParent) {
+ TestCordData parent;
+ TestCordData child;
+ CordzInfo* info_parent = TrackParentCord(parent.data);
+ CordzInfo* info_child = TrackChildCord(child.data, parent.data);
+
+ std::string stack = FormatStack(info_parent->GetStack());
+ std::string parent_stack = FormatStack(info_child->GetParentStack());
+ EXPECT_THAT(stack, Eq(parent_stack));
+
+ CordzStatistics statistics = info_child->GetCordzStatistics();
+ EXPECT_THAT(statistics.size, Eq(child.rep.rep->length));
+ EXPECT_THAT(statistics.method, Eq(kChildMethod));
+ EXPECT_THAT(statistics.parent_method, Eq(kTrackCordMethod));
+ EXPECT_THAT(statistics.update_tracker.Value(kChildMethod), Eq(1));
+
+ info_parent->Untrack();
+ info_child->Untrack();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_sample_token.cc b/absl/strings/internal/cordz_sample_token.cc
new file mode 100644
index 00000000..ba1270d8
--- /dev/null
+++ b/absl/strings/internal/cordz_sample_token.cc
@@ -0,0 +1,64 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_sample_token.h"
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+CordzSampleToken::Iterator& CordzSampleToken::Iterator::operator++() {
+ if (current_) {
+ current_ = current_->Next(*token_);
+ }
+ return *this;
+}
+
+CordzSampleToken::Iterator CordzSampleToken::Iterator::operator++(int) {
+ Iterator it(*this);
+ operator++();
+ return it;
+}
+
+bool operator==(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return lhs.current_ == rhs.current_ &&
+ (lhs.current_ == nullptr || lhs.token_ == rhs.token_);
+}
+
+bool operator!=(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return !(lhs == rhs);
+}
+
+CordzSampleToken::Iterator::reference CordzSampleToken::Iterator::operator*()
+ const {
+ return *current_;
+}
+
+CordzSampleToken::Iterator::pointer CordzSampleToken::Iterator::operator->()
+ const {
+ return current_;
+}
+
+CordzSampleToken::Iterator::Iterator(const CordzSampleToken* token)
+ : token_(token), current_(CordzInfo::Head(*token)) {}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_sample_token.h b/absl/strings/internal/cordz_sample_token.h
new file mode 100644
index 00000000..28a1d70c
--- /dev/null
+++ b/absl/strings/internal/cordz_sample_token.h
@@ -0,0 +1,97 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+#define ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+
+#include <cstddef>
+#include <iterator>
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// The existence of a CordzSampleToken guarantees that a reader can traverse the
+// global_cordz_infos_head linked-list without needing to hold a mutex. When a
+// CordzSampleToken exists, all CordzInfo objects that would be destroyed are
+// instead appended to a deletion queue. When the CordzSampleToken is destroyed,
+// it will also clean up any of these CordzInfo objects.
+//
+// E.g., ST are CordzSampleToken objects and CH are CordzHandle objects.
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- global_delete_queue_tail
+//
+// This list tracks that CH1 and CH2 were created after ST1, so the thread
+// holding ST1 might have a reference to CH1, CH2, ST2, and CH3. However, ST2 was
+// created later, so the thread holding the ST2 token cannot have a reference to
+// ST1, CH1, or CH2. If ST1 is cleaned up first, that thread will delete ST1,
+// CH1, and CH2. If instead ST2 is cleaned up first, that thread will only
+// delete ST2.
+//
+// If ST1 is cleaned up first, the new list will be:
+// ST2 <- CH3 <- global_delete_queue_tail
+//
+// If ST2 is cleaned up first, the new list will be:
+// ST1 <- CH1 <- CH2 <- CH3 <- global_delete_queue_tail
+//
+// All new CordzHandle objects are appended to the list, so if a new thread
+// comes along before either ST1 or ST2 is cleaned up, the new list will be:
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- ST3 <- global_delete_queue_tail
+//
+// A thread must hold the global_delete_queue_mu mutex whenever it's altering
+// this list.
+//
+// It is safe for a thread that holds a CordzSampleToken to read
+// global_cordz_infos at any time since the objects it is able to retrieve will
+// not be deleted while the CordzSampleToken exists.
+class CordzSampleToken : public CordzSnapshot {
+ public:
+ class Iterator {
+ public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = const CordzInfo&;
+ using difference_type = ptrdiff_t;
+ using pointer = const CordzInfo*;
+ using reference = value_type;
+
+ Iterator() = default;
+
+ Iterator& operator++();
+ Iterator operator++(int);
+ friend bool operator==(const Iterator& lhs, const Iterator& rhs);
+ friend bool operator!=(const Iterator& lhs, const Iterator& rhs);
+ reference operator*() const;
+ pointer operator->() const;
+
+ private:
+ friend class CordzSampleToken;
+ explicit Iterator(const CordzSampleToken* token);
+
+ const CordzSampleToken* token_ = nullptr;
+ pointer current_ = nullptr;
+ };
+
+ CordzSampleToken() = default;
+ CordzSampleToken(const CordzSampleToken&) = delete;
+ CordzSampleToken& operator=(const CordzSampleToken&) = delete;
+
+ Iterator begin() { return Iterator(this); }
+ Iterator end() { return Iterator(); }
+};
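+
+// A minimal usage sketch, mirroring the tests added in this change: a
+// collector thread holds a CordzSampleToken and walks the sampled cords it
+// can see; every CordzInfo reachable through the token stays alive for as
+// long as the token exists.
+//
+//   CordzSampleToken token;
+//   for (const CordzInfo& info : token) {
+//     CordzStatistics stats = info.GetCordzStatistics();
+//     // ... aggregate or report `stats` ...
+//   }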
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
diff --git a/absl/strings/internal/cordz_sample_token_test.cc b/absl/strings/internal/cordz_sample_token_test.cc
new file mode 100644
index 00000000..9f54301d
--- /dev/null
+++ b/absl/strings/internal/cordz_sample_token_test.cc
@@ -0,0 +1,208 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_sample_token.h"
+
+#include <memory>
+#include <type_traits>
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/memory/memory.h"
+#include "absl/random/random.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_handle.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "absl/synchronization/notification.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::Ne;
+
+// Used test values
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+
+TEST(CordzSampleTokenTest, IteratorTraits) {
+ static_assert(std::is_copy_constructible<CordzSampleToken::Iterator>::value,
+ "");
+ static_assert(std::is_copy_assignable<CordzSampleToken::Iterator>::value, "");
+ static_assert(std::is_move_constructible<CordzSampleToken::Iterator>::value,
+ "");
+ static_assert(std::is_move_assignable<CordzSampleToken::Iterator>::value, "");
+ static_assert(
+ std::is_same<
+ std::iterator_traits<CordzSampleToken::Iterator>::iterator_category,
+ std::input_iterator_tag>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::value_type,
+ const CordzInfo&>::value,
+ "");
+ static_assert(
+ std::is_same<
+ std::iterator_traits<CordzSampleToken::Iterator>::difference_type,
+ ptrdiff_t>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::pointer,
+ const CordzInfo*>::value,
+ "");
+ static_assert(
+ std::is_same<std::iterator_traits<CordzSampleToken::Iterator>::reference,
+ const CordzInfo&>::value,
+ "");
+}
+
+TEST(CordzSampleTokenTest, IteratorEmpty) {
+ CordzSampleToken token;
+ EXPECT_THAT(token.begin(), Eq(token.end()));
+}
+
+TEST(CordzSampleTokenTest, Iterator) {
+ TestCordData cord1, cord2, cord3;
+ CordzInfo::TrackCord(cord1.data, kTrackCordMethod);
+ CordzInfo* info1 = cord1.data.cordz_info();
+ CordzInfo::TrackCord(cord2.data, kTrackCordMethod);
+ CordzInfo* info2 = cord2.data.cordz_info();
+ CordzInfo::TrackCord(cord3.data, kTrackCordMethod);
+ CordzInfo* info3 = cord3.data.cordz_info();
+
+ CordzSampleToken token;
+ std::vector<const CordzInfo*> found;
+ for (const CordzInfo& cord_info : token) {
+ found.push_back(&cord_info);
+ }
+
+ EXPECT_THAT(found, ElementsAre(info3, info2, info1));
+
+ info1->Untrack();
+ info2->Untrack();
+ info3->Untrack();
+}
+
+TEST(CordzSampleTokenTest, IteratorEquality) {
+ TestCordData cord1;
+ TestCordData cord2;
+ TestCordData cord3;
+ CordzInfo::TrackCord(cord1.data, kTrackCordMethod);
+ CordzInfo* info1 = cord1.data.cordz_info();
+
+ CordzSampleToken token1;
+ // lhs starts with the CordzInfo corresponding to cord1 at the head.
+ CordzSampleToken::Iterator lhs = token1.begin();
+
+ CordzInfo::TrackCord(cord2.data, kTrackCordMethod);
+ CordzInfo* info2 = cord2.data.cordz_info();
+
+ CordzSampleToken token2;
+ // rhs starts with the CordzInfo corresponding to cord2 at the head.
+ CordzSampleToken::Iterator rhs = token2.begin();
+
+ CordzInfo::TrackCord(cord3.data, kTrackCordMethod);
+ CordzInfo* info3 = cord3.data.cordz_info();
+
+ // lhs is on cord1 while rhs is on cord2.
+ EXPECT_THAT(lhs, Ne(rhs));
+
+ rhs++;
+ // lhs and rhs are both on cord1, but they didn't come from the same
+ // CordzSampleToken.
+ EXPECT_THAT(lhs, Ne(rhs));
+
+ lhs++;
+ rhs++;
+ // Both lhs and rhs are done, so they are on nullptr.
+ EXPECT_THAT(lhs, Eq(rhs));
+
+ info1->Untrack();
+ info2->Untrack();
+ info3->Untrack();
+}
+
+TEST(CordzSampleTokenTest, MultiThreaded) {
+ Notification stop;
+ static constexpr int kNumThreads = 4;
+ static constexpr int kNumCords = 3;
+ static constexpr int kNumTokens = 3;
+ absl::synchronization_internal::ThreadPool pool(kNumThreads);
+
+ for (int i = 0; i < kNumThreads; ++i) {
+ pool.Schedule([&stop]() {
+ absl::BitGen gen;
+ TestCordData cords[kNumCords];
+ std::unique_ptr<CordzSampleToken> tokens[kNumTokens];
+
+ while (!stop.HasBeenNotified()) {
+ // Randomly perform one of five actions:
+ // 1) Untrack
+ // 2) Track
+ // 3) Iterate over Cords visible to a token.
+ // 4) Unsample
+ // 5) Sample
+ int index = absl::Uniform(gen, 0, kNumCords);
+ if (absl::Bernoulli(gen, 0.5)) {
+ TestCordData& cord = cords[index];
+ // Track/untrack.
+ if (cord.data.is_profiled()) {
+ // 1) Untrack
+ cord.data.cordz_info()->Untrack();
+ cord.data.clear_cordz_info();
+ } else {
+ // 2) Track
+ CordzInfo::TrackCord(cord.data, kTrackCordMethod);
+ }
+ } else {
+ std::unique_ptr<CordzSampleToken>& token = tokens[index];
+ if (token) {
+ if (absl::Bernoulli(gen, 0.5)) {
+ // 3) Iterate over Cords visible to a token.
+ for (const CordzInfo& info : *token) {
+ // This is trivial work to allow us to compile the loop.
+ EXPECT_THAT(info.Next(*token), Ne(&info));
+ }
+ } else {
+ // 4) Unsample
+ token = nullptr;
+ }
+ } else {
+ // 5) Sample
+ token = absl::make_unique<CordzSampleToken>();
+ }
+ }
+ }
+ for (TestCordData& cord : cords) {
+ CordzInfo::MaybeUntrackCord(cord.data.cordz_info());
+ }
+ });
+ }
+ // The threads will hammer away. Give it a little bit of time for tsan to
+ // spot errors.
+ absl::SleepFor(absl::Seconds(3));
+ stop.Notify();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_statistics.h b/absl/strings/internal/cordz_statistics.h
new file mode 100644
index 00000000..e03c651e
--- /dev/null
+++ b/absl/strings/internal/cordz_statistics.h
@@ -0,0 +1,84 @@
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+
+#include <cstdint>
+
+#include "absl/base/config.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzStatistics captures some meta information about a Cord's shape.
+struct CordzStatistics {
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // Node counts information
+ struct NodeCounts {
+ size_t flat = 0; // #flats
+ size_t flat_64 = 0; // #flats up to 64 bytes
+ size_t flat_128 = 0; // #flats up to 128 bytes
+ size_t flat_256 = 0; // #flats up to 256 bytes
+ size_t flat_512 = 0; // #flats up to 512 bytes
+ size_t flat_1k = 0; // #flats up to 1K bytes
+ size_t external = 0; // #external reps
+ size_t substring = 0; // #substring reps
+ size_t concat = 0; // #concat reps
+ size_t ring = 0; // #ring buffer reps
+ };
+
+ // The size of the cord in bytes. This matches the result of Cord::size().
+ int64_t size = 0;
+
+ // The estimated memory used by the sampled cord. This value matches the
+ // value as reported by Cord::EstimatedMemoryUsage().
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_memory_usage = 0;
+
+ // The effective memory used by the sampled cord, inversely weighted by the
+ // effective indegree of each allocated node. This is a representation of the
+ // fair share of memory usage that should be attributed to the sampled cord.
+ // This value is more useful for cases where one or more nodes are referenced
+ // by multiple Cord instances, and for cases where a Cord includes the same
+ // node multiple times (either directly or indirectly).
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_fair_share_memory_usage = 0;
+
+ // The total number of nodes referenced by this cord.
+ // For ring buffer Cords, this includes the 'ring buffer' node.
+ // A value of 0 implies the property has not been recorded.
+ int64_t node_count = 0;
+
+ // Detailed node counts per type
+ NodeCounts node_counts;
+
+ // The cord method responsible for sampling the cord.
+ MethodIdentifier method = MethodIdentifier::kUnknown;
+
+ // The cord method responsible for sampling the parent cord if applicable.
+ MethodIdentifier parent_method = MethodIdentifier::kUnknown;
+
+ // Update tracker tracking invocation count per cord method.
+ CordzUpdateTracker update_tracker;
+};
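+
+// A minimal sketch of how these statistics are read, following the tests in
+// this change: a sampled cord's CordzInfo produces a snapshot on demand.
+//
+//   CordzStatistics stats = info->GetCordzStatistics();  // info: CordzInfo*
+//   // stats.size matches Cord::size(); estimated_memory_usage counts every
+//   // referenced node in full, while estimated_fair_share_memory_usage
+//   // divides shared nodes across the cords that reference them.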
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
diff --git a/absl/strings/internal/cordz_update_scope.h b/absl/strings/internal/cordz_update_scope.h
new file mode 100644
index 00000000..57ba75de
--- /dev/null
+++ b/absl/strings/internal/cordz_update_scope.h
@@ -0,0 +1,71 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+
+#include "absl/base/config.h"
+#include "absl/base/optimization.h"
+#include "absl/base/thread_annotations.h"
+#include "absl/strings/internal/cord_internal.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateScope scopes an update to the provided CordzInfo.
+// The class invokes `info->Lock(method)` and `info->Unlock()` to guard
+// cordrep updates. This class does nothing if `info` is null.
+// See also the `Lock`, `Unlock` and `SetCordRep` methods in `CordzInfo`.
+class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
+ public:
+ CordzUpdateScope(CordzInfo* info, CordzUpdateTracker::MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(info)
+ : info_(info) {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info->Lock(method);
+ }
+ }
+
+ // CordzUpdateScope cannot be copied or assigned to.
+ CordzUpdateScope(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope(const CordzUpdateScope&) = delete;
+ CordzUpdateScope& operator=(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope& operator=(const CordzUpdateScope&) = delete;
+
+ ~CordzUpdateScope() ABSL_UNLOCK_FUNCTION() {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->Unlock();
+ }
+ }
+
+ void SetCordRep(CordRep* rep) const {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->SetCordRep(rep);
+ }
+ }
+
+ CordzInfo* info() const { return info_; }
+
+ private:
+ CordzInfo* info_;
+};
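+
+// A minimal usage sketch, as exercised by the tests in this change: wrap a
+// cordrep update of a possibly-sampled cord in a scope so that the CordzInfo,
+// if any, is locked for the duration of the update.
+//
+//   CordzInfo* info = ...;  // nullptr when the cord is not sampled
+//   CordzUpdateScope scope(info, CordzUpdateTracker::kAppendCord);
+//   scope.SetCordRep(rep);  // no-op when info is nullptr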
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
diff --git a/absl/strings/internal/cordz_update_scope_test.cc b/absl/strings/internal/cordz_update_scope_test.cc
new file mode 100644
index 00000000..3d08c622
--- /dev/null
+++ b/absl/strings/internal/cordz_update_scope_test.cc
@@ -0,0 +1,49 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_update_scope.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/config.h"
+#include "absl/strings/cordz_test_helpers.h"
+#include "absl/strings/internal/cord_rep_flat.h"
+#include "absl/strings/internal/cordz_info.h"
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+namespace {
+
+// Used test values
+auto constexpr kTrackCordMethod = CordzUpdateTracker::kConstructorString;
+
+TEST(CordzUpdateScopeTest, ScopeNullptr) {
+ CordzUpdateScope scope(nullptr, kTrackCordMethod);
+}
+
+TEST(CordzUpdateScopeTest, ScopeSampledCord) {
+ TestCordData cord;
+ CordzInfo::TrackCord(cord.data, kTrackCordMethod);
+ CordzUpdateScope scope(cord.data.cordz_info(), kTrackCordMethod);
+ cord.data.cordz_info()->SetCordRep(nullptr);
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/cordz_update_tracker.h b/absl/strings/internal/cordz_update_tracker.h
new file mode 100644
index 00000000..02efcc3a
--- /dev/null
+++ b/absl/strings/internal/cordz_update_tracker.h
@@ -0,0 +1,119 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+
+#include <atomic>
+#include <cstdint>
+
+#include "absl/base/config.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateTracker tracks counters for Cord update methods.
+//
+// The purpose of CordzUpdateTracker is to track the number of calls to methods
+// updating Cord data for sampled cords. The class internally uses 'lossy'
+// atomic operations: Cord is thread-compatible, so there is no need to
+// synchronize updates. However, Cordz collection threads may call 'Value()' at
+// any point, so the class needs to provide thread safe access.
+//
+// This class is thread-safe, but per the comments above, all non-const methods
+// should only be used single-threaded: updates are thread-safe but lossy.
+class CordzUpdateTracker {
+ public:
+ // Tracked update methods.
+ enum MethodIdentifier {
+ kUnknown,
+ kAppendCord,
+ kAppendExternalMemory,
+ kAppendString,
+ kAssignCord,
+ kAssignString,
+ kClear,
+ kConstructorCord,
+ kConstructorString,
+ kCordReader,
+ kFlatten,
+ kGetAppendRegion,
+ kMakeCordFromExternal,
+ kMoveAppendCord,
+ kMoveAssignCord,
+ kMovePrependCord,
+ kPrependCord,
+ kPrependString,
+ kRemovePrefix,
+ kRemoveSuffix,
+ kSubCord,
+
+ // kNumMethods defines the number of entries: must be the last entry.
+ kNumMethods,
+ };
+
+ // Constructs a new instance. All counters are zero-initialized.
+ constexpr CordzUpdateTracker() noexcept : values_{} {}
+
+ // Copy constructs a new instance.
+ CordzUpdateTracker(const CordzUpdateTracker& rhs) noexcept { *this = rhs; }
+
+ // Assigns the provided value to this instance.
+ CordzUpdateTracker& operator=(const CordzUpdateTracker& rhs) noexcept {
+ for (int i = 0; i < kNumMethods; ++i) {
+ values_[i].store(rhs.values_[i].load(std::memory_order_relaxed),
+ std::memory_order_relaxed);
+ }
+ return *this;
+ }
+
+ // Returns the value for the specified method.
+ int64_t Value(MethodIdentifier method) const {
+ return values_[method].load(std::memory_order_relaxed);
+ }
+
+ // Increases the value for the specified method by `n`
+ void LossyAdd(MethodIdentifier method, int64_t n = 1) {
+ auto& value = values_[method];
+ value.store(value.load(std::memory_order_relaxed) + n,
+ std::memory_order_relaxed);
+ }
+
+ // Adds all the values from `src` to this instance
+ void LossyAdd(const CordzUpdateTracker& src) {
+ for (int i = 0; i < kNumMethods; ++i) {
+ MethodIdentifier method = static_cast<MethodIdentifier>(i);
+ if (int64_t value = src.Value(method)) {
+ LossyAdd(method, value);
+ }
+ }
+ }
+
+ private:
+ // Until C++20 std::atomic is not constexpr default-constructible, so we need
+ // a wrapper for this class to be constexpr constructible.
+ class Counter : public std::atomic<int64_t> {
+ public:
+ constexpr Counter() noexcept : std::atomic<int64_t>(0) {}
+ };
+
+ Counter values_[kNumMethods];
+};
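+
+// A minimal usage sketch, mirroring cordz_update_tracker_test.cc: each update
+// method on a sampled cord bumps its counter, and collectors read the counts
+// back via Value().
+//
+//   CordzUpdateTracker tracker;
+//   tracker.LossyAdd(CordzUpdateTracker::kAppendString);
+//   int64_t appends = tracker.Value(CordzUpdateTracker::kAppendString);  // 1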
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
diff --git a/absl/strings/internal/cordz_update_tracker_test.cc b/absl/strings/internal/cordz_update_tracker_test.cc
new file mode 100644
index 00000000..fcd17df7
--- /dev/null
+++ b/absl/strings/internal/cordz_update_tracker_test.cc
@@ -0,0 +1,143 @@
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/strings/internal/cordz_update_tracker.h"
+
+#include <array>
+#include <thread> // NOLINT
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/attributes.h"
+#include "absl/base/config.h"
+#include "absl/synchronization/notification.h"
+
+namespace absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+using ::testing::AnyOf;
+using ::testing::Eq;
+
+using Method = CordzUpdateTracker::MethodIdentifier;
+using Methods = std::array<Method, Method::kNumMethods>;
+
+// Returns an array of all methods defined in `MethodIdentifier`
+Methods AllMethods() {
+ return Methods{Method::kUnknown,
+ Method::kAppendCord,
+ Method::kAppendExternalMemory,
+ Method::kAppendString,
+ Method::kAssignCord,
+ Method::kAssignString,
+ Method::kClear,
+ Method::kConstructorCord,
+ Method::kConstructorString,
+ Method::kCordReader,
+ Method::kFlatten,
+ Method::kGetAppendRegion,
+ Method::kMakeCordFromExternal,
+ Method::kMoveAppendCord,
+ Method::kMoveAssignCord,
+ Method::kMovePrependCord,
+ Method::kPrependCord,
+ Method::kPrependString,
+ Method::kRemovePrefix,
+ Method::kRemoveSuffix,
+ Method::kSubCord};
+}
+
+TEST(CordzUpdateTracker, IsConstExprAndInitializesToZero) {
+ constexpr CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ ASSERT_THAT(tracker.Value(method), Eq(0));
+ }
+}
+
+TEST(CordzUpdateTracker, LossyAdd) {
+ int64_t n = 1;
+ CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ tracker.LossyAdd(method, n);
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, CopyConstructor) {
+ int64_t n = 1;
+ CordzUpdateTracker src;
+ for (Method method : AllMethods()) {
+ src.LossyAdd(method, n);
+ n += 2;
+ }
+
+ n = 1;
+ CordzUpdateTracker tracker(src);
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, OperatorAssign) {
+ int64_t n = 1;
+ CordzUpdateTracker src;
+ CordzUpdateTracker tracker;
+ for (Method method : AllMethods()) {
+ src.LossyAdd(method, n);
+ n += 2;
+ }
+
+ n = 1;
+ tracker = src;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+}
+
+TEST(CordzUpdateTracker, ThreadSanitizedValueCheck) {
+ absl::Notification done;
+ CordzUpdateTracker tracker;
+
+ std::thread reader([&done, &tracker] {
+ while (!done.HasBeenNotified()) {
+ int n = 1;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), AnyOf(Eq(n), Eq(0)));
+ n += 2;
+ }
+ }
+ int n = 1;
+ for (Method method : AllMethods()) {
+ EXPECT_THAT(tracker.Value(method), Eq(n));
+ n += 2;
+ }
+ });
+
+ int64_t n = 1;
+ for (Method method : AllMethods()) {
+ tracker.LossyAdd(method, n);
+ n += 2;
+ }
+ done.Notify();
+ reader.join();
+}
+
+} // namespace
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace absl
diff --git a/absl/strings/internal/str_format/arg.h b/absl/strings/internal/str_format/arg.h
index 7040c866..3c91be70 100644
--- a/absl/strings/internal/str_format/arg.h
+++ b/absl/strings/internal/str_format/arg.h
@@ -122,6 +122,14 @@ StringConvertResult FormatConvertImpl(const std::string& v,
StringConvertResult FormatConvertImpl(string_view v,
FormatConversionSpecImpl conv,
FormatSinkImpl* sink);
+#if defined(ABSL_HAVE_STD_STRING_VIEW) && !defined(ABSL_USES_STD_STRING_VIEW)
+inline StringConvertResult FormatConvertImpl(std::string_view v,
+ FormatConversionSpecImpl conv,
+ FormatSinkImpl* sink) {
+ return FormatConvertImpl(absl::string_view(v.data(), v.size()), conv, sink);
+}
+#endif // ABSL_HAVE_STD_STRING_VIEW && !ABSL_USES_STD_STRING_VIEW
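+
+// A minimal sketch of what the overload above enables (convert_test.cc in this
+// change exercises the same path):
+//
+//   absl::StrFormat("%s", std::string_view("hello"));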
+
ArgConvertResult<FormatConversionCharSetUnion(
FormatConversionCharSetInternal::s, FormatConversionCharSetInternal::p)>
FormatConvertImpl(const char* v, const FormatConversionSpecImpl conv,
diff --git a/absl/strings/internal/str_format/convert_test.cc b/absl/strings/internal/str_format/convert_test.cc
index 926283cf..91e03609 100644
--- a/absl/strings/internal/str_format/convert_test.cc
+++ b/absl/strings/internal/str_format/convert_test.cc
@@ -229,6 +229,9 @@ TEST_F(FormatConvertTest, BasicString) {
TestStringConvert(static_cast<const char*>("hello"));
TestStringConvert(std::string("hello"));
TestStringConvert(string_view("hello"));
+#if defined(ABSL_HAVE_STD_STRING_VIEW)
+ TestStringConvert(std::string_view("hello"));
+#endif // ABSL_HAVE_STD_STRING_VIEW
}
TEST_F(FormatConvertTest, NullString) {
diff --git a/absl/strings/internal/str_split_internal.h b/absl/strings/internal/str_split_internal.h
index a2f41c15..17c1bfe8 100644
--- a/absl/strings/internal/str_split_internal.h
+++ b/absl/strings/internal/str_split_internal.h
@@ -32,7 +32,7 @@
#include <array>
#include <initializer_list>
#include <iterator>
-#include <map>
+#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
@@ -182,6 +182,13 @@ template <typename T>
struct HasConstIterator<T, absl::void_t<typename T::const_iterator>>
: std::true_type {};
+// HasEmplace<T>::value is true iff there exists a method T::emplace().
+template <typename T, typename = void>
+struct HasEmplace : std::false_type {};
+template <typename T>
+struct HasEmplace<T, absl::void_t<decltype(std::declval<T>().emplace())>>
+ : std::true_type {};
+
// IsInitializerList<T>::value is true iff T is an std::initializer_list. More
// details below in Splitter<> where this is used.
std::false_type IsInitializerListDispatch(...); // default: No
@@ -372,50 +379,43 @@ class Splitter {
// value.
template <typename Container, typename First, typename Second>
struct ConvertToContainer<Container, std::pair<const First, Second>, true> {
+ using iterator = typename Container::iterator;
+
Container operator()(const Splitter& splitter) const {
Container m;
- typename Container::iterator it;
+ iterator it;
bool insert = true;
- for (const auto& sp : splitter) {
+ for (const absl::string_view sv : splitter) {
if (insert) {
- it = Inserter<Container>::Insert(&m, First(sp), Second());
+ it = InsertOrEmplace(&m, sv);
} else {
- it->second = Second(sp);
+ it->second = Second(sv);
}
insert = !insert;
}
return m;
}
- // Inserts the key and value into the given map, returning an iterator to
- // the inserted item. Specialized for std::map and std::multimap to use
- // emplace() and adapt emplace()'s return value.
- template <typename Map>
- struct Inserter {
- using M = Map;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->insert(std::make_pair(std::forward<Args>(args)...)).first;
- }
- };
-
- template <typename... Ts>
- struct Inserter<std::map<Ts...>> {
- using M = std::map<Ts...>;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->emplace(std::make_pair(std::forward<Args>(args)...)).first;
- }
- };
-
- template <typename... Ts>
- struct Inserter<std::multimap<Ts...>> {
- using M = std::multimap<Ts...>;
- template <typename... Args>
- static typename M::iterator Insert(M* m, Args&&... args) {
- return m->emplace(std::make_pair(std::forward<Args>(args)...));
- }
- };
+ // Inserts the key and an empty value into the map, returning an iterator to
+ // the inserted item. We use emplace() if available, otherwise insert().
+ template <typename M>
+ static absl::enable_if_t<HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, absl::string_view key) {
+ // Use piecewise_construct to support old versions of gcc in which the pair
+ // constructor can't otherwise construct a string from a string_view.
+ return ToIter(m->emplace(std::piecewise_construct, std::make_tuple(key),
+ std::tuple<>()));
+ }
+ template <typename M>
+ static absl::enable_if_t<!HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, absl::string_view key) {
+ return ToIter(m->insert(std::make_pair(First(key), Second(""))));
+ }
+
+ static iterator ToIter(std::pair<iterator, bool> pair) {
+ return pair.first;
+ }
+ static iterator ToIter(iterator iter) { return iter; }
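+
+ // Conversion sketch (hypothetical input, same mechanism as the tests): the
+ // splitter alternates key and value elements, so
+ //   std::map<std::string, std::string> m = absl::StrSplit("a,1,b,2", ',');
+ // yields {{"a", "1"}, {"b", "2"}} via InsertOrEmplace() above.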
};
StringType text_;
diff --git a/absl/strings/str_split_test.cc b/absl/strings/str_split_test.cc
index 7f7c097f..f472f9ed 100644
--- a/absl/strings/str_split_test.cc
+++ b/absl/strings/str_split_test.cc
@@ -29,6 +29,8 @@
#include "gtest/gtest.h"
#include "absl/base/dynamic_annotations.h"
#include "absl/base/macros.h"
+#include "absl/container/btree_map.h"
+#include "absl/container/btree_set.h"
#include "absl/container/flat_hash_map.h"
#include "absl/container/node_hash_map.h"
#include "absl/strings/numbers.h"
@@ -405,6 +407,10 @@ TEST(Splitter, ConversionOperator) {
TestConversionOperator<std::set<std::string>>(splitter);
TestConversionOperator<std::multiset<absl::string_view>>(splitter);
TestConversionOperator<std::multiset<std::string>>(splitter);
+ TestConversionOperator<absl::btree_set<absl::string_view>>(splitter);
+ TestConversionOperator<absl::btree_set<std::string>>(splitter);
+ TestConversionOperator<absl::btree_multiset<absl::string_view>>(splitter);
+ TestConversionOperator<absl::btree_multiset<std::string>>(splitter);
TestConversionOperator<std::unordered_set<std::string>>(splitter);
// Tests conversion to map-like objects.
@@ -421,6 +427,22 @@ TEST(Splitter, ConversionOperator) {
TestMapConversionOperator<std::multimap<std::string, absl::string_view>>(
splitter);
TestMapConversionOperator<std::multimap<std::string, std::string>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_map<absl::string_view, absl::string_view>>(splitter);
+ TestMapConversionOperator<absl::btree_map<absl::string_view, std::string>>(
+ splitter);
+ TestMapConversionOperator<absl::btree_map<std::string, absl::string_view>>(
+ splitter);
+ TestMapConversionOperator<absl::btree_map<std::string, std::string>>(
+ splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<absl::string_view, absl::string_view>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<absl::string_view, std::string>>(splitter);
+ TestMapConversionOperator<
+ absl::btree_multimap<std::string, absl::string_view>>(splitter);
+ TestMapConversionOperator<absl::btree_multimap<std::string, std::string>>(
+ splitter);
TestMapConversionOperator<std::unordered_map<std::string, std::string>>(
splitter);
TestMapConversionOperator<
diff --git a/absl/strings/string_view.h b/absl/strings/string_view.h
index 5260b5b7..1f14a758 100644
--- a/absl/strings/string_view.h
+++ b/absl/strings/string_view.h
@@ -36,6 +36,7 @@
#include <limits>
#include <string>
+#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/throw_delegate.h"
#include "absl/base/macros.h"
@@ -180,8 +181,8 @@ class string_view {
template <typename Allocator>
string_view( // NOLINT(runtime/explicit)
- const std::basic_string<char, std::char_traits<char>, Allocator>&
- str) noexcept
+ const std::basic_string<char, std::char_traits<char>, Allocator>& str
+ ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept
// This is implemented in terms of `string_view(p, n)` so `str.size()`
// doesn't need to be reevaluated after `ptr_` is set.
: string_view(str.data(), str.size()) {}
@@ -398,12 +399,10 @@ class string_view {
// string_view::compare()
//
- // Performs a lexicographical comparison between the `string_view` and
- // another `absl::string_view`, returning -1 if `this` is less than, 0 if
- // `this` is equal to, and 1 if `this` is greater than the passed string
- // view. Note that in the case of data equality, a further comparison is made
- // on the respective sizes of the two `string_view`s to determine which is
- // smaller, equal, or greater.
+ // Performs a lexicographical comparison between this `string_view` and
+ // another `string_view` `x`, returning a negative value if `*this` is less
+ // than `x`, 0 if `*this` is equal to `x`, and a positive value if `*this`
+ // is greater than `x`.
constexpr int compare(string_view x) const noexcept {
return CompareImpl(length_, x.length_,
Min(length_, x.length_) == 0
diff --git a/absl/synchronization/BUILD.bazel b/absl/synchronization/BUILD.bazel
index 5ce16958..92e2448d 100644
--- a/absl/synchronization/BUILD.bazel
+++ b/absl/synchronization/BUILD.bazel
@@ -136,6 +136,21 @@ cc_test(
],
)
+cc_binary(
+ name = "blocking_counter_benchmark",
+ testonly = 1,
+ srcs = ["blocking_counter_benchmark.cc"],
+ copts = ABSL_TEST_COPTS,
+ linkopts = ABSL_DEFAULT_LINKOPTS,
+ tags = ["benchmark"],
+ visibility = ["//visibility:private"],
+ deps = [
+ ":synchronization",
+ ":thread_pool",
+ "@com_github_google_benchmark//:benchmark_main",
+ ],
+)
+
cc_test(
name = "graphcycles_test",
size = "medium",
diff --git a/absl/synchronization/blocking_counter_benchmark.cc b/absl/synchronization/blocking_counter_benchmark.cc
new file mode 100644
index 00000000..b504d1a5
--- /dev/null
+++ b/absl/synchronization/blocking_counter_benchmark.cc
@@ -0,0 +1,83 @@
+// Copyright 2021 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <limits>
+
+#include "absl/synchronization/blocking_counter.h"
+#include "absl/synchronization/internal/thread_pool.h"
+#include "benchmark/benchmark.h"
+
+namespace {
+
+void BM_BlockingCounter_SingleThread(benchmark::State& state) {
+ for (auto _ : state) {
+ int iterations = state.range(0);
+ absl::BlockingCounter counter{iterations};
+ for (int i = 0; i < iterations; ++i) {
+ counter.DecrementCount();
+ }
+ counter.Wait();
+ }
+}
+BENCHMARK(BM_BlockingCounter_SingleThread)
+ ->ArgName("iterations")
+ ->Arg(2)
+ ->Arg(4)
+ ->Arg(16)
+ ->Arg(64)
+ ->Arg(256);
+
+void BM_BlockingCounter_DecrementCount(benchmark::State& state) {
+ static absl::BlockingCounter* counter =
+ new absl::BlockingCounter{std::numeric_limits<int>::max()};
+ for (auto _ : state) {
+ counter->DecrementCount();
+ }
+}
+BENCHMARK(BM_BlockingCounter_DecrementCount)
+ ->Threads(2)
+ ->Threads(4)
+ ->Threads(6)
+ ->Threads(8)
+ ->Threads(10)
+ ->Threads(12)
+ ->Threads(16)
+ ->Threads(32)
+ ->Threads(64)
+ ->Threads(128);
+
+void BM_BlockingCounter_Wait(benchmark::State& state) {
+ int num_threads = state.range(0);
+ absl::synchronization_internal::ThreadPool pool(num_threads);
+ for (auto _ : state) {
+ absl::BlockingCounter counter{num_threads};
+ pool.Schedule([num_threads, &counter, &pool]() {
+ for (int i = 0; i < num_threads; ++i) {
+ pool.Schedule([&counter]() { counter.DecrementCount(); });
+ }
+ });
+ counter.Wait();
+ }
+}
+BENCHMARK(BM_BlockingCounter_Wait)
+ ->ArgName("threads")
+ ->Arg(2)
+ ->Arg(4)
+ ->Arg(8)
+ ->Arg(16)
+ ->Arg(32)
+ ->Arg(64)
+ ->Arg(128);
+
+} // namespace
diff --git a/absl/synchronization/internal/per_thread_sem_test.cc b/absl/synchronization/internal/per_thread_sem_test.cc
index 8cf59e64..db1184e6 100644
--- a/absl/synchronization/internal/per_thread_sem_test.cc
+++ b/absl/synchronization/internal/per_thread_sem_test.cc
@@ -159,7 +159,7 @@ TEST_F(PerThreadSemTest, Timeouts) {
const absl::Duration elapsed = absl::Now() - start;
// Allow for a slight early return, to account for quality of implementation
// issues on various platforms.
- const absl::Duration slop = absl::Microseconds(200);
+ const absl::Duration slop = absl::Milliseconds(1);
EXPECT_LE(delay - slop, elapsed)
<< "Wait returned " << delay - elapsed
<< " early (with " << slop << " slop), start time was " << start;
diff --git a/absl/synchronization/internal/waiter.cc b/absl/synchronization/internal/waiter.cc
index 2123be60..28ef311e 100644
--- a/absl/synchronization/internal/waiter.cc
+++ b/absl/synchronization/internal/waiter.cc
@@ -79,6 +79,7 @@ bool Waiter::Wait(KernelTimeout t) {
// Note that, since the thread ticker is just reset, we don't need to check
// whether the thread is idle on the very first pass of the loop.
bool first_pass = true;
+
while (true) {
int32_t x = futex_.load(std::memory_order_relaxed);
while (x != 0) {
@@ -90,7 +91,6 @@ bool Waiter::Wait(KernelTimeout t) {
return true; // Consumed a wakeup, we are done.
}
-
if (!first_pass) MaybeBecomeIdle();
const int err = Futex::WaitUntil(&futex_, 0, t);
if (err != 0) {
diff --git a/absl/time/civil_time.cc b/absl/time/civil_time.cc
index bdfe9ce0..6a231edb 100644
--- a/absl/time/civil_time.cc
+++ b/absl/time/civil_time.cc
@@ -38,9 +38,7 @@ std::string FormatYearAnd(string_view fmt, CivilSecond cs) {
const CivilSecond ncs(NormalizeYear(cs.year()), cs.month(), cs.day(),
cs.hour(), cs.minute(), cs.second());
const TimeZone utc = UTCTimeZone();
- // TODO(absl-team): Avoid conversion of fmt string.
- return StrCat(cs.year(),
- FormatTime(std::string(fmt), FromCivil(ncs, utc), utc));
+ return StrCat(cs.year(), FormatTime(fmt, FromCivil(ncs, utc), utc));
}
template <typename CivilT>
diff --git a/absl/time/duration_test.cc b/absl/time/duration_test.cc
index fb28fa98..a3617e74 100644
--- a/absl/time/duration_test.cc
+++ b/absl/time/duration_test.cc
@@ -1320,7 +1320,7 @@ TEST(Duration, SmallConversions) {
EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(0));
// TODO(bww): Is the next one OK?
- EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(0.124999999e-9));
+ EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(std::nextafter(0.125e-9, 0)));
EXPECT_EQ(absl::Nanoseconds(1) / 4, absl::Seconds(0.125e-9));
EXPECT_EQ(absl::Nanoseconds(1) / 4, absl::Seconds(0.250e-9));
EXPECT_EQ(absl::Nanoseconds(1) / 2, absl::Seconds(0.375e-9));
@@ -1330,7 +1330,7 @@ TEST(Duration, SmallConversions) {
EXPECT_EQ(absl::Nanoseconds(1), absl::Seconds(0.875e-9));
EXPECT_EQ(absl::Nanoseconds(1), absl::Seconds(1.000e-9));
- EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(-0.124999999e-9));
+ EXPECT_EQ(absl::ZeroDuration(), absl::Seconds(std::nextafter(-0.125e-9, 0)));
EXPECT_EQ(-absl::Nanoseconds(1) / 4, absl::Seconds(-0.125e-9));
EXPECT_EQ(-absl::Nanoseconds(1) / 4, absl::Seconds(-0.250e-9));
EXPECT_EQ(-absl::Nanoseconds(1) / 2, absl::Seconds(-0.375e-9));
diff --git a/absl/time/internal/cctz/src/time_zone_fixed.cc b/absl/time/internal/cctz/src/time_zone_fixed.cc
index 303c0244..f2b3294e 100644
--- a/absl/time/internal/cctz/src/time_zone_fixed.cc
+++ b/absl/time/internal/cctz/src/time_zone_fixed.cc
@@ -53,7 +53,7 @@ int Parse02d(const char* p) {
} // namespace
bool FixedOffsetFromName(const std::string& name, seconds* offset) {
- if (name.compare(0, std::string::npos, "UTC", 3) == 0) {
+ if (name == "UTC" || name == "UTC0") {
*offset = seconds::zero();
return true;
}
diff --git a/absl/time/internal/cctz/src/time_zone_lookup_test.cc b/absl/time/internal/cctz/src/time_zone_lookup_test.cc
index 9a1a8d6e..6948c3ea 100644
--- a/absl/time/internal/cctz/src/time_zone_lookup_test.cc
+++ b/absl/time/internal/cctz/src/time_zone_lookup_test.cc
@@ -717,6 +717,18 @@ TEST(TimeZones, LoadZonesConcurrently) {
}
#endif
+TEST(TimeZone, UTC) {
+ const time_zone utc = utc_time_zone();
+
+ time_zone loaded_utc;
+ EXPECT_TRUE(load_time_zone("UTC", &loaded_utc));
+ EXPECT_EQ(loaded_utc, utc);
+
+ time_zone loaded_utc0;
+ EXPECT_TRUE(load_time_zone("UTC0", &loaded_utc0));
+ EXPECT_EQ(loaded_utc0, utc);
+}
+
TEST(TimeZone, NamedTimeZones) {
const time_zone utc = utc_time_zone();
EXPECT_EQ("UTC", utc.name());
diff --git a/absl/time/time.h b/absl/time/time.h
index 2df68581..48982df4 100644
--- a/absl/time/time.h
+++ b/absl/time/time.h
@@ -1352,7 +1352,7 @@ constexpr Duration MakeDuration(int64_t hi, int64_t lo) {
inline Duration MakePosDoubleDuration(double n) {
const int64_t int_secs = static_cast<int64_t>(n);
const uint32_t ticks = static_cast<uint32_t>(
- (n - static_cast<double>(int_secs)) * kTicksPerSecond + 0.5);
+ std::round((n - static_cast<double>(int_secs)) * kTicksPerSecond));
return ticks < kTicksPerSecond
? MakeDuration(int_secs, ticks)
: MakeDuration(int_secs + 1, ticks - kTicksPerSecond);
diff --git a/absl/types/span.h b/absl/types/span.h
index 95fe7926..41db3420 100644
--- a/absl/types/span.h
+++ b/absl/types/span.h
@@ -243,8 +243,8 @@ class Span {
//
template <typename LazyT = T,
typename = EnableIfConstView<LazyT>>
- Span(
- std::initializer_list<value_type> v) noexcept // NOLINT(runtime/explicit)
+ Span(std::initializer_list<value_type> v
+ ABSL_ATTRIBUTE_LIFETIME_BOUND) noexcept // NOLINT(runtime/explicit)
: Span(v.begin(), v.size()) {}
// Accessors
diff --git a/ci/macos_xcode_bazel.sh b/ci/macos_xcode_bazel.sh
index 738adf94..9e14e660 100755
--- a/ci/macos_xcode_bazel.sh
+++ b/ci/macos_xcode_bazel.sh
@@ -24,7 +24,7 @@ if [[ -z ${ABSEIL_ROOT:-} ]]; then
fi
# If we are running on Kokoro, check for a versioned Bazel binary.
-KOKORO_GFILE_BAZEL_BIN="bazel-2.0.0-darwin-x86_64"
+KOKORO_GFILE_BAZEL_BIN="bazel-3.7.0-darwin-x86_64"
if [[ ${KOKORO_GFILE_DIR:-} ]] && [[ -f ${KOKORO_GFILE_DIR}/${KOKORO_GFILE_BAZEL_BIN} ]]; then
BAZEL_BIN="${KOKORO_GFILE_DIR}/${KOKORO_GFILE_BAZEL_BIN}"
chmod +x ${BAZEL_BIN}
diff --git a/create_lts.py b/create_lts.py
index a98c76b4..302812ad 100755
--- a/create_lts.py
+++ b/create_lts.py
@@ -108,11 +108,16 @@ def main(argv):
'project(absl LANGUAGES CXX)':
'project(absl LANGUAGES CXX VERSION {})'.format(datestamp)
})
- # Set the SOVERSION to YYYYMMDD.0.0 - The first 0 means we only have
- # ABI compatible changes, and the second 0 means we can increment it
- # to mark changes as ABI-compatible, for patch releases.
- ReplaceStringsInFile('CMake/AbseilHelpers.cmake',
- {'SOVERSION 0': 'SOVERSION "{}.0.0"'.format(datestamp)})
+ # Set the SOVERSION to YYMM.0.0 - The first 0 means we only have ABI
+ # compatible changes, and the second 0 means we can increment it to
+ # mark changes as ABI-compatible, for patch releases. Note that we
+ # only use the last two digits of the year and the month because the
+ # MacOS linker requires the first part of the SOVERSION to fit into
+ # 16 bits.
+ # https://www.sicpers.info/2013/03/how-to-version-a-mach-o-library/
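+ # For illustration (assuming a YYYYMMDD datestamp): 20210324 would yield
+ # SOVERSION "2103.0.0".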
+ ReplaceStringsInFile(
+ 'CMake/AbseilHelpers.cmake',
+ {'SOVERSION 0': 'SOVERSION "{}.0.0"'.format(datestamp[2:6])})
StripContentBetweenTags('CMakeLists.txt', '# absl:lts-remove-begin',
'# absl:lts-remove-end')