diff --git a/.gitignore b/.gitignore
index 5d948f008..3b3c29567 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,3 +32,4 @@ folly/m4/lt~obsolete.m4
 folly/generate_fingerprint_tables
 folly/FingerprintTables.cpp
 _build
+/release/
diff --git a/CMake/FollyCompilerUnix.cmake b/CMake/FollyCompilerUnix.cmake
index f10e1c320..05119cd58 100644
--- a/CMake/FollyCompilerUnix.cmake
+++ b/CMake/FollyCompilerUnix.cmake
@@ -22,7 +22,7 @@
 # libraries that aren't using gnu++1z yet, provide an option to let them still
 # override this with gnu++14 if they need to.
 set(
-  CXX_STD "gnu++1z"
+  CXX_STD "c++20"
   CACHE STRING
     "The C++ standard argument to pass to the compiler. Defaults to gnu++1z"
 )
diff --git a/CMake/folly-deps.cmake b/CMake/folly-deps.cmake
index 989259a87..49009864b 100644
--- a/CMake/folly-deps.cmake
+++ b/CMake/folly-deps.cmake
@@ -153,12 +153,23 @@ if (PYTHON_EXTENSIONS)
   find_package(Cython 0.26 REQUIRED)
 endif ()
 
-find_package(LibUnwind)
-list(APPEND FOLLY_LINK_LIBRARIES ${LIBUNWIND_LIBRARIES})
-list(APPEND FOLLY_INCLUDE_DIRECTORIES ${LIBUNWIND_INCLUDE_DIRS})
-if (LIBUNWIND_FOUND)
-  set(FOLLY_HAVE_LIBUNWIND ON)
+option(
+  FOLLY_DISABLE_LIBUNWIND
+  "Do not try to find libunwind"
+  OFF
+)
+
+if (NOT FOLLY_DISABLE_LIBUNWIND)
+  find_package(LibUnwind)
+  list(APPEND FOLLY_LINK_LIBRARIES ${LIBUNWIND_LIBRARIES})
+  list(APPEND FOLLY_INCLUDE_DIRECTORIES ${LIBUNWIND_INCLUDE_DIRS})
+  if (LIBUNWIND_FOUND)
+    set(FOLLY_HAVE_LIBUNWIND ON)
+  endif()
+else()
+  set(FOLLY_HAVE_LIBUNWIND OFF)
 endif()
+
 if (CMAKE_SYSTEM_NAME MATCHES "FreeBSD")
   list(APPEND FOLLY_LINK_LIBRARIES "execinfo")
 endif ()
@@ -299,12 +310,12 @@ endif()
 
 add_library(folly_deps INTERFACE)
 
-find_package(fmt CONFIG)
-if (NOT DEFINED fmt_CONFIG)
-  # Fallback on a normal search on the current system
-  find_package(Fmt MODULE REQUIRED)
-endif()
-target_link_libraries(folly_deps INTERFACE fmt::fmt)
+# find_package(fmt CONFIG)
+# if (NOT DEFINED fmt_CONFIG)
+#   # Fallback on a normal search on the current system
+#   find_package(Fmt MODULE REQUIRED)
+# endif()
+target_link_libraries(folly_deps INTERFACE fmt)
 
 list(REMOVE_DUPLICATES FOLLY_INCLUDE_DIRECTORIES)
 target_include_directories(folly_deps INTERFACE ${FOLLY_INCLUDE_DIRECTORIES})
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 85ba70d3b..2720b66e0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -90,7 +90,7 @@ set(FOLLY_SUPPORT_SHARED_LIBRARY "${BUILD_SHARED_LIBS}")
 include(FBBuildOptions)
 fb_activate_static_library_option()
 
-set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD 20)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
 if(NOT DEFINED IS_X86_64_ARCH AND ${CMAKE_SYSTEM_PROCESSOR} MATCHES "x86_64|AMD64")
@@ -400,17 +400,20 @@ include(GenPkgConfig)
 gen_pkgconfig_vars(FOLLY_PKGCONFIG folly_deps)
 
 target_include_directories(folly_deps
+  SYSTEM
   BEFORE
   INTERFACE
     $
     $
 )
 
 target_include_directories(folly_deps
+  SYSTEM
   INTERFACE
     $
 )
 
 target_include_directories(folly_base
+  SYSTEM
   PUBLIC
     $
 )
@@ -481,13 +484,13 @@ install(
   DESTINATION ${CMAKE_INSTALL_DIR}
   COMPONENT dev
 )
-install(
-  EXPORT folly
-  DESTINATION ${CMAKE_INSTALL_DIR}
-  NAMESPACE Folly::
-  FILE folly-targets.cmake
-  COMPONENT dev
-)
+# install(
+#   EXPORT folly
+#   DESTINATION ${CMAKE_INSTALL_DIR}
+#   NAMESPACE Folly::
+#   FILE folly-targets.cmake
+#   COMPONENT dev
+# )
 
 # Generate a pkg-config file so that downstream projects that don't use
 # CMake can depend on folly using pkg-config.
diff --git a/folly/FBString.h b/folly/FBString.h
index f0bf32c7b..6820c23ca 100644
--- a/folly/FBString.h
+++ b/folly/FBString.h
@@ -685,9 +685,9 @@ inline void fbstring_core<Char>::initSmall(
   // If data is aligned, use fast word-wise copying. Otherwise,
   // use conservative memcpy.
   // The word-wise path reads bytes which are outside the range of
-  // the string, and makes ASan unhappy, so we disable it when
-  // compiling with ASan.
-#ifndef FOLLY_SANITIZE_ADDRESS
+  // the string, and makes ASan/TSan unhappy, so we disable it when
+  // compiling with ASan/TSan.
+#if not (defined(FOLLY_SANITIZE_ADDRESS) || defined(FOLLY_SANITIZE_THREAD))
   if ((reinterpret_cast<size_t>(data) & (sizeof(size_t) - 1)) == 0) {
     const size_t byteSize = size * sizeof(Char);
     constexpr size_t wordWidth = sizeof(size_t);
diff --git a/folly/concurrency/ConcurrentHashMap.h b/folly/concurrency/ConcurrentHashMap.h
index 6fe920bbd..77a9da4db 100644
--- a/folly/concurrency/ConcurrentHashMap.h
+++ b/folly/concurrency/ConcurrentHashMap.h
@@ -126,7 +126,7 @@ template <
     typename ValueType,
     typename HashFn = std::hash<KeyType>,
     typename KeyEqual = std::equal_to<KeyType>,
-    typename Allocator = std::allocator<uint8_t>,
+    template <typename> class Allocator = std::allocator,
     uint8_t ShardBits = 8,
     template <typename> class Atom = std::atomic,
     class Mutex = std::mutex,
@@ -136,7 +136,8 @@ template <
         uint8_t,
         typename,
         typename,
-        typename,
+        template <typename>
+        class,
         template <typename> class,
         class>
@@ -152,8 +153,6 @@ class ConcurrentHashMap {
       Atom,
      Mutex,
      Impl>;
-  using SegmentTAllocator = typename std::allocator_traits<
-      Allocator>::template rebind_alloc<SegmentT>;
   template <typename K, typename T>
   using EnableHeterogeneousFind = std::enable_if_t<
       detail::EligibleForHeterogeneousFind<KeyType, K, KeyEqual, HashFn>::value,
@@ -237,7 +236,7 @@ class ConcurrentHashMap {
       auto seg = segments_[i].load(std::memory_order_relaxed);
       if (seg) {
         seg->~SegmentT();
-        SegmentTAllocator().deallocate(seg, 1);
+        Allocator<SegmentT>().deallocate(seg, 1);
       }
       segments_[i].store(
           o.segments_[i].load(std::memory_order_relaxed),
@@ -265,7 +264,7 @@ class ConcurrentHashMap {
       auto seg = segments_[i].load(std::memory_order_relaxed);
       if (seg) {
         seg->~SegmentT();
-        SegmentTAllocator().deallocate(seg, 1);
+        Allocator<SegmentT>().deallocate(seg, 1);
       }
     }
     cohort_shutdown_cleanup();
@@ -342,7 +341,7 @@ class ConcurrentHashMap {
   template <typename... Args>
   std::pair<ConstIterator, bool> emplace(Args&&... args) {
     using Node = typename SegmentT::Node;
-    auto node = (Node*)Allocator().allocate(sizeof(Node));
+    auto node = Allocator<Node>().allocate(1);
     new (node) Node(ensureCohort(), std::forward<Args>(args)...);
     auto h = HashFn{}(node->getItem().first);
     auto segment = pickSegment(h);
@@ -354,7 +353,7 @@ class ConcurrentHashMap {
         res.first.it_, h, node->getItem().first, node);
     if (!res.second) {
       node->~Node();
-      Allocator().deallocate((uint8_t*)node, sizeof(Node));
+      Allocator<Node>().deallocate(node, 1);
     }
     return res;
   }
@@ -729,13 +728,13 @@ class ConcurrentHashMap {
     SegmentT* seg = segments_[i].load(std::memory_order_acquire);
     if (!seg) {
       auto b = ensureCohort();
-      SegmentT* newseg = SegmentTAllocator().allocate(1);
+      SegmentT* newseg = Allocator<SegmentT>().allocate(1);
       newseg = new (newseg)
           SegmentT(size_ >> ShardBits, load_factor_, max_size_ >> ShardBits, b);
       if (!segments_[i].compare_exchange_strong(seg, newseg)) {
         // seg is updated with new value, delete ours.
         newseg->~SegmentT();
-        SegmentTAllocator().deallocate(newseg, 1);
+        Allocator<SegmentT>().deallocate(newseg, 1);
       } else {
         seg = newseg;
         updateBeginAndEndSegments(i);
       }
@@ -765,13 +764,13 @@ class ConcurrentHashMap {
   hazptr_obj_cohort<Atom>* ensureCohort() const {
     auto b = cohort();
     if (!b) {
-      auto storage = Allocator().allocate(sizeof(hazptr_obj_cohort<Atom>));
+      auto storage = Allocator<hazptr_obj_cohort<Atom>>().allocate(1);
       auto newcohort = new (storage) hazptr_obj_cohort<Atom>();
       if (cohort_.compare_exchange_strong(b, newcohort)) {
         b = newcohort;
       } else {
         newcohort->~hazptr_obj_cohort();
-        Allocator().deallocate(storage, sizeof(hazptr_obj_cohort<Atom>));
+        Allocator<hazptr_obj_cohort<Atom>>().deallocate(storage, 1);
       }
     }
     return b;
@@ -781,7 +780,7 @@ class ConcurrentHashMap {
     auto b = cohort();
     if (b) {
       b->~hazptr_obj_cohort();
-      Allocator().deallocate((uint8_t*)b, sizeof(hazptr_obj_cohort<Atom>));
+      Allocator<hazptr_obj_cohort<Atom>>().deallocate(b, 1);
     }
   }
@@ -798,7 +797,7 @@ template <
     typename ValueType,
     typename HashFn = std::hash<KeyType>,
     typename KeyEqual = std::equal_to<KeyType>,
-    typename Allocator = std::allocator<uint8_t>,
+    template <typename> class Allocator = std::allocator,
     uint8_t ShardBits = 8,
     template <typename> class Atom = std::atomic,
     class Mutex = std::mutex>
diff --git a/folly/concurrency/detail/ConcurrentHashMap-detail.h b/folly/concurrency/detail/ConcurrentHashMap-detail.h
index 302c3b0cf..d47d7a3cb 100644
--- a/folly/concurrency/detail/ConcurrentHashMap-detail.h
+++ b/folly/concurrency/detail/ConcurrentHashMap-detail.h
@@ -48,6 +48,7 @@ enum class InsertType {
 template <
     typename KeyType,
     typename ValueType,
+    template <typename>
     typename Allocator,
     template <typename>
     class Atom,
@@ -76,6 +77,7 @@ class ValueHolder {
 template <
     typename KeyType,
     typename ValueType,
+    template <typename>
     typename Allocator,
     template <typename>
     class Atom>
@@ -130,7 +132,7 @@ class ValueHolder<
   template <typename Arg, typename... Args>
   ValueHolder(std::piecewise_construct_t, Arg&& k, Args&&... args) {
-    item_ = (CountedItem*)Allocator().allocate(sizeof(CountedItem));
+    item_ = Allocator<CountedItem>().allocate(1);
     new (item_) CountedItem(
         std::piecewise_construct,
         std::forward<Arg>(k),
@@ -141,7 +143,7 @@
     DCHECK(item_);
     if (item_->releaseLink()) {
       item_->~CountedItem();
-      Allocator().deallocate((uint8_t*)item_, sizeof(CountedItem));
+      Allocator<CountedItem>().deallocate(item_, 1);
     }
   }
@@ -152,13 +154,13 @@
 }; // ValueHolder specialization
 
 // hazptr deleter that can use an allocator.
-template <typename Allocator>
+template