Skip to content

Commit

Permalink
[chore](compile) using PCH for compilation acceleration under clang (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
deadlinefen authored May 8, 2023
1 parent af04c3a commit e08de52
Show file tree
Hide file tree
Showing 59 changed files with 825 additions and 249 deletions.
1 change: 1 addition & 0 deletions .licenserc.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ header:
- "be/src/util/sse2neon.h"
- "be/src/util/utf8_check.cpp"
- "be/src/util/cityhash102"
- "be/src/pch/*"
- "build-support/run_clang_format.py"
- "regression-test/data"
- "docs/.vuepress/public/css/animate.min.css"
Expand Down
26 changes: 23 additions & 3 deletions be/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -652,11 +652,11 @@ include_directories(
${THIRDPARTY_DIR}/include
${GPERFTOOLS_HOME}/include
)
include_directories($ENV{JAVA_HOME}/include)
include_directories(${DORIS_JAVA_HOME}/include)
if (NOT OS_MACOSX)
include_directories($ENV{JAVA_HOME}/include/linux)
include_directories(${DORIS_JAVA_HOME}/include/linux)
else()
include_directories($ENV{JAVA_HOME}/include/darwin)
include_directories(${DORIS_JAVA_HOME}/include/darwin)
endif()

if (NOT OS_MACOSX)
Expand Down Expand Up @@ -976,6 +976,26 @@ else()
message(STATUS "Link Flags: ${TEST_LINK_LIBS}")
endif()

if (ENABLE_PCH)
    # The compiler flags must be configured BEFORE the `pch` target is
    # created: a target's COMPILE_OPTIONS are initialized from the directory
    # property at add_library() time, so options added afterwards reach the
    # consumers but NOT the compilation that generates the precompiled header
    # itself — and clang refuses to use a PCH whose build flags differ from
    # the including translation unit's flags.
    if (COMPILER_CLANG)
        # Omit the embedded timestamp so the PCH stays valid across rebuilds
        # and plays well with caching (e.g. ccache).
        add_compile_options(-Xclang -fno-pch-timestamp)
    else()
        # GCC: emit #pragma GCC pch_preprocess in preprocessed output so the
        # PCH is still found when -save-temps / -E pipelines are used.
        add_compile_options(-fpch-preprocess)
    endif()

    # Stub static library whose sole purpose is to produce the shared
    # precompiled header (pch/pch.h) that other targets REUSE_FROM via
    # the pch_reuse() helper below.
    add_library(pch STATIC ${SRC_DIR}pch/pch.cc)
    target_precompile_headers(
        pch
        PUBLIC
            ${SRC_DIR}pch/pch.h
    )
endif()

# Attach the shared precompiled header to `target`.
# When ENABLE_PCH is on, `target` reuses the PCH generated by the `pch`
# stub library instead of re-precompiling the headers itself; when PCH is
# disabled this is a no-op, so call sites need no conditional guard.
function(pch_reuse target)
    if (ENABLE_PCH)
        target_precompile_headers(${target} REUSE_FROM pch)
    endif()
endfunction(pch_reuse target)

add_subdirectory(${SRC_DIR}/agent)
add_subdirectory(${SRC_DIR}/common)
add_subdirectory(${SRC_DIR}/exec)
Expand Down
2 changes: 2 additions & 0 deletions be/src/agent/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,5 @@ if (OS_MACOSX)
endif()

add_library(Agent STATIC ${AGENT_SOURCES})

pch_reuse(Agent)
2 changes: 2 additions & 0 deletions be/src/common/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,7 @@ add_library(Common STATIC
exception.cpp
)

pch_reuse(Common)

# Generate env_config.h according to env_config.h.in
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/env_config.h.in ${GENSRC_DIR}/common/env_config.h)
2 changes: 2 additions & 0 deletions be/src/exec/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -67,3 +67,5 @@ endif()
add_library(Exec STATIC
${EXEC_FILES}
)

pch_reuse(Exec)
2 changes: 2 additions & 0 deletions be/src/exprs/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -30,3 +30,5 @@ add_library(Exprs
string_functions.cpp
json_functions.cpp
)

pch_reuse(Exprs)
2 changes: 1 addition & 1 deletion be/src/gen_cpp/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ add_library(DorisGen STATIC ${SRC_FILES})

# Setting these files as code-generated lets make clean and incremental builds work
# correctly
set_source_files_properties(${SRC_FILES} PROPERTIES GENERATED TRUE)
# set_source_files_properties(${SRC_FILES} PROPERTIES GENERATED TRUE)

#add_dependencies(DorisGen thrift-cpp)
#add_dependencies(Opcode function)
Expand Down
8 changes: 6 additions & 2 deletions be/src/geo/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,17 +21,21 @@ set(LIBRARY_OUTPUT_PATH "${BUILD_DIR}/src/geo")
# where to put generated binaries
set(EXECUTABLE_OUTPUT_PATH "${BUILD_DIR}/src/geo")

add_library(geo_type STATIC geo_types.cpp)

add_library(Geo STATIC
geo_common.cpp
geo_types.cpp
wkt_parse.cpp
wkb_parse.cpp
${GENSRC_DIR}/geo/wkt_lex.l.cpp
${GENSRC_DIR}/geo/wkt_yacc.y.cpp
geo_tobinary.cpp
ByteOrderValues.cpp
machine.h)
machine.h
)
pch_reuse(Geo)

target_link_libraries(Geo geo_type)
include(CheckCXXCompilerFlag)
set(WARNING_OPTION "-Wno-unused-but-set-variable")
check_cxx_compiler_flag(${WARNING_OPTION} HAS_WARNING_OPTION)
Expand Down
9 changes: 5 additions & 4 deletions be/src/gutil/atomicops.h
Original file line number Diff line number Diff line change
Expand Up @@ -66,18 +66,19 @@
// #endif
// ------------------------------------------------------------------------

#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
#define GUTILS_GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)

#define CLANG_VERSION (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__)
#define GUTILS_CLANG_VERSION \
(__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__)

// ThreadSanitizer provides own implementation of atomicops.
#if defined(THREAD_SANITIZER)
#include "gutil/atomicops-internals-tsan.h" // IWYU pragma: export
#elif defined(__GNUC__) && (defined(__i386) || defined(__x86_64__))
#include "gutil/atomicops-internals-x86.h" // IWYU pragma: export
#elif defined(__GNUC__) && GCC_VERSION >= 40700
#elif defined(__GNUC__) && GUTILS_GCC_VERSION >= 40700
#include "gutil/atomicops-internals-gcc.h" // IWYU pragma: export
#elif defined(__clang__) && CLANG_VERSION >= 30400
#elif defined(__clang__) && GUTILS_CLANG_VERSION >= 30400
#include "gutil/atomicops-internals-gcc.h" // IWYU pragma: export
#else
#error You need to implement atomic operations for this architecture
Expand Down
2 changes: 2 additions & 0 deletions be/src/http/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -57,3 +57,5 @@ add_library(Webserver STATIC
action/version_action.cpp
action/jeprofile_actions.cpp
action/file_cache_action.cpp)

pch_reuse(Webserver)
2 changes: 2 additions & 0 deletions be/src/io/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -61,3 +61,5 @@ set(IO_FILES
add_library(IO STATIC
${IO_FILES}
)

pch_reuse(IO)
2 changes: 2 additions & 0 deletions be/src/olap/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -114,3 +114,5 @@ add_library(Olap STATIC
if (NOT USE_MEM_TRACKER)
target_compile_options(Olap PRIVATE -Wno-unused-lambda-capture)
endif()

pch_reuse(Olap)
8 changes: 4 additions & 4 deletions be/src/olap/base_compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ Status BaseCompaction::prepare_compact() {
TRACE("got base compaction lock");

// 1. pick rowsets to compact
RETURN_NOT_OK(pick_rowsets_to_compact());
RETURN_IF_ERROR(pick_rowsets_to_compact());
TRACE("rowsets picked");
TRACE_COUNTER_INCREMENT("input_rowsets_count", _input_rowsets.size());
_tablet->set_clone_occurred(false);
Expand Down Expand Up @@ -87,7 +87,7 @@ Status BaseCompaction::execute_compact_impl() {

// 2. do base compaction, merge rowsets
int64_t permits = get_compaction_permits();
RETURN_NOT_OK(do_compaction(permits));
RETURN_IF_ERROR(do_compaction(permits));
TRACE("compaction finished");

// 3. set state to success
Expand Down Expand Up @@ -126,8 +126,8 @@ void BaseCompaction::_filter_input_rowset() {

Status BaseCompaction::pick_rowsets_to_compact() {
_input_rowsets = _tablet->pick_candidate_rowsets_to_base_compaction();
RETURN_NOT_OK(check_version_continuity(_input_rowsets));
RETURN_NOT_OK(_check_rowset_overlapping(_input_rowsets));
RETURN_IF_ERROR(check_version_continuity(_input_rowsets));
RETURN_IF_ERROR(_check_rowset_overlapping(_input_rowsets));
_filter_input_rowset();
if (_input_rowsets.size() <= 1) {
return Status::Error<BE_NO_SUITABLE_VERSION>();
Expand Down
26 changes: 13 additions & 13 deletions be/src/olap/compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -72,8 +72,8 @@ Compaction::Compaction(const TabletSharedPtr& tablet, const std::string& label)
Compaction::~Compaction() {}

Status Compaction::compact() {
RETURN_NOT_OK(prepare_compact());
RETURN_NOT_OK(execute_compact());
RETURN_IF_ERROR(prepare_compact());
RETURN_IF_ERROR(execute_compact());
return Status::OK();
}

Expand Down Expand Up @@ -165,16 +165,16 @@ bool Compaction::is_rowset_tidy(std::string& pre_max_key, const RowsetSharedPtr&
Status Compaction::do_compact_ordered_rowsets() {
build_basic_info();
RowsetWriterContext ctx;
RETURN_NOT_OK(construct_output_rowset_writer(ctx));
RETURN_IF_ERROR(construct_output_rowset_writer(ctx));

LOG(INFO) << "start to do ordered data compaction, tablet=" << _tablet->full_name()
<< ", output_version=" << _output_version;
// link data to new rowset
auto seg_id = 0;
std::vector<KeyBoundsPB> segment_key_bounds;
for (auto rowset : _input_rowsets) {
RETURN_NOT_OK(rowset->link_files_to(_tablet->tablet_path(), _output_rs_writer->rowset_id(),
seg_id));
RETURN_IF_ERROR(rowset->link_files_to(_tablet->tablet_path(),
_output_rs_writer->rowset_id(), seg_id));
seg_id += rowset->num_segments();

std::vector<KeyBoundsPB> key_bounds;
Expand Down Expand Up @@ -269,7 +269,7 @@ Status Compaction::do_compaction_impl(int64_t permits) {
OlapStopWatch watch;

if (handle_ordered_data_compaction()) {
RETURN_NOT_OK(modify_rowsets());
RETURN_IF_ERROR(modify_rowsets());
TRACE("modify rowsets finished");

int64_t now = UnixMillis();
Expand All @@ -295,8 +295,8 @@ Status Compaction::do_compaction_impl(int64_t permits) {
<< ", output_version=" << _output_version << ", permits: " << permits;
bool vertical_compaction = should_vertical_compaction();
RowsetWriterContext ctx;
RETURN_NOT_OK(construct_input_rowset_readers());
RETURN_NOT_OK(construct_output_rowset_writer(ctx, vertical_compaction));
RETURN_IF_ERROR(construct_input_rowset_readers());
RETURN_IF_ERROR(construct_output_rowset_writer(ctx, vertical_compaction));
if (compaction_type() == ReaderType::READER_COLD_DATA_COMPACTION) {
Tablet::add_pending_remote_rowset(_output_rs_writer->rowset_id().to_string());
}
Expand Down Expand Up @@ -345,7 +345,7 @@ Status Compaction::do_compaction_impl(int64_t permits) {
TRACE("output rowset built");

// 3. check correctness
RETURN_NOT_OK(check_correctness(stats));
RETURN_IF_ERROR(check_correctness(stats));
TRACE("check correctness finished");

if (_input_row_num > 0 && stats.rowid_conversion && config::inverted_index_compaction_enable) {
Expand Down Expand Up @@ -413,7 +413,7 @@ Status Compaction::do_compaction_impl(int64_t permits) {
}

// 4. modify rowsets in memory
RETURN_NOT_OK(modify_rowsets(&stats));
RETURN_IF_ERROR(modify_rowsets(&stats));
TRACE("modify rowsets finished");

// 5. update last success compaction time
Expand Down Expand Up @@ -494,7 +494,7 @@ Status Compaction::construct_output_rowset_writer(RowsetWriterContext& ctx, bool
Status Compaction::construct_input_rowset_readers() {
for (auto& rowset : _input_rowsets) {
RowsetReaderSharedPtr rs_reader;
RETURN_NOT_OK(rowset->create_reader(&rs_reader));
RETURN_IF_ERROR(rowset->create_reader(&rs_reader));
_input_rs_readers.push_back(std::move(rs_reader));
}
return Status::OK();
Expand Down Expand Up @@ -552,11 +552,11 @@ Status Compaction::modify_rowsets(const Merger::Statistics* stats) {
RETURN_IF_ERROR(_tablet->check_rowid_conversion(_output_rowset, location_map));

_tablet->merge_delete_bitmap(output_rowset_delete_bitmap);
RETURN_NOT_OK(_tablet->modify_rowsets(output_rowsets, _input_rowsets, true));
RETURN_IF_ERROR(_tablet->modify_rowsets(output_rowsets, _input_rowsets, true));
}
} else {
std::lock_guard<std::shared_mutex> wrlock(_tablet->get_header_lock());
RETURN_NOT_OK(_tablet->modify_rowsets(output_rowsets, _input_rowsets, true));
RETURN_IF_ERROR(_tablet->modify_rowsets(output_rowsets, _input_rowsets, true));
}

{
Expand Down
6 changes: 3 additions & 3 deletions be/src/olap/cumulative_compaction.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ Status CumulativeCompaction::prepare_compact() {
<< _tablet->cumulative_layer_point() << ", tablet=" << _tablet->full_name();

// 2. pick rowsets to compact
RETURN_NOT_OK(pick_rowsets_to_compact());
RETURN_IF_ERROR(pick_rowsets_to_compact());
TRACE("rowsets picked");
TRACE_COUNTER_INCREMENT("input_rowsets_count", _input_rowsets.size());
_tablet->set_clone_occurred(false);
Expand All @@ -88,7 +88,7 @@ Status CumulativeCompaction::execute_compact_impl() {

// 3. do cumulative compaction, merge rowsets
int64_t permits = get_compaction_permits();
RETURN_NOT_OK(do_compaction(permits));
RETURN_IF_ERROR(do_compaction(permits));
TRACE("compaction finished");

// 4. set state to success
Expand Down Expand Up @@ -117,7 +117,7 @@ Status CumulativeCompaction::pick_rowsets_to_compact() {
// candidate_rowsets may not be continuous
// So we need to choose the longest continuous path from it.
std::vector<Version> missing_versions;
RETURN_NOT_OK(find_longest_consecutive_version(&candidate_rowsets, &missing_versions));
RETURN_IF_ERROR(find_longest_consecutive_version(&candidate_rowsets, &missing_versions));
if (!missing_versions.empty()) {
DCHECK(missing_versions.size() == 2);
LOG(WARNING) << "There are missed versions among rowsets. "
Expand Down
18 changes: 9 additions & 9 deletions be/src/olap/delta_writer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -158,8 +158,8 @@ Status DeltaWriter::init() {
return Status::Error<TRY_LOCK_FAILED>();
}
std::lock_guard<std::mutex> push_lock(_tablet->get_push_lock());
RETURN_NOT_OK(_storage_engine->txn_manager()->prepare_txn(_req.partition_id, _tablet,
_req.txn_id, _req.load_id));
RETURN_IF_ERROR(_storage_engine->txn_manager()->prepare_txn(_req.partition_id, _tablet,
_req.txn_id, _req.load_id));
}
if (_tablet->enable_unique_key_merge_on_write() && _delete_bitmap == nullptr) {
_delete_bitmap.reset(new DeleteBitmap(_tablet->tablet_id()));
Expand All @@ -178,7 +178,7 @@ Status DeltaWriter::init() {
context.is_direct_write = true;
context.mow_context =
std::make_shared<MowContext>(_cur_max_version, _rowset_ids, _delete_bitmap);
RETURN_NOT_OK(_tablet->create_rowset_writer(context, &_rowset_writer));
RETURN_IF_ERROR(_tablet->create_rowset_writer(context, &_rowset_writer));

_schema.reset(new Schema(_tablet_schema));
_reset_mem_table();
Expand All @@ -187,7 +187,7 @@ Status DeltaWriter::init() {
// unique key should flush serial because we need to make sure same key should sort
// in the same order in all replica.
bool should_serial = _tablet->keys_type() == KeysType::UNIQUE_KEYS;
RETURN_NOT_OK(_storage_engine->memtable_flush_executor()->create_flush_token(
RETURN_IF_ERROR(_storage_engine->memtable_flush_executor()->create_flush_token(
&_flush_token, _rowset_writer->type(), should_serial, _req.is_high_priority));

_is_init = true;
Expand All @@ -205,7 +205,7 @@ Status DeltaWriter::write(const vectorized::Block* block, const std::vector<int>
}
std::lock_guard<std::mutex> l(_lock);
if (!_is_init && !_is_cancelled) {
RETURN_NOT_OK(init());
RETURN_IF_ERROR(init());
}

if (_is_cancelled) {
Expand Down Expand Up @@ -269,7 +269,7 @@ Status DeltaWriter::flush_memtable_and_wait(bool need_wait) {

if (need_wait) {
// wait all memtables in flush queue to be flushed.
RETURN_NOT_OK(_flush_token->wait());
RETURN_IF_ERROR(_flush_token->wait());
}
return Status::OK();
}
Expand All @@ -286,7 +286,7 @@ Status DeltaWriter::wait_flush() {
return _cancel_status;
}
}
RETURN_NOT_OK(_flush_token->wait());
RETURN_IF_ERROR(_flush_token->wait());
return Status::OK();
}

Expand Down Expand Up @@ -327,7 +327,7 @@ Status DeltaWriter::close() {
// in same partition has data loaded.
// so we have to also init this DeltaWriter, so that it can create an empty rowset
// for this tablet when being closed.
RETURN_NOT_OK(init());
RETURN_IF_ERROR(init());
}

if (_is_cancelled) {
Expand All @@ -343,7 +343,7 @@ Status DeltaWriter::close() {
auto s = _flush_memtable_async();
_mem_table.reset();
_is_closed = true;
if (OLAP_UNLIKELY(!s.ok())) {
if (UNLIKELY(!s.ok())) {
return s;
} else {
return Status::OK();
Expand Down
Loading

0 comments on commit e08de52

Please sign in to comment.