Commit

fix merge
eldenmoon committed Nov 22, 2023
1 parent 450c1e1 commit f3f54ca
Showing 13 changed files with 24 additions and 49 deletions.
5 changes: 3 additions & 2 deletions be/src/olap/memtable.cpp
@@ -99,6 +99,7 @@ MemTable::MemTable(TabletSharedPtr tablet, Schema* schema, const TabletSchema* t
    // TODO: Support ZOrderComparator in the future
    _init_columns_offset_by_slot_descs(slot_descs, tuple_desc);
    _num_columns = _tablet_schema->num_columns();
+    _partial_update_info = partial_update_info;
    if (partial_update_info != nullptr) {
        _is_partial_update = partial_update_info->is_partial_update;
        if (_is_partial_update) {
@@ -552,9 +553,9 @@ Status MemTable::unfold_variant_column(vectorized::Block& block, FlushContext* c
    }

    std::vector<int> variant_column_pos;
-    if (_tablet_schema->is_partial_update()) {
+    if (_is_partial_update) {
        // check columns that used to do partial updates should not include variant
-        for (int i : _tablet_schema->get_update_cids()) {
+        for (int i : _partial_update_info->update_cids) {
            if (_tablet_schema->columns()[i].is_variant_type()) {
                return Status::InvalidArgument("Not implement partial updates for variant");
            }
1 change: 1 addition & 0 deletions be/src/olap/memtable.h
@@ -305,6 +305,7 @@ class MemTable {
    std::shared_ptr<MowContext> _mow_context;
    size_t _num_columns;
    int32_t _seq_col_idx_in_block = -1;
+    PartialUpdateInfo* _partial_update_info = nullptr;
}; // class MemTable

inline std::ostream& operator<<(std::ostream& os, const MemTable& table) {
2 changes: 1 addition & 1 deletion be/src/olap/rowset/segment_v2/column_reader.cpp
@@ -178,7 +178,7 @@ Status ColumnReader::create(const ColumnReaderOptions& opts, const ColumnMetaPB&
        // Read variant only root data using a single ColumnReader
        std::unique_ptr<ColumnReader> reader_local(
                new ColumnReader(opts, meta, num_rows, file_reader));
-        RETURN_IF_ERROR(reader_local->init());
+        RETURN_IF_ERROR(reader_local->init(&meta));
        *reader = std::move(reader_local);
        return Status::OK();
    }
4 changes: 2 additions & 2 deletions be/src/olap/rowset/segment_v2/segment.cpp
@@ -401,7 +401,7 @@ Status Segment::_create_column_readers(const SegmentFooterPB& footer) {
    }

    // init by column path
-    for (uint32_t ordinal = 0; ordinal < footer.columns().size(); ++ordinal) {
+    for (uint32_t ordinal = 0; ordinal < _tablet_schema->num_columns(); ++ordinal) {
        auto& column = _tablet_schema->column(ordinal);
        auto iter = column_path_to_footer_ordinal.find(column.path_info());
        if (iter == column_path_to_footer_ordinal.end()) {
@@ -415,7 +415,7 @@ Status Segment::_create_column_readers(const SegmentFooterPB& footer) {
        _sub_column_tree.add(
                iter->first,
                SubcolumnReader {std::move(reader),
-                                get_data_type_from_column_meta(_footer.columns(iter->second))});
+                                get_data_type_from_column_meta(footer.columns(iter->second))});
    }
    return Status::OK();
}
2 changes: 0 additions & 2 deletions be/src/olap/storage_engine.h
@@ -503,8 +503,6 @@ class StorageEngine {
    // next index for create tablet
    std::map<TStorageMedium::type, int> _store_next_index;

-    // next index for create tablet
-    std::map<TStorageMedium::type, int> _store_next_index;
    DISALLOW_COPY_AND_ASSIGN(StorageEngine);
};

6 changes: 2 additions & 4 deletions be/src/util/arrow/block_convertor.cpp
@@ -411,11 +411,9 @@ Status FromBlockConverter::convert(std::shared_ptr<arrow::RecordBatch>* out) {
Status convert_to_arrow_batch(const vectorized::Block& block,
                              const std::shared_ptr<arrow::Schema>& schema, arrow::MemoryPool* pool,
                              std::shared_ptr<arrow::RecordBatch>* result) {
-    // FromBlockConverter converter(block, schema, pool);
-    std::shared_ptr<arrow::Schema> block_arrow_schema;
-    RETURN_IF_ERROR(get_block_arrow_schema(block, &block_arrow_schema));
-    FromBlockConverter converter(block, block_arrow_schema, pool);
+    FromBlockConverter converter(block, schema, pool);
    return converter.convert(result);
}


} // namespace doris
14 changes: 0 additions & 14 deletions be/src/util/thread.cpp
@@ -180,11 +180,7 @@ void ThreadMgr::set_idle_sched(int64_t tid) {
    }
}

-<<<<<<< HEAD
-void ThreadMgr::set_low_priority(int64_t tid) {
-=======
void ThreadMgr::set_thread_nice_value(int64_t tid) {
->>>>>>> upstream-apache/branch-2.0
    if (tid == getpid()) {
        return;
    }
@@ -198,12 +194,7 @@ void ThreadMgr::set_thread_nice_value(int64_t tid) {
    // applications that require it (e.g., some audio applications).

    // Choose 5 as lower priority value, default is 0
-<<<<<<< HEAD
-    constexpr static int s_low_priority_nice_value = 5;
-    int err = setpriority(PRIO_PROCESS, 0, s_low_priority_nice_value);
-=======
    int err = setpriority(PRIO_PROCESS, 0, config::scan_thread_nice_value);
->>>>>>> upstream-apache/branch-2.0
    if (err < 0 && errno != EPERM) {
        LOG(ERROR) << "set_thread_low_priority";
    }
@@ -340,13 +331,8 @@ void Thread::set_idle_sched() {
    ThreadMgr::set_idle_sched(current_thread_id());
}

-<<<<<<< HEAD
-void Thread::set_low_priority() {
-    ThreadMgr::set_low_priority(current_thread_id());
-=======
void Thread::set_thread_nice_value() {
    ThreadMgr::set_thread_nice_value(current_thread_id());
->>>>>>> upstream-apache/branch-2.0
}
#endif

18 changes: 9 additions & 9 deletions be/src/vec/exec/scan/scanner_scheduler.cpp
@@ -241,14 +241,20 @@ void ScannerScheduler::_schedule_scanners(ScannerContext* ctx) {
        if (type == TabletStorageType::STORAGE_TYPE_LOCAL) {
            if (ctx->get_task_group() && config::enable_workload_group_for_scan) {
                auto work_func = [this, scanner = *iter, ctx] {
-                    this->_scanner_scan(this, ctx, scanner);
+                    std::stringstream ss;
+                    ss << "ss0" << scanner->runtime_state()->query_type()
+                       << scanner->runtime_state()->query_id().lo%(int64_t)100000;
+                    this->_scanner_scan(this, ctx, scanner, ss.str());
                };
                taskgroup::ScanTask scan_task = {work_func, ctx, nice};
                ret = _task_group_local_scan_queue->push_back(scan_task);
            } else {
                PriorityThreadPool::Task task;
                task.work_function = [this, scanner = *iter, ctx] {
-                    this->_scanner_scan(this, ctx, scanner);
+                    std::stringstream ss;
+                    ss << "ss1" << scanner->runtime_state()->query_type()
+                       << scanner->runtime_state()->query_id().lo%(int64_t)100000;
+                    this->_scanner_scan(this, ctx, scanner, ss.str());
                };
                task.priority = nice;
                ret = _local_scan_thread_pool->offer(task);
@@ -257,7 +263,7 @@ void ScannerScheduler::_schedule_scanners(ScannerContext* ctx) {
                PriorityThreadPool::Task task;
                task.work_function = [this, scanner = *iter, ctx] {
                    std::stringstream ss;
-                    ss << "ss1" << scanner->runtime_state()->query_type()
+                    ss << "ss2" << scanner->runtime_state()->query_type()
                       << scanner->runtime_state()->query_id().lo%(int64_t)100000;
                    this->_scanner_scan(this, ctx, scanner, ss.str());
                };
@@ -325,12 +331,6 @@ void ScannerScheduler::_scanner_scan(ScannerScheduler* scheduler, ScannerContext
        Thread::set_self_name(thread_name);
    }
#endif
-#ifndef __APPLE__
-    if (config::enable_scan_thread_low_thread_priority &&
-        scanner->get_name() != VFileScanner::NAME) {
-        Thread::set_low_priority();
-    }
-#endif

#ifndef __APPLE__
    // The configuration item is used to lower the priority of the scanner thread,

@@ -614,6 +614,7 @@ public void analyze(Analyzer analyzer) throws UserException {
            }
            colLabels.add("col_" + colLabels.size());
            subColLabels.add(expr.toSubColumnLabel());
+        }
    }
    // analyze valueList if exists
    if (needToSql) {

@@ -81,6 +81,7 @@
import org.apache.doris.task.StreamLoadTask;
import org.apache.doris.thrift.FrontendService;
import org.apache.doris.thrift.FrontendServiceVersion;
+import org.apache.doris.thrift.TBackend;
import org.apache.doris.thrift.TBeginTxnRequest;
import org.apache.doris.thrift.TBeginTxnResult;
import org.apache.doris.thrift.TBinlog;
11 changes: 0 additions & 11 deletions regression-test/data/variant_p0/desc.out
@@ -112,17 +112,6 @@ v3.c.e DOUBLE Yes false \N NONE
k BIGINT Yes true \N
v VARIANT Yes false \N NONE

--- !sql_9_1 --
-k BIGINT Yes true \N
-v VARIANT Yes false \N NONE
-v.a SMALLINT Yes false \N NONE
-v.b JSON Yes false \N NONE
-v.c.c SMALLINT Yes false \N NONE
-v.c.e DOUBLE Yes false \N NONE
-v.oooo.xxxx.xxx TINYINT Yes false \N NONE
-v.中文 TEXT Yes false \N NONE
-v.英文 TEXT Yes false \N NONE
-
-- !sql_9_2 --
k BIGINT Yes true \N
v VARIANT Yes false \N NONE
6 changes: 3 additions & 3 deletions regression-test/data/variant_p0/load.out
@@ -1,9 +1,9 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !sql --
-1 [1, 2, NULL]
+1 [1, 2, null]
1 [1]
1 [1]
-1 [NULL]
+1 [null]

-- !sql_1_1 --
1 {"a":1,"b":{"c":1}} {"b":{"c":1}}
@@ -164,7 +164,7 @@
[123]

-- !sql_25 --
-50000 54999.99999999906 6150000
+50000 55000.000000007036 6150000

-- !sql_26 --
5000
2 changes: 1 addition & 1 deletion regression-test/suites/variant_p0/desc.groovy
@@ -196,7 +196,7 @@ suite("regression_test_variant_desc", "variant_type_desc"){
        qt_sql_9_2 """desc ${table_name}"""
        qt_sql_9_2 """select cast(v:中文 as string) from ${table_name} order by k"""
    } finally {
-        // reset flags
+        // reset flags of ratio_of_defaults_as_sparse_column
        set_be_config.call("ratio_of_defaults_as_sparse_column", "0.95")
    }
}
