Commit f79e019

Update benchmark

Parent: 30e956b

1 file changed (+7, -4):

cpp/benchmarks/io/parquet/experimental/parquet_deletion_vectors.cpp (7 additions, 4 deletions)
@@ -259,6 +259,7 @@ void BM_parquet_chunked_deletion_vectors(nvbench::state& state)
   cudf::io::parquet_reader_options read_opts =
     cudf::io::parquet_reader_options::builder(source_sink.make_source_info());

+  auto num_chunks       = 0;
   auto mem_stats_logger = cudf::memory_stats_logger();
   state.set_cuda_stream(nvbench::make_cuda_stream_view(cudf::get_default_stream().value()));
   state.exec(
@@ -274,14 +275,16 @@ void BM_parquet_chunked_deletion_vectors(nvbench::state& state)
           row_group_num_rows);
       do {
         auto const result = reader.read_chunk();
+        num_chunks++;
       } while (reader.has_next());
       timer.stop();
     });

  auto const time = state.get_summary("nv/cold/time/gpu/mean").get_float64("value");
-  state.add_element_count(static_cast<double>(num_rows) / time, "bytes_per_second");
+  state.add_element_count(num_chunks, "num_table_chunks");
+  state.add_element_count(static_cast<double>(num_rows) / time, "bytes_per_sec");
  state.add_buffer_size(
-    mem_stats_logger.peak_memory_usage(), "peak_memory_usage", "peak_memory_usage");
+    mem_stats_logger.peak_memory_usage(), "peak_memory_usage", "peak_mem_usage");
  state.add_buffer_size(source_sink.size(), "encoded_file_size", "encoded_file_size");
}

@@ -300,7 +303,7 @@ NVBENCH_BENCH(BM_parquet_chunked_deletion_vectors)
  .add_int64_power_of_two_axis("num_row_groups", nvbench::range(4, 14, 2))
  .add_int64_axis("rows_per_row_group", {5'000, 10'000})
  .add_string_axis("io_type", {"DEVICE_BUFFER"})
-  .add_int64_axis("chunk_read_limit", {1'024'000})
+  .add_int64_axis("chunk_read_limit", {4'096'000})
  .add_int64_axis("pass_read_limit", {10'240'000, 102'400'000})
-  .add_float64_axis("deletion_probability", {0.25, 0.65})
+  .add_float64_axis("deletion_probability", {0.50})
  .add_int64_axis("num_cols", {4});
