
Commit e355345

Cheng-Yuan-Lai authored and Ian Lai committed
feat: rewrite snapshot test in explain_analyze_baseline_metrics
1 parent eb97840 commit e355345


3 files changed (+25 -23 lines)


datafusion/core/tests/sql/aggregates.rs

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@ use super::*;
 use datafusion::common::test_util::batches_to_string;
 use datafusion_catalog::MemTable;
 use datafusion_common::ScalarValue;
+use insta::assert_snapshot;

 #[tokio::test]
 async fn csv_query_array_agg_distinct() -> Result<()> {
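
Note: the insta::assert_snapshot macro imported here (and in select.rs below) compares rendered output against an inline snapshot stored in the source file. A minimal sketch of that pattern, assuming batches_to_string pretty-prints record batches as the usual ASCII table; the query, test name, and expected table are illustrative, not taken from this commit:

use datafusion::common::test_util::batches_to_string;
use datafusion::prelude::*;
use insta::assert_snapshot;

#[tokio::test]
async fn snapshot_pattern_sketch() -> datafusion::error::Result<()> {
    // Illustrative query; the real tests in this commit run against their own tables.
    let ctx = SessionContext::new();
    let batches = ctx.sql("SELECT 1 AS a").await?.collect().await?;

    // Compare the pretty-printed batches against the inline snapshot;
    // `cargo insta review` rewrites the snapshot when the output changes.
    assert_snapshot!(batches_to_string(&batches), @r"
    +---+
    | a |
    +---+
    | 1 |
    +---+
    ");
    Ok(())
}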

datafusion/core/tests/sql/explain_analyze.rs

Lines changed: 23 additions & 23 deletions
@@ -57,38 +57,38 @@ async fn explain_analyze_baseline_metrics() {

     println!("Query Output:\n\n{formatted}");

-    let re = Regex::new(r"\|[^|]*\|([^|]?)\s\|").unwrap();
+    let re = Regex::new(r"\|[^|]*\|([^|]*)\|").unwrap();
     let actual = formatted
         .lines()
-        .map(|line| re.replace_all(line, "$1").to_string())
+        .map(|line| re.replace_all(line, "$1").trim_end().to_string())
         .filter(|line| !line.is_empty() && !line.starts_with('+'))
         .collect::<Vec<_>>()
         .join("\n");
     insta::with_settings!({filters => vec![
         (r"\d+\.?\d*[µmn]?s", "[TIME]"),
     ]}, {
         insta::assert_snapshot!(actual,@r#"
-plan
-CoalescePartitionsExec: fetch=3, metrics=[output_rows=3, elapsed_compute=[TIME]]
-UnionExec, metrics=[output_rows=3, elapsed_compute=[TIME]]
-ProjectionExec: expr=[count(Int64(1))@0 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
-AggregateExec: mode=Final, gby=[], aggr=[count(Int64(1))], metrics=[output_rows=1, elapsed_compute=[TIME]]
-CoalescePartitionsExec, metrics=[output_rows=3, elapsed_compute=[TIME]]
-AggregateExec: mode=Partial, gby=[], aggr=[count(Int64(1))], metrics=[output_rows=3, elapsed_compute=[TIME]]
-ProjectionExec: expr=[], metrics=[output_rows=5, elapsed_compute=[TIME]]
-AggregateExec: mode=FinalPartitioned, gby=[c1@0 as c1], aggr=[], metrics=[output_rows=5, elapsed_compute=[TIME], spill_count=0, spilled_bytes=0.0 B, spilled_rows=0, peak_mem_used=50592]
-CoalesceBatchesExec: target_batch_size=4096, metrics=[output_rows=5, elapsed_compute=[TIME]]
-RepartitionExec: partitioning=Hash([c1@0], 3), input_partitions=3, metrics=[fetch_time=[TIME], repartition_time=[TIME], send_time=[TIME]]
-AggregateExec: mode=Partial, gby=[c1@0 as c1], aggr=[], metrics=[output_rows=5, elapsed_compute=[TIME], spill_count=0, spilled_bytes=0.0 B, spilled_rows=0, skipped_aggregation_rows=0, peak_mem_used=52216]
-CoalesceBatchesExec: target_batch_size=4096, metrics=[output_rows=99, elapsed_compute=[TIME]]
-FilterExec: c13@1 != C2GT5KVyOPZpgKVl110TyZO0NcJ434, projection=[c1@0], metrics=[output_rows=99, elapsed_compute=[TIME]]
-RepartitionExec: partitioning=RoundRobinBatch(3), input_partitions=1, metrics=[fetch_time=[TIME], repartition_time=[TIME], send_time=[TIME]]
-DataSourceExec: file_groups={1 group: [[home/ian/open_source/datafusion/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c13], file_type=csv, has_header=true, metrics=[output_rows=100, elapsed_compute=[TIME], file_open_errors=0, file_scan_errors=0, time_elapsed_opening=[TIME], time_elapsed_processing=[TIME], time_elapsed_scanning_total=[TIME], time_elapsed_scanning_until_data=[TIME]]
-ProjectionExec: expr=[1 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
-PlaceholderRowExec, metrics=[]
-ProjectionExec: expr=[lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING@1 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
-BoundedWindowAggExec: wdw=[lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING: Ok(Field { name: "lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING", data_type: Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(NULL)), end_bound: Following(UInt64(NULL)), is_causal: false }], mode=[Sorted], metrics=[output_rows=1, elapsed_compute=[TIME]]
-ProjectionExec: expr=[1 as c1], metrics=[output_rows=1, elapsed_compute=[TIME]]
+plan
+CoalescePartitionsExec: fetch=3, metrics=[output_rows=3, elapsed_compute=[TIME]]
+UnionExec, metrics=[output_rows=3, elapsed_compute=[TIME]]
+ProjectionExec: expr=[count(Int64(1))@0 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
+AggregateExec: mode=Final, gby=[], aggr=[count(Int64(1))], metrics=[output_rows=1, elapsed_compute=[TIME]]
+CoalescePartitionsExec, metrics=[output_rows=3, elapsed_compute=[TIME]]
+AggregateExec: mode=Partial, gby=[], aggr=[count(Int64(1))], metrics=[output_rows=3, elapsed_compute=[TIME]]
+ProjectionExec: expr=[], metrics=[output_rows=5, elapsed_compute=[TIME]]
+AggregateExec: mode=FinalPartitioned, gby=[c1@0 as c1], aggr=[], metrics=[output_rows=5, elapsed_compute=[TIME], spill_count=0, spilled_bytes=0.0 B, spilled_rows=0, peak_mem_used=50592]
+CoalesceBatchesExec: target_batch_size=4096, metrics=[output_rows=5, elapsed_compute=[TIME]]
+RepartitionExec: partitioning=Hash([c1@0], 3), input_partitions=3, metrics=[fetch_time=[TIME], repartition_time=[TIME], send_time=[TIME]]
+AggregateExec: mode=Partial, gby=[c1@0 as c1], aggr=[], metrics=[output_rows=5, elapsed_compute=[TIME], spill_count=0, spilled_bytes=0.0 B, spilled_rows=0, skipped_aggregation_rows=0, peak_mem_used=52216]
+CoalesceBatchesExec: target_batch_size=4096, metrics=[output_rows=99, elapsed_compute=[TIME]]
+FilterExec: c13@1 != C2GT5KVyOPZpgKVl110TyZO0NcJ434, projection=[c1@0], metrics=[output_rows=99, elapsed_compute=[TIME]]
+RepartitionExec: partitioning=RoundRobinBatch(3), input_partitions=1, metrics=[fetch_time=[TIME], repartition_time=[TIME], send_time=[TIME]]
+DataSourceExec: file_groups={1 group: [[home/ian/open_source/datafusion/testing/data/csv/aggregate_test_100.csv]]}, projection=[c1, c13], file_type=csv, has_header=true, metrics=[output_rows=100, elapsed_compute=[TIME], file_open_errors=0, file_scan_errors=0, time_elapsed_opening=[TIME], time_elapsed_processing=[TIME], time_elapsed_scanning_total=[TIME], time_elapsed_scanning_until_data=[TIME]]
+ProjectionExec: expr=[1 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
+PlaceholderRowExec, metrics=[]
+ProjectionExec: expr=[lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING@1 as cnt], metrics=[output_rows=1, elapsed_compute=[TIME]]
+BoundedWindowAggExec: wdw=[lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING: Ok(Field { name: "lead(b.c1,Int64(1)) ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING", data_type: Int64, nullable: true, dict_id: 0, dict_is_ordered: false, metadata: {} }), frame: WindowFrame { units: Rows, start_bound: Preceding(UInt64(NULL)), end_bound: Following(UInt64(NULL)), is_causal: false }], mode=[Sorted], metrics=[output_rows=1, elapsed_compute=[TIME]]
+ProjectionExec: expr=[1 as c1], metrics=[output_rows=1, elapsed_compute=[TIME]]
 PlaceholderRowExec, metrics=[]
 "#);
     });
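
Note: the reworked extraction keeps the entire second column of each "| plan_type | plan |" row of the EXPLAIN ANALYZE table and trims the trailing cell padding before snapshotting; the insta filter then rewrites durations such as "1.2ms" to [TIME] so the inline snapshot stays stable across runs. A minimal standalone sketch of the regex step (requires the regex crate; the sample row is hypothetical, not taken from the test output):

use regex::Regex;

fn main() {
    // Same pattern as the test: the first column is matched and discarded,
    // capture group 1 holds the full contents of the second cell.
    let re = Regex::new(r"\|[^|]*\|([^|]*)\|").unwrap();

    // Hypothetical table row; the test feeds every line of the formatted output.
    let line = "| Plan with Metrics | ProjectionExec: expr=[], metrics=[output_rows=5]   |";

    // Replace the whole row with the captured cell, then drop the padding that
    // precedes the closing pipe; the cell's single leading space remains.
    let cleaned = re.replace_all(line, "$1").trim_end().to_string();
    assert_eq!(cleaned, " ProjectionExec: expr=[], metrics=[output_rows=5]");
    println!("{cleaned}");
}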

datafusion/core/tests/sql/select.rs

Lines changed: 1 addition & 0 deletions
@@ -17,6 +17,7 @@

 use super::*;
 use datafusion_common::ScalarValue;
+use insta::assert_snapshot;

 #[tokio::test]
 async fn test_list_query_parameters() -> Result<()> {
