Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/rust-cubesql.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ jobs:
run: cd packages/cubejs-backend-native && cargo clippy --locked --workspace --all-targets --keep-going -- -D warnings
- name: Clippy Native (with Python)
run: cd packages/cubejs-backend-native && cargo clippy --locked --workspace --all-targets --keep-going --features python -- -D warnings
- name: Clippy cubenativeutils
run: cd rust/cubenativeutils && cargo clippy --locked --workspace --all-targets --keep-going -- -D warnings
- name: Clippy cubesqlplanner
run: cd rust/cubesqlplanner && cargo clippy --locked --workspace --all-targets --keep-going -- -D warnings

unit:
# We use host instead of cross container, because it's much faster
Expand Down
13 changes: 13 additions & 0 deletions rust/cubenativeutils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,16 @@ convert_case = "0.6.0"
version = "=1"
default-features = false
features = ["napi-1", "napi-4", "napi-6", "futures"]

# Code in the cubenativeutils crate is not yet ready for a full-blown clippy run,
# so we disable individual rules here: this lets CI ratchet lint compliance per rule
# instead of gating on the whole clippy run at once.
# Feel free to remove any rule from this list and fix all warnings it reports,
# or add a comment explaining why a rule should stay disabled.
[lints.clippy]
clone_on_copy = "allow"
len_without_is_empty = "allow"
module_inception = "allow"
multiple_bound_locations = "allow"
result_large_err = "allow"
unnecessary_cast = "allow"
useless_format = "allow"
39 changes: 39 additions & 0 deletions rust/cubesqlplanner/cubesqlplanner/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,42 @@ regex = "1.3.9"
version = "=1"
default-features = false
features = ["napi-1", "napi-4", "napi-6", "futures"]

# Code in the cubesqlplanner workspace is not yet ready for a full-blown clippy run,
# so we disable individual rules here: this lets CI ratchet lint compliance per rule
# instead of gating on the whole clippy run at once.
# Feel free to remove any rule from this list and fix all warnings it reports,
# or add a comment explaining why a rule should stay disabled.
[lints.clippy]
clone_on_copy = "allow"
collapsible_else_if = "allow"
default_constructed_unit_structs = "allow"
enum_variant_names = "allow"
filter_map_identity = "allow"
let_and_return = "allow"
map_clone = "allow"
manual_map = "allow"
manual_unwrap_or_default = "allow"
match_like_matches_macro = "allow"
needless_bool = "allow"
needless_borrow = "allow"
needless_question_mark = "allow"
neg_multiply = "allow"
new_without_default = "allow"
only_used_in_recursion = "allow"
ptr_arg = "allow"
redundant_closure = "allow"
result_large_err = "allow"
should_implement_trait = "allow"
to_string_in_format_args = "allow"
too_many_arguments = "allow"
useless_conversion = "allow"
useless_format = "allow"
vec_init_then_push = "allow"
type_complexity = "allow"
if_same_then_else = "allow"
to_string_trait_impl = "allow"
field_reassign_with_default = "allow"
collapsible_match = "allow"
wrong_self_convention = "allow"
module_inception = "allow"
comparison_chain = "allow"
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,10 @@ impl MeasureMatcher {
pub fn try_match(&self, symbol: &Rc<MemberSymbol>) -> Result<bool, CubeError> {
match symbol.as_ref() {
MemberSymbol::Measure(measure) => {
if self.pre_aggregation_measures.contains(&measure.full_name()) {
if !self.only_addictive || measure.is_addictive() {
return Ok(true);
}
if self.pre_aggregation_measures.contains(&measure.full_name())
&& (!self.only_addictive || measure.is_addictive())
{
return Ok(true);
}
}
MemberSymbol::MemberExpression(_) => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ impl MatchState {
if matches!(self, MatchState::Partial) || matches!(other, MatchState::Partial) {
return MatchState::Partial;
}
return MatchState::Full;
MatchState::Full
}
}

Expand Down Expand Up @@ -234,8 +234,8 @@ impl PreAggregationOptimizer {

if let Some(multi_stage_item) = multi_stage_queries
.iter()
.find(|&query| &query.name == multi_stage_name)
.cloned()
.find(|query| &query.name == multi_stage_name)
{
match &multi_stage_item.member_type {
MultiStageMemberLogicalType::LeafMeasure(multi_stage_leaf_measure) => self
Expand Down Expand Up @@ -432,7 +432,7 @@ impl PreAggregationOptimizer {
.map(|(d, _)| d.clone()),
)
.collect(),
measures: pre_aggregation.measures.iter().cloned().collect(),
measures: pre_aggregation.measures.to_vec(),
multiplied_measures: HashSet::new(),
};
let pre_aggregation = PreAggregation {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@ use itertools::Itertools;
use std::collections::HashMap;
use std::collections::HashSet;
use std::rc::Rc;
const TOTAL_COUNT: &'static str = "total_count";
const ORIGINAL_QUERY: &'static str = "original_query";
const TOTAL_COUNT: &str = "total_count";
const ORIGINAL_QUERY: &str = "original_query";

#[derive(Clone, Debug)]
#[derive(Clone, Debug, Default)]
struct PhysicalPlanBuilderContext {
pub alias_prefix: Option<String>,
pub render_measure_as_state: bool, //Render measure as state, for example hll state for count_approx
Expand All @@ -28,18 +28,6 @@ struct PhysicalPlanBuilderContext {
pub original_sql_pre_aggregations: HashMap<String, String>,
}

impl Default for PhysicalPlanBuilderContext {
fn default() -> Self {
Self {
alias_prefix: None,
render_measure_as_state: false,
render_measure_for_ungrouped: false,
time_shifts: HashMap::new(),
original_sql_pre_aggregations: HashMap::new(),
}
}
}

impl PhysicalPlanBuilderContext {
pub fn make_sql_nodes_factory(&self) -> SqlNodesFactory {
let mut factory = SqlNodesFactory::new();
Expand Down
7 changes: 2 additions & 5 deletions rust/cubesqlplanner/cubesqlplanner/src/plan/builder/select.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,11 +203,8 @@ impl SelectBuilder {
source: &SingleAliasedSource,
refs: &mut HashMap<String, String>,
) {
match &source.source {
SingleSource::Cube(cube) => {
refs.insert(cube.name().clone(), source.alias.clone());
}
_ => {}
if let SingleSource::Cube(cube) = &source.source {
refs.insert(cube.name().clone(), source.alias.clone());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -178,11 +178,11 @@ impl BaseDimension {
pub fn is_sub_query(&self) -> bool {
self.definition
.as_ref()
.map_or(false, |def| def.static_data().sub_query.unwrap_or(false))
.is_some_and(|def| def.static_data().sub_query.unwrap_or(false))
}

pub fn propagate_filters_to_sub_query(&self) -> bool {
self.definition.as_ref().map_or(false, |def| {
self.definition.as_ref().is_some_and(|def| {
def.static_data()
.propagate_filters_to_sub_query
.unwrap_or(false)
Expand Down
10 changes: 5 additions & 5 deletions rust/cubesqlplanner/cubesqlplanner/src/planner/base_measure.rs
Original file line number Diff line number Diff line change
Expand Up @@ -185,19 +185,19 @@ impl BaseMeasure {
pub fn reduce_by(&self) -> Option<Vec<String>> {
self.definition
.as_ref()
.map_or(None, |d| d.static_data().reduce_by_references.clone())
.and_then(|d| d.static_data().reduce_by_references.clone())
}

pub fn add_group_by(&self) -> Option<Vec<String>> {
self.definition
.as_ref()
.map_or(None, |d| d.static_data().add_group_by_references.clone())
.and_then(|d| d.static_data().add_group_by_references.clone())
}

pub fn group_by(&self) -> Option<Vec<String>> {
self.definition
.as_ref()
.map_or(None, |d| d.static_data().group_by_references.clone())
.and_then(|d| d.static_data().group_by_references.clone())
}

//FIXME duplicate with symbol
Expand All @@ -218,13 +218,13 @@ impl BaseMeasure {
pub fn is_multi_stage(&self) -> bool {
self.definition
.as_ref()
.map_or(false, |d| d.static_data().multi_stage.unwrap_or(false))
.is_some_and(|d| d.static_data().multi_stage.unwrap_or(false))
}

pub fn rolling_window(&self) -> Option<RollingWindow> {
self.definition
.as_ref()
.map_or(None, |d| d.static_data().rolling_window.clone())
.and_then(|d| d.static_data().rolling_window.clone())
}

pub fn is_rolling_window(&self) -> bool {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ use crate::planner::{evaluate_with_context, FiltersContext, VisitorContext};
use cubenativeutils::CubeError;
use std::rc::Rc;

const FROM_PARTITION_RANGE: &'static str = "__FROM_PARTITION_RANGE";
const FROM_PARTITION_RANGE: &str = "__FROM_PARTITION_RANGE";

const TO_PARTITION_RANGE: &'static str = "__TO_PARTITION_RANGE";
const TO_PARTITION_RANGE: &str = "__TO_PARTITION_RANGE";

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FilterType {
Expand Down Expand Up @@ -589,7 +589,7 @@ impl BaseFilter {
}
let from = self.format_from_date(value)?;

let res = if use_db_time_zone && &from != FROM_PARTITION_RANGE {
let res = if use_db_time_zone && from != FROM_PARTITION_RANGE {
self.query_tools.base_tools().in_db_time_zone(from)?
} else {
from
Expand All @@ -607,7 +607,7 @@ impl BaseFilter {
}
let from = self.format_to_date(value)?;

let res = if use_db_time_zone && &from != TO_PARTITION_RANGE {
let res = if use_db_time_zone && from != TO_PARTITION_RANGE {
self.query_tools.base_tools().in_db_time_zone(from)?
} else {
from
Expand Down Expand Up @@ -705,10 +705,10 @@ impl BaseFilter {
as_date_time,
)
} else {
return Err(CubeError::user(format!(
Err(CubeError::user(format!(
"Arguments for timestamp parameter for operator {} is not valid",
self.filter_operator().to_string()
)));
)))
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ impl RollingWindowPlanner {
}
}

if time_dimensions.len() == 0 {
if time_dimensions.is_empty() {
let rolling_base = self.add_rolling_window_base(
member.clone(),
state.clone(),
Expand Down Expand Up @@ -254,7 +254,7 @@ impl RollingWindowPlanner {
let is_to_date = rolling_window
.rolling_type
.as_ref()
.map_or(false, |tp| tp == "to_date");
.is_some_and(|tp| tp == "to_date");

if is_to_date {
if let Some(granularity) = &rolling_window.granularity {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -479,7 +479,7 @@ impl QueryProperties {
.into_values()
.map(|measures_and_join| {
(
measures_and_join.iter().next().unwrap().1 .1.clone(),
measures_and_join.first().unwrap().1 .1.clone(),
measures_and_join
.into_iter()
.flat_map(|m| m.0)
Expand All @@ -506,7 +506,7 @@ impl QueryProperties {
.join(", ")
)));
}
Ok(self.multi_fact_join_groups.iter().next().unwrap().0.clone())
Ok(self.multi_fact_join_groups.first().unwrap().0.clone())
}

pub fn measures(&self) -> &Vec<Rc<BaseMeasure>> {
Expand Down Expand Up @@ -797,8 +797,7 @@ impl QueryProperties {
} else {
let join = self
.compute_join_multi_fact_groups_with_measures(&vec![m.clone()])?
.iter()
.next()
.first()
.expect("No join groups returned for single measure multi-fact join group")
.0
.clone();
Expand Down Expand Up @@ -845,11 +844,7 @@ impl QueryProperties {
}
FilterItem::Item(item) => {
let item_member_name = item.member_name();
if measures
.iter()
.find(|m| m.full_name() == item_member_name)
.is_none()
{
if !measures.iter().any(|m| m.full_name() == item_member_name) {
measures.push(BaseMeasure::try_new_required(
item.member_evaluator().clone(),
self.query_tools.clone(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,10 @@ impl TraversalVisitor for HasCumulativeMembersCollector {
_path: &Vec<String>,
_: &Self::State,
) -> Result<Option<Self::State>, CubeError> {
match node.as_ref() {
MemberSymbol::Measure(s) => {
if s.is_rolling_window() {
self.has_cumulative_members = true;
}
if let MemberSymbol::Measure(s) = node.as_ref() {
if s.is_rolling_window() {
self.has_cumulative_members = true;
}
_ => {}
};
if self.has_cumulative_members {
Ok(None)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,11 @@ impl SubQueryDimensionsCollector {

fn check_dim_has_measures(&self, dim: &DimensionSymbol) -> bool {
for dep in dim.get_dependencies().iter() {
match dep.as_ref() {
MemberSymbol::Measure(_) => return true,
_ => {}
if let MemberSymbol::Measure(_) = dep.as_ref() {
return true;
}
}
return false;
false
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ impl SqlCall {
.collect::<Result<Vec<_>, _>>()?;
let eval_result = self.member_sql.call(args)?;

Ok(eval_result.trim() == &reference_candidate.full_name())
Ok(eval_result.trim() == reference_candidate.full_name())
}

pub fn get_dependencies(&self) -> Vec<Rc<MemberSymbol>> {
Expand Down Expand Up @@ -180,11 +180,8 @@ impl SqlCall {
result.push(cube_dep.cube_symbol.name());

for (_, v) in cube_dep.properties.iter() {
match v {
CubeDepProperty::CubeDependency(cube_dep) => {
self.extract_cube_deps_from_cube_dep(cube_dep, result)
}
_ => {}
if let CubeDepProperty::CubeDependency(cube_dep) = v {
self.extract_cube_deps_from_cube_dep(cube_dep, result)
};
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ pub struct PlanSqlTemplates {
pub const UNDERSCORE_UPPER_BOUND: Boundary = Boundary {
name: "UnderscoreUpper",
condition: |s, _| {
s.get(0) == Some(&"_")
s.first() == Some(&"_")
&& s.get(1)
.map(|c| c.to_uppercase() != c.to_lowercase() && *c == c.to_uppercase())
== Some(true)
Expand All @@ -32,7 +32,7 @@ fn grapheme_is_uppercase(c: &&str) -> bool {
pub const UPPER_UPPER_BOUND: Boundary = Boundary {
name: "UpperUpper",
condition: |s, _| {
s.get(0).map(grapheme_is_uppercase) == Some(true)
s.first().map(grapheme_is_uppercase) == Some(true)
&& s.get(1).map(grapheme_is_uppercase) == Some(true)
},
arg: None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ impl QueryDateTime {

pub fn add_duration(&self, duration: Duration) -> Result<Self, CubeError> {
let mut native = self.naive_local();
native = native + duration;
native += duration;
Self::from_local_date_time(self.date_time.timezone(), native)
}

Expand Down
Loading
Loading