New clippy lints
muglug committed Jul 20, 2024
1 parent a3482e6 commit 437e1c4
Showing 9 changed files with 80 additions and 136 deletions.
71 changes: 34 additions & 37 deletions src/analyzer/expr/call/arguments_analyzer.rs
@@ -15,8 +15,8 @@ use crate::expr::call_analyzer::get_generic_param_for_offset;
use crate::expr::expression_identifier::{self, get_var_id};
use crate::expr::fetch::array_fetch_analyzer::add_array_fetch_dataflow;
use crate::function_analysis_data::FunctionAnalysisData;
use crate::scope_analyzer::ScopeAnalyzer;
use crate::scope::BlockContext;
use crate::scope_analyzer::ScopeAnalyzer;
use crate::statements_analyzer::StatementsAnalyzer;
use crate::stmt_analyzer::AnalysisError;
use crate::{expression_analyzer, functionlike_analyzer};
@@ -796,43 +796,40 @@ fn handle_closure_arg(
GraphKind::WholeProgram(_)
) || !statements_analyzer.get_config().in_migration
{
if let FunctionLikeIdentifier::Function(function_name) = functionlike_id {
match *function_name {
StrId::LIB_VEC_MAP
| StrId::LIB_DICT_MAP
| StrId::LIB_KEYSET_MAP
| StrId::LIB_VEC_MAP_ASYNC
| StrId::LIB_DICT_MAP_ASYNC
| StrId::LIB_KEYSET_MAP_ASYNC
| StrId::LIB_VEC_FILTER
| StrId::LIB_DICT_FILTER
| StrId::LIB_KEYSET_FILTER
| StrId::LIB_VEC_TAKE
| StrId::LIB_DICT_TAKE
| StrId::LIB_KEYSET_TAKE
| StrId::LIB_C_FIND
| StrId::LIB_C_FINDX
| StrId::LIB_VEC_MAP_WITH_KEY
| StrId::LIB_DICT_MAP_WITH_KEY
| StrId::LIB_KEYSET_MAP_WITH_KEY
| StrId::LIB_DICT_MAP_WITH_KEY_ASYNC
| StrId::LIB_DICT_FROM_KEYS
| StrId::LIB_DICT_FROM_KEYS_ASYNC => {
if param_offset == 0 {
if let Some(ref mut signature_type) = param_storage.signature_type {
add_array_fetch_dataflow(
statements_analyzer,
args[0].1.pos(),
analysis_data,
None,
signature_type,
&mut get_arraykey(false),
);
}
}
if let FunctionLikeIdentifier::Function(
StrId::LIB_VEC_MAP
| StrId::LIB_DICT_MAP
| StrId::LIB_KEYSET_MAP
| StrId::LIB_VEC_MAP_ASYNC
| StrId::LIB_DICT_MAP_ASYNC
| StrId::LIB_KEYSET_MAP_ASYNC
| StrId::LIB_VEC_FILTER
| StrId::LIB_DICT_FILTER
| StrId::LIB_KEYSET_FILTER
| StrId::LIB_VEC_TAKE
| StrId::LIB_DICT_TAKE
| StrId::LIB_KEYSET_TAKE
| StrId::LIB_C_FIND
| StrId::LIB_C_FINDX
| StrId::LIB_VEC_MAP_WITH_KEY
| StrId::LIB_DICT_MAP_WITH_KEY
| StrId::LIB_KEYSET_MAP_WITH_KEY
| StrId::LIB_DICT_MAP_WITH_KEY_ASYNC
| StrId::LIB_DICT_FROM_KEYS
| StrId::LIB_DICT_FROM_KEYS_ASYNC,
) = functionlike_id
{
if param_offset == 0 {
if let Some(ref mut signature_type) = param_storage.signature_type {
add_array_fetch_dataflow(
statements_analyzer,
args[0].1.pos(),
analysis_data,
None,
signature_type,
&mut get_arraykey(false),
);
}

_ => {}
}
}
}
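
The hunk above folds an outer if let FunctionLikeIdentifier::Function(...) plus an inner single-arm match (whose only other arm is an empty _ => {}) into one if let with the or-pattern written directly inside the variant. That is the nesting clippy lints such as collapsible_match and single_match point at. A minimal sketch of the same rewrite, using hypothetical stand-in types rather than the analyzer's real FunctionLikeIdentifier/StrId:

    // Hypothetical stand-ins for FunctionLikeIdentifier / StrId.
    enum Name { Map, Filter, Take, Other }
    enum Ident { Function(Name), Method }

    // Before: an outer `if let` wrapping a match whose only real arm is a long
    // or-pattern, plus an empty catch-all. Clippy (collapsible_match /
    // single_match) flags this nesting.
    fn old(id: &Ident) {
        if let Ident::Function(name) = id {
            match name {
                Name::Map | Name::Filter | Name::Take => println!("sequence function"),
                _ => {}
            }
        }
    }

    // After: the or-pattern moves inside the variant pattern and the inner
    // match disappears.
    fn new(id: &Ident) {
        if let Ident::Function(Name::Map | Name::Filter | Name::Take) = id {
            println!("sequence function");
        }
    }

    fn main() {
        let id = Ident::Function(Name::Filter);
        old(&id);
        new(&id);
    }
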
24 changes: 1 addition & 23 deletions src/analyzer/expr/collection_analyzer.rs
@@ -1,4 +1,4 @@
use std::{collections::BTreeMap, str::FromStr, sync::Arc};
use std::{collections::BTreeMap, sync::Arc};

use hakana_reflection_info::{
data_flow::{
@@ -44,28 +44,6 @@ impl ArrayCreationInfo {
}
}

#[derive(Debug, PartialEq)]
enum TContainerType {
Vec,
Dict,
Keyset,
Vector,
}

impl FromStr for TContainerType {
type Err = ();

fn from_str(input: &str) -> Result<TContainerType, Self::Err> {
match input {
"vec" => Ok(TContainerType::Vec),
"dict" => Ok(TContainerType::Dict),
"keyset" => Ok(TContainerType::Keyset),
"Vector" => Ok(TContainerType::Vector),
_ => Err(()),
}
}
}

pub(crate) fn analyze_vals(
statements_analyzer: &StatementsAnalyzer,
vc_kind: &oxidized::tast::VcKind,
22 changes: 6 additions & 16 deletions src/analyzer/expr/ternary_analyzer.rs
@@ -3,9 +3,9 @@ use std::rc::Rc;

use crate::function_analysis_data::FunctionAnalysisData;
use crate::reconciler::{self, assertion_reconciler};
use crate::scope_analyzer::ScopeAnalyzer;
use crate::scope::if_scope::IfScope;
use crate::scope::{var_has_root, BlockContext};
use crate::scope_analyzer::ScopeAnalyzer;
use crate::statements_analyzer::StatementsAnalyzer;
use crate::stmt::if_conditional_analyzer::{self, add_branch_dataflow};
use crate::stmt_analyzer::AnalysisError;
@@ -71,11 +71,7 @@ pub(crate) fn analyze(
false,
);

let mut if_clauses = if let Ok(if_clauses) = if_clauses {
if_clauses
} else {
vec![]
};
let mut if_clauses = if_clauses.unwrap_or_default();

if if_clauses.len() > 200 {
if_clauses = Vec::new();
@@ -169,7 +165,7 @@ pub(crate) fn analyze(
if let Ok(negated_if_clauses) = hakana_algebra::negate_formula(if_clauses) {
if_scope.negated_clauses = negated_if_clauses;
} else {
if_scope.negated_clauses = if let Ok(new_negated_clauses) = formula_generator::get_formula(
if_scope.negated_clauses = formula_generator::get_formula(
cond_object_id,
cond_object_id,
&aast::Expr(
@@ -181,11 +177,8 @@
analysis_data,
false,
false,
) {
new_negated_clauses
} else {
Vec::new()
};
)
.unwrap_or_default();
}

let negated_clauses = hakana_algebra::simplify_cnf({
@@ -367,10 +360,7 @@ pub(crate) fn analyze(
//these vars were changed in the if and existed before
for redef_var_ifs_id in &redef_var_ifs {
if context.locals.contains_key(redef_var_ifs_id) {
if temp_else_context
.locals
.contains_key(redef_var_ifs_id)
{
if temp_else_context.locals.contains_key(redef_var_ifs_id) {
context.locals.insert(
redef_var_ifs_id.clone(),
Rc::new(combine_union_types(
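
Two of the hunks above replace a hand-written if let Ok(x) = expr { x } else { Vec::new() } with Result::unwrap_or_default(), which yields the Ok value or Default::default() when the call fails. This is presumably what clippy's manual_unwrap_or_default (or the older manual_unwrap_or) lint asks for; a small self-contained sketch with made-up data rather than the real formula_generator API:

    fn parse_clauses(raw: &str) -> Result<Vec<i64>, String> {
        // Toy parser standing in for formula_generator::get_formula.
        raw.split(',')
            .map(|s| s.trim().parse::<i64>().map_err(|e| e.to_string()))
            .collect()
    }

    fn main() {
        let input = "1, 2, x"; // the "x" makes parsing fail

        // Before: the manual if-let-else shape removed in this commit.
        let clauses_manual = if let Ok(clauses) = parse_clauses(input) {
            clauses
        } else {
            Vec::new()
        };

        // After: one call expresses the same fallback.
        let clauses = parse_clauses(input).unwrap_or_default();

        assert_eq!(clauses_manual, clauses);
        println!("{:?}", clauses);
    }
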
12 changes: 6 additions & 6 deletions src/analyzer/stmt/foreach_analyzer.rs
@@ -324,7 +324,7 @@ fn check_iterator_type(
);
value_param = add_union_type(
value_param,
&known_item,
known_item,
codebase,
false,
);
@@ -338,15 +338,15 @@
);
value_param = add_union_type(
value_param,
&known_item,
known_item,
codebase,
false,
);
}
DictKey::Enum(enum_name, member_name) => {
if let Some(literal_value) = statements_analyzer
.get_codebase()
.get_classconst_literal_value(&enum_name, &member_name)
.get_classconst_literal_value(enum_name, member_name)
{
if let Some(value) =
literal_value.get_literal_string_value()
@@ -359,7 +359,7 @@
);
value_param = add_union_type(
value_param,
&known_item,
known_item,
codebase,
false,
);
@@ -374,7 +374,7 @@
);
value_param = add_union_type(
value_param,
&known_item,
known_item,
codebase,
false,
);
@@ -412,7 +412,7 @@ fn check_iterator_type(
false,
);
value_param =
add_union_type(value_param, &known_item, codebase, false);
add_union_type(value_param, known_item, codebase, false);
}
}

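
Every change in foreach_analyzer.rs drops an extra & in calls like add_union_type(value_param, &known_item, ...) and get_classconst_literal_value(&enum_name, &member_name): those arguments are already references, so borrowing them again produces a &&T that only works through auto-deref. That is the pattern clippy's needless_borrow lint reports. A minimal sketch with a stand-in function, not the real add_union_type signature:

    #[derive(Debug)]
    struct Union(Vec<String>);

    // Stand-in combiner: it only ever needs a shared reference.
    fn combine(a: &Union, b: &Union) -> Union {
        Union(a.0.iter().chain(b.0.iter()).cloned().collect())
    }

    fn main() {
        let value_param = Union(vec!["int".to_string()]);
        let item = Union(vec!["string".to_string()]);
        let known_item: &Union = &item;

        // Before: known_item is already &Union, so &known_item is a needless
        // &&Union that clippy::needless_borrow flags.
        let merged_old = combine(&value_param, &known_item);

        // After: pass the existing reference straight through.
        let merged_new = combine(&value_param, known_item);

        println!("{:?} / {:?}", merged_old, merged_new);
    }
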
17 changes: 5 additions & 12 deletions src/analyzer/stmt/ifelse_analyzer.rs
@@ -181,7 +181,7 @@ pub(crate) fn analyze(
if let Ok(negated_if_clauses) = hakana_algebra::negate_formula(if_clauses) {
if_scope.negated_clauses = negated_if_clauses;
} else {
if_scope.negated_clauses = if let Ok(new_negated_clauses) = formula_generator::get_formula(
if_scope.negated_clauses = formula_generator::get_formula(
cond_object_id,
cond_object_id,
&aast::Expr(
@@ -193,11 +193,8 @@
analysis_data,
false,
false,
) {
new_negated_clauses
} else {
Vec::new()
};
)
.unwrap_or_default();
}

let (new_negated_types, _) = hakana_algebra::get_truths_from_formula(
@@ -324,9 +321,7 @@
);

if_scope.updated_vars.insert(var_id.clone());
context
.locals
.insert(var_id.clone(), Rc::new(var_type));
context.locals.insert(var_id.clone(), Rc::new(var_type));
}
}

@@ -366,9 +361,7 @@
&mut existing_var_type.parent_nodes,
var_type.parent_nodes,
);
context
.locals
.insert(var_id, Rc::new(existing_var_type));
context.locals.insert(var_id, Rc::new(existing_var_type));
}
}
}
20 changes: 6 additions & 14 deletions src/analyzer/stmt/loop_analyzer.rs
@@ -11,8 +11,8 @@ use crate::{
expression_analyzer, formula_generator,
function_analysis_data::FunctionAnalysisData,
reconciler,
scope_analyzer::ScopeAnalyzer,
scope::{control_action::ControlAction, loop_scope::LoopScope, BlockContext},
scope_analyzer::ScopeAnalyzer,
statements_analyzer::StatementsAnalyzer,
stmt_analyzer::AnalysisError,
};
@@ -241,8 +241,7 @@ pub(crate) fn analyze<'a>(
for (var_id, continue_context_type) in continue_context.locals.clone() {
if always_assigned_before_loop_body_vars.contains(&var_id) {
// set the vars to whatever the while/foreach loop expects them to be
if let Some(pre_loop_context_type) = pre_loop_context.locals.get(&var_id)
{
if let Some(pre_loop_context_type) = pre_loop_context.locals.get(&var_id) {
if continue_context_type != *pre_loop_context_type {
different_from_pre_loop_types.insert(var_id.clone());
has_changes = true;
@@ -446,8 +445,7 @@ pub(crate) fn analyze<'a>(
for (var_id, possibly_redefined_var_type) in
&cloned_loop_scope.possibly_redefined_loop_parent_vars
{
if let Some(do_context_type) = inner_do_context_inner.locals.get_mut(var_id)
{
if let Some(do_context_type) = inner_do_context_inner.locals.get_mut(var_id) {
*do_context_type = if do_context_type == possibly_redefined_var_type {
possibly_redefined_var_type.clone()
} else {
Expand All @@ -468,9 +466,7 @@ pub(crate) fn analyze<'a>(
inner_do_context = Some(inner_do_context_inner);
} else {
for (var_id, var_type) in &cloned_loop_scope.possibly_redefined_loop_parent_vars {
if let Some(loop_parent_context_type) =
loop_parent_context.locals.get_mut(var_id)
{
if let Some(loop_parent_context_type) = loop_parent_context.locals.get_mut(var_id) {
*loop_parent_context_type = Rc::new(combine_union_types(
var_type,
loop_parent_context_type,
@@ -583,13 +579,9 @@
// if the loop contains an assertion and there are no break statements, we can negate that assertion
// and apply it to the current context

let negated_pre_condition_clauses = if let Ok(clauses) =
let negated_pre_condition_clauses =
hakana_algebra::negate_formula(pre_condition_clauses.into_iter().flatten().collect())
{
clauses
} else {
vec![]
};
.unwrap_or_default();

let (negated_pre_condition_types, _) = hakana_algebra::get_truths_from_formula(
negated_pre_condition_clauses.iter().collect(),
29 changes: 13 additions & 16 deletions src/code_info/t_atomic.rs
@@ -1025,25 +1025,21 @@ impl TAtomic {
defining_entity,
extra_types,
..
} => {
return TAtomic::TGenericParam {
as_type: Box::new(new_as_type),
param_name: *param_name,
defining_entity: *defining_entity,
extra_types: extra_types.clone(),
};
}
} => TAtomic::TGenericParam {
as_type: Box::new(new_as_type),
param_name: *param_name,
defining_entity: *defining_entity,
extra_types: extra_types.clone(),
},
TAtomic::TClassTypeConstant {
class_type,
member_name,
..
} => {
return TAtomic::TClassTypeConstant {
as_type: Box::new(new_as_type),
class_type: class_type.clone(),
member_name: *member_name,
};
}
} => TAtomic::TClassTypeConstant {
as_type: Box::new(new_as_type),
class_type: class_type.clone(),
member_name: *member_name,
},
_ => panic!(),
}
}
@@ -1504,7 +1500,8 @@ impl TAtomic {
as_type: Some(as_type),
..
} => as_type.is_json_compatible(banned_type_aliases),
TAtomic::TGenericParam { as_type, .. } | TAtomic::TClassTypeConstant { as_type, .. } => {
TAtomic::TGenericParam { as_type, .. }
| TAtomic::TClassTypeConstant { as_type, .. } => {
as_type.is_json_compatible(banned_type_aliases)
}
_ => false,
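
In the first t_atomic.rs hunk, each match arm stops opening a block with an explicit return; the arm bodies become plain expressions and the match itself is the value of the surrounding code. When such a return already sits in tail position, clippy's needless_return is the lint that asks for this style (the exact lint involved here is an assumption). A small sketch of the before/after shape with an invented enum:

    enum Shape {
        Circle { r: f64 },
        Rect { w: f64, h: f64 },
    }

    // Before: every arm opens a block and uses `return`, even though the
    // match is already the function's tail expression.
    fn area_old(s: &Shape) -> f64 {
        match s {
            Shape::Circle { r } => {
                return std::f64::consts::PI * r * r;
            }
            Shape::Rect { w, h } => {
                return w * h;
            }
        }
    }

    // After: arms are expressions and the match is returned as a whole.
    fn area_new(s: &Shape) -> f64 {
        match s {
            Shape::Circle { r } => std::f64::consts::PI * r * r,
            Shape::Rect { w, h } => w * h,
        }
    }

    fn main() {
        let shapes = [Shape::Circle { r: 1.0 }, Shape::Rect { w: 2.0, h: 3.0 }];
        for s in &shapes {
            assert!((area_old(s) - area_new(s)).abs() < 1e-12);
        }
        println!("old and new agree");
    }
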