gcc/
	* basic-block.h (FOR_EACH_BB): Eliminate macro.
	* asan.c (transform_statements, execute_sanopt): Eliminate use of
	FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun explicit.
	* auto-inc-dec.c (rest_of_handle_auto_inc_dec): Likewise.
	* bb-reorder.c (find_rarely_executed_basic_blocks_and_crossing_edges,
	set_edge_can_fallthru_flag, fix_up_fall_thru_edges,
	fix_crossing_unconditional_branches, add_reg_crossing_jump_notes,
	insert_section_boundary_note, rest_of_handle_reorder_blocks,
	duplicate_computed_gotos): Likewise.
	* cfg.c (clear_edges, compact_blocks, brief_dump_cfg): Likewise.
	* cfganal.c (find_unreachable_blocks, add_noreturn_fake_exit_edges,
	compute_dominance_frontiers_1, single_pred_before_succ_order):
	Likewise.
	* cfgbuild.c (find_many_sub_basic_blocks): Likewise.
	* cfgcleanup.c (try_optimize_cfg, delete_dead_jumptables): Likewise.
	* cfgexpand.c (add_scope_conflicts, discover_nonconstant_array_refs):
	Likewise.
	* cfgloop.c (flow_loops_cfg_dump, get_loop_body, record_loop_exits,
	verify_loop_structure): Likewise.
	* cfgloopanal.c (mark_loop_exit_edges): Likewise.
	* cfgrtl.c (compute_bb_for_insn, find_partition_fixes,
	verify_hot_cold_block_grouping, purge_all_dead_edges,
	fixup_abnormal_edges, record_effective_endpoints,
	outof_cfg_layout_mode, fixup_reorder_chain, force_one_exit_fallthru,
	break_superblocks): Likewise.
	* cgraphbuild.c (build_cgraph_edges, rebuild_cgraph_edges,
	cgraph_rebuild_references): Likewise.
	* combine-stack-adj.c (combine_stack_adjustments): Likewise.
	* combine.c (delete_noop_moves, create_log_links,
	combine_instructions): Likewise.
	* config/arm/arm.c (thumb1_reorg, thumb2_reorg): Likewise.
	* config/bfin/bfin.c (bfin_gen_bundles, reorder_var_tracking_notes):
	Likewise.
	* config/c6x/c6x.c (c6x_gen_bundles, conditionalize_after_sched,
	c6x_reorg): Likewise.
	* config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise.
	* config/frv/frv.c (frv_optimize_membar): Likewise.
	* config/i386/i386.c (ix86_finalize_stack_realign_flags): Likewise.
	* config/ia64/ia64.c (ia64_reorg): Likewise.
	* config/mips/mips.c (mips_annotate_pic_calls): Likewise.
	* config/picochip/picochip.c (reorder_var_tracking_notes): Likewise.
	* config/rs6000/rs6000.c (rs6000_alloc_sdmode_stack_slot): Likewise.
	* config/s390/s390.c (s390_regs_ever_clobbered): Likewise.
	* config/sh/sh_treg_combine.cc (sh_treg_combine::execute): Likewise.
	* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
	* config/tilegx/tilegx.c (tilegx_gen_bundles,
	reorder_var_tracking_notes): Likewise.
	* config/tilepro/tilepro.c (tilepro_gen_bundles,
	reorder_var_tracking_notes): Likewise.
	* coverage.c (coverage_compute_cfg_checksum): Likewise.
	* cprop.c (compute_hash_table_work, compute_cprop_data,
	local_cprop_pass, find_implicit_sets): Likewise.
	* cse.c (cse_condition_code_reg): Likewise.
	* dce.c (prescan_insns_for_dce): Likewise.
	* df-core.c (df_compact_blocks): Likewise.
	* df-problems.c (df_word_lr_alloc): Likewise.
	* df-scan.c (df_scan_start_dump, df_scan_blocks, df_insn_rescan_all,
	df_update_entry_exit_and_calls): Likewise.
	* dominance.c (calculate_dominance_info, verify_dominators,
	debug_dominance_info): Likewise.
	* dse.c (dse_step5_nospill): Likewise.
	* except.c (finish_eh_generation): Likewise.
	* final.c (compute_alignments): Likewise.
	* function.c (thread_prologue_and_epilogue_insns,
	rest_of_match_asm_constraints): Likewise.
	* gcse.c (compute_hash_table_work, prune_expressions,
	compute_pre_data, compute_code_hoist_vbeinout, hoist_code,
	calculate_bb_reg_pressure, compute_ld_motion_mems): Likewise.
	* gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
	* gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour,
	find_explicit_erroneous_behaviour): Likewise.
	* graphite-sese-to-poly.c (rewrite_reductions_out_of_ssa,
	rewrite_cross_bb_scalar_deps_out_of_ssa): Likewise.
	* haifa-sched.c (haifa_sched_init): Likewise.
	* hw-doloop.c (discover_loops, set_bb_indices, reorder_loops):
	Likewise.
	* ifcvt.c (if_convert): Likewise.
	* init-regs.c (initialize_uninitialized_regs): Likewise.
	* ipa-prop.c (ipcp_transform_function): Likewise.
	* ipa-pure-const.c (analyze_function): Likewise.
	* ipa-split.c (find_split_points, execute_split_functions): Likewise.
	* ira-build.c (form_loop_tree): Likewise.
	* ira-costs.c (find_costs_and_classes): Likewise.
	* ira-emit.c (emit_moves, add_ranges_and_copies, ira_emit): Likewise.
	* ira.c (decrease_live_ranges_number, compute_regs_asm_clobbered,
	mark_elimination, update_equiv_regs, find_moveable_pseudos,
	split_live_ranges_for_shrink_wrap, allocate_initial_values): Likewise.
	* jump.c (mark_all_labels): Likewise.
	* lcm.c (compute_laterin, compute_insert_delete, compute_available,
	compute_nearerout, compute_rev_insert_delete): Likewise.
	* loop-init.c (fix_loop_structure): Likewise.
	* loop-invariant.c (calculate_loop_reg_pressure): Likewise.
	* lower-subreg.c (decompose_multiword_subregs): Likewise.
	* lra-assigns.c (assign_by_spills): Likewise.
	* lra-coalesce.c (lra_coalesce): Likewise.
	* lra-constraints.c (lra_inheritance, remove_inheritance_pseudos):
	Likewise.
	* lra-eliminations.c (lra_init_elimination): Likewise.
	* lra-spills.c (assign_spill_hard_regs, spill_pseudos,
	lra_final_code_change): Likewise.
	* lra.c (remove_scratches, check_rtl, has_nonexceptional_receiver,
	update_inc_notes): Likewise.
	* mcf.c (adjust_cfg_counts): Likewise.
	* mode-switching.c (optimize_mode_switching): Likewise.
	* modulo-sched.c (rest_of_handle_sms): Likewise.
	* omp-low.c (optimize_omp_library_calls, expand_omp_taskreg,
	expand_omp_target): Likewise.
	* postreload-gcse.c (alloc_mem, compute_hash_table): Likewise.
	* postreload.c (reload_cse_regs_1): Likewise.
	* predict.c (strip_predict_hints, tree_bb_level_predictions,
	tree_estimate_probability, expensive_function_p,
	estimate_bb_frequencies, compute_function_frequency): Likewise.
	* profile.c (is_inconsistent, compute_branch_probabilities,
	branch_prob): Likewise.
	* ree.c (find_removable_extensions): Likewise.
	* reg-stack.c (compensate_edges, convert_regs, reg_to_stack):
	Likewise.
	* regcprop.c (copyprop_hardreg_forward): Likewise.
	* reginfo.c (init_subregs_of_mode): Likewise.
	* regrename.c (regrename_analyze): Likewise.
	* regstat.c (regstat_compute_ri, regstat_compute_calls_crossed):
	Likewise.
	* reload1.c (has_nonexceptional_receiver, reload,
	calculate_elim_costs_all_insns): Likewise.
	* resource.c (init_resource_info, free_resource_info): Likewise.
	* sched-ebb.c (schedule_ebbs): Likewise.
	* sched-rgn.c (is_cfg_nonregular, find_single_block_region,
	haifa_find_rgns, sched_rgn_local_init): Likewise.
	* sel-sched-dump.c (sel_dump_cfg_2): Likewise.
	* sel-sched-ir.c (init_lv_sets, free_lv_sets,
	make_regions_from_the_rest): Likewise.
	* sese.c (build_sese_loop_nests, sese_build_liveouts): Likewise.
	* stack-ptr-mod.c (notice_stack_pointer_modification): Likewise.
	* store-motion.c (compute_store_table, build_store_vectors,
	one_store_motion_pass): Likewise.
	* tracer.c (tail_duplicate): Likewise.
	* trans-mem.c (compute_transaction_bits): Likewise.
	* tree-call-cdce.c (tree_call_cdce): Likewise.
	* tree-cfg.c (replace_loop_annotate, factor_computed_gotos,
	fold_cond_expr_cond, make_edges, assign_discriminators,
	make_abnormal_goto_edges, cleanup_dead_labels, group_case_labels,
	dump_cfg_stats, gimple_verify_flow_info, print_loop,
	execute_fixup_cfg): Likewise.
	* tree-cfgcleanup.c (cleanup_tree_cfg_1, merge_phi_nodes): Likewise.
	* tree-complex.c (init_dont_simulate_again, tree_lower_complex):
	Likewise.
	* tree-dfa.c (collect_dfa_stats, dump_enumerated_decls): Likewise.
	* tree-eh.c (execute_lower_resx, execute_lower_eh_dispatch,
	mark_reachable_handlers): Likewise.
	* tree-emutls.c (lower_emutls_function_body): Likewise.
	* tree-if-conv.c (main_tree_if_conversion): Likewise.
	* tree-inline.c (optimize_inline_calls): Likewise.
	* tree-into-ssa.c (rewrite_into_ssa, update_ssa): Likewise.
	* tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise.
	* tree-object-size.c (compute_object_sizes): Likewise.
	* tree-outof-ssa.c (eliminate_useless_phis, rewrite_trees,
	insert_backedge_copies): Likewise.
	* tree-profile.c (tree_profiling): Likewise.
	* tree-scalar-evolution.c (scev_const_prop): Likewise.
	* tree-sra.c (scan_function, sra_modify_function_body,
	propagate_dereference_distances, ipa_sra_modify_function_body,
	convert_callers): Likewise.
	* tree-ssa-ccp.c (ccp_initialize, execute_fold_all_builtins):
	Likewise.
	* tree-ssa-coalesce.c (build_ssa_conflict_graph,
	create_outofssa_var_map, coalesce_partitions): Likewise.
	* tree-ssa-copy.c (init_copy_prop): Likewise.
	* tree-ssa-copyrename.c (rename_ssa_copies): Likewise.
	* tree-ssa-dce.c (find_obviously_necessary_stmts,
	eliminate_unnecessary_stmts): Likewise.
	* tree-ssa-dom.c (free_all_edge_infos, tree_ssa_dominator_optimize):
	Likewise.
	* tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise.
	* tree-ssa-live.c (clear_unused_block_pointer, remove_unused_locals,
	new_tree_live_info, calculate_live_on_exit, dump_live_info): Likewise.
	* tree-ssa-loop-im.c (analyze_memory_references,
	fill_always_executed_in, tree_ssa_lim_finalize): Likewise.
	* tree-ssa-loop-manip.c (find_uses_to_rename,
	verify_loop_closed_ssa): Likewise.
	* tree-ssa-math-opts.c (execute_cse_reciprocals, execute_cse_sincos,
	execute_optimize_bswap, execute_optimize_widening_mul): Likewise.
	* tree-ssa-propagate.c (substitute_and_fold): Likewise.
	* tree-ssa-structalias.c (compute_points_to_sets): Likewise.
	* tree-ssa-tail-merge.c (find_same_succ, reset_cluster_vectors):
	Likewise.
	* tree-ssa-ter.c (find_replaceable_exprs): Likewise.
	* tree-ssa-threadupdate.c (thread_through_all_blocks): Likewise.
	* tree-ssa-uncprop.c (associate_equivalences_with_edges,
	tree_ssa_uncprop): Likewise.
	* tree-ssa-uninit.c (warn_uninitialized_vars,
	execute_late_warn_uninitialized): Likewise.
	* tree-ssa.c (verify_ssa, execute_update_addresses_taken): Likewise.
	* tree-stdarg.c (check_all_va_list_escapes, execute_optimize_stdarg):
	Likewise.
	* tree-switch-conversion.c (do_switchconv): Likewise.
	* tree-vect-generic.c (expand_vector_operations): Likewise.
	* tree-vectorizer.c (adjust_simduid_builtins, note_simd_array_uses,
	execute_vect_slp): Likewise.
	* tree-vrp.c (check_all_array_refs, remove_range_assertions,
	vrp_initialize, identify_jump_threads): Likewise.
	* tsan.c (instrument_memory_accesses): Likewise.
	* ubsan.c (ubsan_pass): Likewise.
	* value-prof.c (verify_histograms,
	gimple_value_profile_transformations,
	gimple_find_values_to_profile): Likewise.
	* var-tracking.c (vt_find_locations, dump_dataflow_sets,
	vt_emit_notes, vt_initialize, delete_debug_insns, vt_finalize):
	Likewise.
gcc/testsuite/
	* g++.dg/plugin/selfassign.c (execute_warn_self_assign): Eliminate
	use of FOR_EACH_BB in favor of FOR_EACH_BB_FN, to make use of cfun
	explicit.
	* gcc.dg/plugin/selfassign.c (execute_warn_self_assign): Likewise.

From-SVN: r205828
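
A minimal before/after sketch of the change applied at each site (the loop
body process_block is an illustrative placeholder, not code from the patch):

	basic_block bb;

	/* Before: FOR_EACH_BB (BB) expanded to FOR_EACH_BB_FN (BB, cfun),
	   leaving the reference to cfun implicit.  */
	FOR_EACH_BB (bb)
	  process_block (bb);

	/* After: the function whose blocks are traversed is spelled out.  */
	FOR_EACH_BB_FN (bb, cfun)
	  process_block (bb);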
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
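
/* An illustrative example of the four object_size_type modes: given

     struct S { char a[4]; char b[4]; } s;

   __builtin_object_size (&s.a[1], 0) is 7, the maximum number of bytes
   to the end of the whole object s, whereas
   __builtin_object_size (&s.a[1], 1) is 3, the maximum number of bytes
   to the end of the subobject s.a.  Types 2 and 3 are the corresponding
   lower bounds.  */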


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
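
/* For instance, with a declaration such as

     struct T { int i; char c[8]; } t;

   compute_object_offset on the reference t.c[3] within t folds to the
   constant 4 + 3 * 1 = 7, assuming (purely for this illustration) a
   4-byte int and no padding between the fields.  */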


/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
          if (dsz.is_negative ())
            sz = 0;
          else if (dsz.fits_uhwi ())
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
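
/* For example, with a hypothetical allocator declared as

     void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   alloc_object_size gives my_alloc (32) an object size of 32 and
   my_calloc (4, 8) an object size of 4 * 8 = 32, provided the
   arguments are INTEGER_CSTs; otherwise the result stays
   unknown[object_size_type].  */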


/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}


/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
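
/* For example, for

     char buf[10];
     char *p = &buf[2];

   compute_builtin_object_size on the SSA name holding p, with
   object_size_type 0 (and after init_object_sizes has set up the
   computed[] bitmaps), returns 8: the number of bytes from p to the
   end of buf.  */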

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (tree_fits_uhwi_p (ost))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_to_uhwi (ost);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}