PR c++/61339 - add mismatch between struct and class [-Wmismatched-tags] to non-bugs
gcc/c/ChangeLog: PR c++/61339 * c-decl.c (xref_tag): Change class-key of PODs to struct and others to class. (field_decl_cmp): Same. * c-parser.c (c_parser_struct_or_union_specifier): Same. * c-tree.h: Same. * gimple-parser.c (c_parser_gimple_compound_statement): Same. gcc/c-family/ChangeLog: PR c++/61339 * c-opts.c (handle_deferred_opts): : Change class-key of PODs to struct and others to class. * c-pretty-print.h: Same. gcc/cp/ChangeLog: PR c++/61339 * cp-tree.h: Change class-key of PODs to struct and others to class. * search.c: Same. * semantics.c (finalize_nrv_r): Same. gcc/lto/ChangeLog: PR c++/61339 * lto-common.c (lto_splay_tree_new): : Change class-key of PODs to struct and others to class. (mentions_vars_p): Same. (register_resolution): Same. (lto_register_var_decl_in_symtab): Same. (lto_register_function_decl_in_symtab): Same. (cmp_tree): Same. (lto_read_decls): Same. gcc/ChangeLog: PR c++/61339 * auto-profile.c: Change class-key of PODs to struct and others to class. * basic-block.h: Same. * bitmap.c (bitmap_alloc): Same. * bitmap.h: Same. * builtins.c (expand_builtin_prefetch): Same. (expand_builtin_interclass_mathfn): Same. (expand_builtin_strlen): Same. (expand_builtin_mempcpy_args): Same. (expand_cmpstr): Same. (expand_builtin___clear_cache): Same. (expand_ifn_atomic_bit_test_and): Same. (expand_builtin_thread_pointer): Same. (expand_builtin_set_thread_pointer): Same. * caller-save.c (setup_save_areas): Same. (replace_reg_with_saved_mem): Same. (insert_restore): Same. (insert_save): Same. (add_used_regs): Same. * cfg.c (get_bb_copy): Same. (set_loop_copy): Same. * cfg.h: Same. * cfganal.h: Same. * cfgexpand.c (alloc_stack_frame_space): Same. (add_stack_var): Same. (add_stack_var_conflict): Same. (add_scope_conflicts_1): Same. (update_alias_info_with_stack_vars): Same. (expand_used_vars): Same. * cfghooks.c (redirect_edge_and_branch_force): Same. (delete_basic_block): Same. (split_edge): Same. (make_forwarder_block): Same. (force_nonfallthru): Same. (duplicate_block): Same. (lv_flush_pending_stmts): Same. * cfghooks.h: Same. * cfgloop.c (flow_loops_cfg_dump): Same. (flow_loop_nested_p): Same. (superloop_at_depth): Same. (get_loop_latch_edges): Same. (flow_loop_dump): Same. (flow_loops_dump): Same. (flow_loops_free): Same. (flow_loop_nodes_find): Same. (establish_preds): Same. (flow_loop_tree_node_add): Same. (flow_loop_tree_node_remove): Same. (flow_loops_find): Same. (find_subloop_latch_edge_by_profile): Same. (find_subloop_latch_edge_by_ivs): Same. (mfb_redirect_edges_in_set): Same. (form_subloop): Same. (merge_latch_edges): Same. (disambiguate_multiple_latches): Same. (disambiguate_loops_with_multiple_latches): Same. (flow_bb_inside_loop_p): Same. (glb_enum_p): Same. (get_loop_body_with_size): Same. (get_loop_body): Same. (fill_sons_in_loop): Same. (get_loop_body_in_dom_order): Same. (get_loop_body_in_custom_order): Same. (release_recorded_exits): Same. (get_loop_exit_edges): Same. (num_loop_branches): Same. (remove_bb_from_loops): Same. (find_common_loop): Same. (delete_loop): Same. (cancel_loop): Same. (verify_loop_structure): Same. (loop_preheader_edge): Same. (loop_exit_edge_p): Same. (single_exit): Same. (loop_exits_to_bb_p): Same. (loop_exits_from_bb_p): Same. (get_loop_location): Same. (record_niter_bound): Same. (get_estimated_loop_iterations_int): Same. (max_stmt_executions_int): Same. (likely_max_stmt_executions_int): Same. (get_estimated_loop_iterations): Same. (get_max_loop_iterations): Same. (get_max_loop_iterations_int): Same. 
(get_likely_max_loop_iterations): Same. * cfgloop.h (simple_loop_desc): Same. (get_loop): Same. (loop_depth): Same. (loop_outer): Same. (loop_iterator::next): Same. (loop_outermost): Same. * cfgloopanal.c (mark_irreducible_loops): Same. (num_loop_insns): Same. (average_num_loop_insns): Same. (expected_loop_iterations_unbounded): Same. (expected_loop_iterations): Same. (mark_loop_exit_edges): Same. (single_likely_exit): Same. * cfgloopmanip.c (fix_bb_placement): Same. (fix_bb_placements): Same. (remove_path): Same. (place_new_loop): Same. (add_loop): Same. (scale_loop_frequencies): Same. (scale_loop_profile): Same. (create_empty_if_region_on_edge): Same. (create_empty_loop_on_edge): Same. (loopify): Same. (unloop): Same. (fix_loop_placements): Same. (copy_loop_info): Same. (duplicate_loop): Same. (duplicate_subloops): Same. (loop_redirect_edge): Same. (can_duplicate_loop_p): Same. (duplicate_loop_to_header_edge): Same. (mfb_keep_just): Same. (has_preds_from_loop): Same. (create_preheader): Same. (create_preheaders): Same. (lv_adjust_loop_entry_edge): Same. (loop_version): Same. * cfgloopmanip.h: Same. * cgraph.h: Same. * cgraphbuild.c: Same. * combine.c (make_extraction): Same. * config/i386/i386-features.c: Same. * config/i386/i386-features.h: Same. * config/i386/i386.c (ix86_emit_outlined_ms2sysv_save): Same. (ix86_emit_outlined_ms2sysv_restore): Same. (ix86_noce_conversion_profitable_p): Same. (ix86_init_cost): Same. (ix86_simd_clone_usable): Same. * configure.ac: Same. * coretypes.h: Same. * data-streamer-in.c (string_for_index): Same. (streamer_read_indexed_string): Same. (streamer_read_string): Same. (bp_unpack_indexed_string): Same. (bp_unpack_string): Same. (streamer_read_uhwi): Same. (streamer_read_hwi): Same. (streamer_read_gcov_count): Same. (streamer_read_wide_int): Same. * data-streamer.h (streamer_write_bitpack): Same. (bp_unpack_value): Same. (streamer_write_char_stream): Same. (streamer_write_hwi_in_range): Same. (streamer_write_record_start): Same. * ddg.c (create_ddg_dep_from_intra_loop_link): Same. (add_cross_iteration_register_deps): Same. (build_intra_loop_deps): Same. * df-core.c (df_analyze): Same. (loop_post_order_compute): Same. (loop_inverted_post_order_compute): Same. * df-problems.c (df_rd_alloc): Same. (df_rd_simulate_one_insn): Same. (df_rd_local_compute): Same. (df_rd_init_solution): Same. (df_rd_confluence_n): Same. (df_rd_transfer_function): Same. (df_rd_free): Same. (df_rd_dump_defs_set): Same. (df_rd_top_dump): Same. (df_lr_alloc): Same. (df_lr_reset): Same. (df_lr_local_compute): Same. (df_lr_init): Same. (df_lr_confluence_n): Same. (df_lr_free): Same. (df_lr_top_dump): Same. (df_lr_verify_transfer_functions): Same. (df_live_alloc): Same. (df_live_reset): Same. (df_live_init): Same. (df_live_confluence_n): Same. (df_live_finalize): Same. (df_live_free): Same. (df_live_top_dump): Same. (df_live_verify_transfer_functions): Same. (df_mir_alloc): Same. (df_mir_reset): Same. (df_mir_init): Same. (df_mir_confluence_n): Same. (df_mir_free): Same. (df_mir_top_dump): Same. (df_word_lr_alloc): Same. (df_word_lr_reset): Same. (df_word_lr_init): Same. (df_word_lr_confluence_n): Same. (df_word_lr_free): Same. (df_word_lr_top_dump): Same. (df_md_alloc): Same. (df_md_simulate_one_insn): Same. (df_md_reset): Same. (df_md_init): Same. (df_md_free): Same. (df_md_top_dump): Same. * df-scan.c (df_insn_delete): Same. (df_insn_rescan): Same. (df_notes_rescan): Same. (df_sort_and_compress_mws): Same. (df_install_mws): Same. (df_refs_add_to_chains): Same. 
(df_ref_create_structure): Same. (df_ref_record): Same. (df_def_record_1): Same. (df_find_hard_reg_defs): Same. (df_uses_record): Same. (df_get_conditional_uses): Same. (df_get_call_refs): Same. (df_recompute_luids): Same. (df_get_entry_block_def_set): Same. (df_entry_block_defs_collect): Same. (df_get_exit_block_use_set): Same. (df_exit_block_uses_collect): Same. (df_mws_verify): Same. (df_bb_verify): Same. * df.h (df_scan_get_bb_info): Same. * doc/tm.texi: Same. * dse.c (record_store): Same. * dumpfile.h: Same. * emit-rtl.c (const_fixed_hasher::equal): Same. (set_mem_attributes_minus_bitpos): Same. (change_address): Same. (adjust_address_1): Same. (offset_address): Same. * emit-rtl.h: Same. * except.c (dw2_build_landing_pads): Same. (sjlj_emit_dispatch_table): Same. * explow.c (allocate_dynamic_stack_space): Same. (emit_stack_probe): Same. (probe_stack_range): Same. * expmed.c (store_bit_field_using_insv): Same. (store_bit_field_1): Same. (store_integral_bit_field): Same. (extract_bit_field_using_extv): Same. (extract_bit_field_1): Same. (emit_cstore): Same. * expr.c (emit_block_move_via_cpymem): Same. (expand_cmpstrn_or_cmpmem): Same. (set_storage_via_setmem): Same. (emit_single_push_insn_1): Same. (expand_assignment): Same. (store_constructor): Same. (expand_expr_real_2): Same. (expand_expr_real_1): Same. (try_casesi): Same. * flags.h: Same. * function.c (try_fit_stack_local): Same. (assign_stack_local_1): Same. (assign_stack_local): Same. (cut_slot_from_list): Same. (insert_slot_to_list): Same. (max_slot_level): Same. (move_slot_to_level): Same. (temp_address_hasher::equal): Same. (remove_unused_temp_slot_addresses): Same. (assign_temp): Same. (combine_temp_slots): Same. (update_temp_slot_address): Same. (preserve_temp_slots): Same. * function.h: Same. * fwprop.c: Same. * gcc-rich-location.h: Same. * gcov.c: Same. * genattrtab.c (check_attr_test): Same. (check_attr_value): Same. (convert_set_attr_alternative): Same. (convert_set_attr): Same. (check_defs): Same. (copy_boolean): Same. (get_attr_value): Same. (expand_delays): Same. (make_length_attrs): Same. (min_fn): Same. (make_alternative_compare): Same. (simplify_test_exp): Same. (tests_attr_p): Same. (get_attr_order): Same. (clear_struct_flag): Same. (gen_attr): Same. (compares_alternatives_p): Same. (gen_insn): Same. (gen_delay): Same. (find_attrs_to_cache): Same. (write_test_expr): Same. (walk_attr_value): Same. (write_attr_get): Same. (eliminate_known_true): Same. (write_insn_cases): Same. (write_attr_case): Same. (write_attr_valueq): Same. (write_attr_value): Same. (write_dummy_eligible_delay): Same. (next_comma_elt): Same. (find_attr): Same. (make_internal_attr): Same. (copy_rtx_unchanging): Same. (gen_insn_reserv): Same. (check_tune_attr): Same. (make_automaton_attrs): Same. (handle_arg): Same. * genextract.c (gen_insn): Same. (VEC_char_to_string): Same. * genmatch.c (print_operand): Same. (lower): Same. (parser::parse_operation): Same. (parser::parse_capture): Same. (parser::parse_c_expr): Same. (parser::parse_simplify): Same. (main): Same. * genoutput.c (output_operand_data): Same. (output_get_insn_name): Same. (compare_operands): Same. (place_operands): Same. (process_template): Same. (validate_insn_alternatives): Same. (validate_insn_operands): Same. (gen_expand): Same. (note_constraint): Same. * genpreds.c (write_one_predicate_function): Same. (add_constraint): Same. (process_define_register_constraint): Same. (write_lookup_constraint_1): Same. (write_lookup_constraint_array): Same. (write_insn_constraint_len): Same. 
(write_reg_class_for_constraint_1): Same. (write_constraint_satisfied_p_array): Same. * genrecog.c (optimize_subroutine_group): Same. * gensupport.c (process_define_predicate): Same. (queue_pattern): Same. (remove_from_queue): Same. (process_rtx): Same. (is_predicable): Same. (change_subst_attribute): Same. (subst_pattern_match): Same. (alter_constraints): Same. (alter_attrs_for_insn): Same. (shift_output_template): Same. (alter_output_for_subst_insn): Same. (process_one_cond_exec): Same. (subst_dup): Same. (process_define_cond_exec): Same. (mnemonic_htab_callback): Same. (gen_mnemonic_attr): Same. (read_md_rtx): Same. * ggc-page.c: Same. * gimple-loop-interchange.cc (dump_reduction): Same. (dump_induction): Same. (loop_cand::~loop_cand): Same. (free_data_refs_with_aux): Same. (tree_loop_interchange::interchange_loops): Same. (tree_loop_interchange::map_inductions_to_loop): Same. (tree_loop_interchange::move_code_to_inner_loop): Same. (compute_access_stride): Same. (compute_access_strides): Same. (proper_loop_form_for_interchange): Same. (tree_loop_interchange_compute_ddrs): Same. (prune_datarefs_not_in_loop): Same. (prepare_data_references): Same. (pass_linterchange::execute): Same. * gimple-loop-jam.c (bb_prevents_fusion_p): Same. (unroll_jam_possible_p): Same. (fuse_loops): Same. (adjust_unroll_factor): Same. (tree_loop_unroll_and_jam): Same. * gimple-loop-versioning.cc (loop_versioning::~loop_versioning): Same. (loop_versioning::expensive_stmt_p): Same. (loop_versioning::version_for_unity): Same. (loop_versioning::dump_inner_likelihood): Same. (loop_versioning::find_per_loop_multiplication): Same. (loop_versioning::analyze_term_using_scevs): Same. (loop_versioning::record_address_fragment): Same. (loop_versioning::analyze_expr): Same. (loop_versioning::analyze_blocks): Same. (loop_versioning::prune_conditions): Same. (loop_versioning::merge_loop_info): Same. (loop_versioning::add_loop_to_queue): Same. (loop_versioning::decide_whether_loop_is_versionable): Same. (loop_versioning::make_versioning_decisions): Same. (loop_versioning::implement_versioning_decisions): Same. * gimple-ssa-evrp-analyze.c (evrp_range_analyzer::record_ranges_from_phis): Same. * gimple-ssa-store-merging.c (split_store::split_store): Same. (count_multiple_uses): Same. (split_group): Same. (imm_store_chain_info::output_merged_store): Same. (pass_store_merging::process_store): Same. * gimple-ssa-strength-reduction.c (slsr_process_phi): Same. * gimple-ssa-warn-alloca.c (adjusted_warn_limit): Same. (is_max): Same. (alloca_call_type): Same. (pass_walloca::execute): Same. * gimple-streamer-in.c (input_phi): Same. (input_gimple_stmt): Same. * gimple-streamer.h: Same. * godump.c (go_force_record_alignment): Same. (go_format_type): Same. (go_output_type): Same. (go_output_fndecl): Same. (go_output_typedef): Same. (keyword_hash_init): Same. (find_dummy_types): Same. * graph.c (draw_cfg_nodes_no_loops): Same. (draw_cfg_nodes_for_loop): Same. * hard-reg-set.h (hard_reg_set_iter_next): Same. * hsa-brig.c: Same. * hsa-common.h (hsa_internal_fn_hasher::equal): Same. * hsa-dump.c (dump_hsa_cfun): Same. * hsa-gen.c (gen_function_def_parameters): Same. * hsa-regalloc.c (dump_hsa_cfun_regalloc): Same. * input.c (dump_line_table_statistics): Same. (test_lexer): Same. * input.h: Same. * internal-fn.c (get_multi_vector_move): Same. (expand_load_lanes_optab_fn): Same. (expand_GOMP_SIMT_ENTER_ALLOC): Same. (expand_GOMP_SIMT_EXIT): Same. (expand_GOMP_SIMT_LAST_LANE): Same. (expand_GOMP_SIMT_ORDERED_PRED): Same. 
(expand_GOMP_SIMT_VOTE_ANY): Same. (expand_GOMP_SIMT_XCHG_BFLY): Same. (expand_GOMP_SIMT_XCHG_IDX): Same. (expand_addsub_overflow): Same. (expand_neg_overflow): Same. (expand_mul_overflow): Same. (expand_call_mem_ref): Same. (expand_mask_load_optab_fn): Same. (expand_scatter_store_optab_fn): Same. (expand_gather_load_optab_fn): Same. * ipa-cp.c (ipa_get_parm_lattices): Same. (print_all_lattices): Same. (ignore_edge_p): Same. (build_toporder_info): Same. (free_toporder_info): Same. (push_node_to_stack): Same. (ipcp_lattice<valtype>::set_contains_variable): Same. (set_agg_lats_to_bottom): Same. (ipcp_bits_lattice::meet_with): Same. (set_single_call_flag): Same. (initialize_node_lattices): Same. (ipa_get_jf_ancestor_result): Same. (ipcp_verify_propagated_values): Same. (propagate_scalar_across_jump_function): Same. (propagate_context_across_jump_function): Same. (propagate_bits_across_jump_function): Same. (ipa_vr_operation_and_type_effects): Same. (propagate_vr_across_jump_function): Same. (set_check_aggs_by_ref): Same. (set_chain_of_aglats_contains_variable): Same. (merge_aggregate_lattices): Same. (agg_pass_through_permissible_p): Same. (propagate_aggs_across_jump_function): Same. (call_passes_through_thunk_p): Same. (propagate_constants_across_call): Same. (devirtualization_time_bonus): Same. (good_cloning_opportunity_p): Same. (context_independent_aggregate_values): Same. (gather_context_independent_values): Same. (perform_estimation_of_a_value): Same. (estimate_local_effects): Same. (value_topo_info<valtype>::add_val): Same. (add_all_node_vals_to_toposort): Same. (value_topo_info<valtype>::propagate_effects): Same. (ipcp_propagate_stage): Same. (ipcp_discover_new_direct_edges): Same. (same_node_or_its_all_contexts_clone_p): Same. (cgraph_edge_brings_value_p): Same. (gather_edges_for_value): Same. (create_specialized_node): Same. (find_more_scalar_values_for_callers_subset): Same. (find_more_contexts_for_caller_subset): Same. (copy_plats_to_inter): Same. (intersect_aggregates_with_edge): Same. (find_aggregate_values_for_callers_subset): Same. (cgraph_edge_brings_all_agg_vals_for_node): Same. (decide_about_value): Same. (decide_whether_version_node): Same. (spread_undeadness): Same. (identify_dead_nodes): Same. (ipcp_store_vr_results): Same. * ipa-devirt.c (final_warning_record::grow_type_warnings): Same. * ipa-fnsummary.c (ipa_fn_summary::account_size_time): Same. (redirect_to_unreachable): Same. (edge_set_predicate): Same. (evaluate_conditions_for_known_args): Same. (evaluate_properties_for_edge): Same. (ipa_fn_summary_t::duplicate): Same. (ipa_call_summary_t::duplicate): Same. (dump_ipa_call_summary): Same. (ipa_dump_fn_summary): Same. (eliminated_by_inlining_prob): Same. (set_cond_stmt_execution_predicate): Same. (set_switch_stmt_execution_predicate): Same. (compute_bb_predicates): Same. (will_be_nonconstant_expr_predicate): Same. (phi_result_unknown_predicate): Same. (analyze_function_body): Same. (compute_fn_summary): Same. (estimate_edge_devirt_benefit): Same. (estimate_edge_size_and_time): Same. (estimate_calls_size_and_time): Same. (estimate_node_size_and_time): Same. (remap_edge_change_prob): Same. (remap_edge_summaries): Same. (ipa_merge_fn_summary_after_inlining): Same. (ipa_fn_summary_generate): Same. (inline_read_section): Same. (ipa_fn_summary_read): Same. (ipa_fn_summary_write): Same. * ipa-fnsummary.h: Same. * ipa-hsa.c (ipa_hsa_read_section): Same. * ipa-icf-gimple.c (func_checker::compare_loops): Same. * ipa-icf.c (sem_function::param_used_p): Same. 
* ipa-inline-analysis.c (do_estimate_edge_time): Same. * ipa-inline.c (edge_badness): Same. (inline_small_functions): Same. * ipa-polymorphic-call.c (ipa_polymorphic_call_context::stream_out): Same. * ipa-predicate.c (predicate::remap_after_duplication): Same. (predicate::remap_after_inlining): Same. (predicate::stream_out): Same. * ipa-predicate.h: Same. * ipa-profile.c (ipa_profile_read_summary): Same. * ipa-prop.c (ipa_get_param_decl_index_1): Same. (count_formal_params): Same. (ipa_dump_param): Same. (ipa_alloc_node_params): Same. (ipa_print_node_jump_functions_for_edge): Same. (ipa_print_node_jump_functions): Same. (ipa_load_from_parm_agg): Same. (get_ancestor_addr_info): Same. (ipa_compute_jump_functions_for_edge): Same. (ipa_analyze_virtual_call_uses): Same. (ipa_analyze_stmt_uses): Same. (ipa_analyze_params_uses_in_bb): Same. (update_jump_functions_after_inlining): Same. (try_decrement_rdesc_refcount): Same. (ipa_impossible_devirt_target): Same. (update_indirect_edges_after_inlining): Same. (combine_controlled_uses_counters): Same. (ipa_edge_args_sum_t::duplicate): Same. (ipa_write_jump_function): Same. (ipa_write_indirect_edge_info): Same. (ipa_write_node_info): Same. (ipa_read_edge_info): Same. (ipa_prop_read_section): Same. (read_replacements_section): Same. * ipa-prop.h (ipa_get_param_count): Same. (ipa_get_param): Same. (ipa_get_type): Same. (ipa_get_param_move_cost): Same. (ipa_set_param_used): Same. (ipa_get_controlled_uses): Same. (ipa_set_controlled_uses): Same. (ipa_get_cs_argument_count): Same. * ipa-pure-const.c (analyze_function): Same. (pure_const_read_summary): Same. * ipa-ref.h: Same. * ipa-reference.c (ipa_reference_read_optimization_summary): Same. * ipa-split.c (test_nonssa_use): Same. (dump_split_point): Same. (dominated_by_forbidden): Same. (split_part_set_ssa_name_p): Same. (find_split_points): Same. * ira-build.c (finish_loop_tree_nodes): Same. (low_pressure_loop_node_p): Same. * ira-color.c (ira_reuse_stack_slot): Same. * ira-int.h: Same. * ira.c (setup_reg_equiv): Same. (print_insn_chain): Same. (ira): Same. * loop-doloop.c (doloop_condition_get): Same. (add_test): Same. (record_reg_sets): Same. (doloop_optimize): Same. * loop-init.c (loop_optimizer_init): Same. (fix_loop_structure): Same. * loop-invariant.c (merge_identical_invariants): Same. (compute_always_reached): Same. (find_exits): Same. (may_assign_reg_p): Same. (find_invariants_bb): Same. (find_invariants_body): Same. (replace_uses): Same. (can_move_invariant_reg): Same. (free_inv_motion_data): Same. (move_single_loop_invariants): Same. (change_pressure): Same. (mark_ref_regs): Same. (calculate_loop_reg_pressure): Same. * loop-iv.c (biv_entry_hasher::equal): Same. (iv_extend_to_rtx_code): Same. (check_iv_ref_table_size): Same. (clear_iv_info): Same. (latch_dominating_def): Same. (iv_get_reaching_def): Same. (iv_constant): Same. (iv_subreg): Same. (iv_extend): Same. (iv_neg): Same. (iv_add): Same. (iv_mult): Same. (get_biv_step): Same. (record_iv): Same. (analyzed_for_bivness_p): Same. (record_biv): Same. (iv_analyze_biv): Same. (iv_analyze_expr): Same. (iv_analyze_def): Same. (iv_analyze_op): Same. (iv_analyze): Same. (iv_analyze_result): Same. (biv_p): Same. (eliminate_implied_conditions): Same. (simplify_using_initial_values): Same. (shorten_into_mode): Same. (canonicalize_iv_subregs): Same. (determine_max_iter): Same. (check_simple_exit): Same. (find_simple_exit): Same. (get_simple_loop_desc): Same. * loop-unroll.c (report_unroll): Same. (decide_unrolling): Same. (unroll_loops): Same. 
(loop_exit_at_end_p): Same. (decide_unroll_constant_iterations): Same. (unroll_loop_constant_iterations): Same. (compare_and_jump_seq): Same. (unroll_loop_runtime_iterations): Same. (decide_unroll_stupid): Same. (unroll_loop_stupid): Same. (referenced_in_one_insn_in_loop_p): Same. (reset_debug_uses_in_loop): Same. (analyze_iv_to_split_insn): Same. * lra-eliminations.c (lra_debug_elim_table): Same. (setup_can_eliminate): Same. (form_sum): Same. (lra_get_elimination_hard_regno): Same. (lra_eliminate_regs_1): Same. (eliminate_regs_in_insn): Same. (update_reg_eliminate): Same. (init_elimination): Same. (lra_eliminate): Same. * lra-int.h: Same. * lra-lives.c (initiate_live_solver): Same. * lra-remat.c (create_remat_bb_data): Same. * lra-spills.c (lra_spill): Same. * lra.c (lra_set_insn_recog_data): Same. (lra_set_used_insn_alternative_by_uid): Same. (init_reg_info): Same. (expand_reg_info): Same. * lto-cgraph.c (output_symtab): Same. (read_identifier): Same. (get_alias_symbol): Same. (input_node): Same. (input_varpool_node): Same. (input_ref): Same. (input_edge): Same. (input_cgraph_1): Same. (input_refs): Same. (input_symtab): Same. (input_offload_tables): Same. (output_cgraph_opt_summary): Same. (input_edge_opt_summary): Same. (input_cgraph_opt_section): Same. * lto-section-in.c (lto_free_raw_section_data): Same. (lto_create_simple_input_block): Same. (lto_free_function_in_decl_state_for_node): Same. * lto-streamer-in.c (lto_tag_check_set): Same. (lto_location_cache::revert_location_cache): Same. (lto_location_cache::input_location): Same. (lto_input_location): Same. (stream_input_location_now): Same. (lto_input_tree_ref): Same. (lto_input_eh_catch_list): Same. (input_eh_region): Same. (lto_init_eh): Same. (make_new_block): Same. (input_cfg): Same. (fixup_call_stmt_edges): Same. (input_struct_function_base): Same. (input_function): Same. (lto_read_body_or_constructor): Same. (lto_read_tree_1): Same. (lto_read_tree): Same. (lto_input_scc): Same. (lto_input_tree_1): Same. (lto_input_toplevel_asms): Same. (lto_input_mode_table): Same. (lto_reader_init): Same. (lto_data_in_create): Same. * lto-streamer-out.c (output_cfg): Same. * lto-streamer.h: Same. * modulo-sched.c (duplicate_insns_of_cycles): Same. (generate_prolog_epilog): Same. (mark_loop_unsched): Same. (dump_insn_location): Same. (loop_canon_p): Same. (sms_schedule): Same. * omp-expand.c (expand_omp_for_ordered_loops): Same. (expand_omp_for_generic): Same. (expand_omp_for_static_nochunk): Same. (expand_omp_for_static_chunk): Same. (expand_omp_simd): Same. (expand_omp_taskloop_for_inner): Same. (expand_oacc_for): Same. (expand_omp_atomic_pipeline): Same. (mark_loops_in_oacc_kernels_region): Same. * omp-offload.c (oacc_xform_loop): Same. * omp-simd-clone.c (simd_clone_adjust): Same. * optabs-query.c (get_traditional_extraction_insn): Same. * optabs.c (expand_vector_broadcast): Same. (expand_binop_directly): Same. (expand_twoval_unop): Same. (expand_twoval_binop): Same. (expand_unop_direct): Same. (emit_indirect_jump): Same. (emit_conditional_move): Same. (emit_conditional_neg_or_complement): Same. (emit_conditional_add): Same. (vector_compare_rtx): Same. (expand_vec_perm_1): Same. (expand_vec_perm_const): Same. (expand_vec_cond_expr): Same. (expand_vec_series_expr): Same. (maybe_emit_atomic_exchange): Same. (maybe_emit_sync_lock_test_and_set): Same. (expand_atomic_compare_and_swap): Same. (expand_atomic_load): Same. (expand_atomic_store): Same. (maybe_emit_op): Same. (valid_multiword_target_p): Same. (create_integer_operand): Same. 
(maybe_legitimize_operand_same_code): Same. (maybe_legitimize_operand): Same. (create_convert_operand_from_type): Same. (can_reuse_operands_p): Same. (maybe_legitimize_operands): Same. (maybe_gen_insn): Same. (maybe_expand_insn): Same. (maybe_expand_jump_insn): Same. (expand_insn): Same. * optabs.h (create_expand_operand): Same. (create_fixed_operand): Same. (create_output_operand): Same. (create_input_operand): Same. (create_convert_operand_to): Same. (create_convert_operand_from): Same. * optinfo.h: Same. * poly-int.h: Same. * predict.c (optimize_insn_for_speed_p): Same. (optimize_loop_for_size_p): Same. (optimize_loop_for_speed_p): Same. (optimize_loop_nest_for_speed_p): Same. (get_base_value): Same. (predicted_by_loop_heuristics_p): Same. (predict_extra_loop_exits): Same. (predict_loops): Same. (predict_paths_for_bb): Same. (predict_paths_leading_to): Same. (propagate_freq): Same. (pass_profile::execute): Same. * predict.h: Same. * profile-count.c (profile_count::differs_from_p): Same. (profile_probability::differs_lot_from_p): Same. * profile-count.h: Same. * profile.c (branch_prob): Same. * regrename.c (free_chain_data): Same. (mark_conflict): Same. (create_new_chain): Same. (merge_overlapping_regs): Same. (init_rename_info): Same. (merge_chains): Same. (regrename_analyze): Same. (regrename_do_replace): Same. (scan_rtx_reg): Same. (record_out_operands): Same. (build_def_use): Same. * regrename.h: Same. * reload.h: Same. * reload1.c (init_reload): Same. (maybe_fix_stack_asms): Same. (copy_reloads): Same. (count_pseudo): Same. (count_spilled_pseudo): Same. (find_reg): Same. (find_reload_regs): Same. (select_reload_regs): Same. (spill_hard_reg): Same. (fixup_eh_region_note): Same. (set_reload_reg): Same. (allocate_reload_reg): Same. (compute_reload_subreg_offset): Same. (reload_adjust_reg_for_icode): Same. (emit_input_reload_insns): Same. (emit_output_reload_insns): Same. (do_input_reload): Same. (inherit_piecemeal_p): Same. * rtl.h: Same. * sanopt.c (maybe_get_dominating_check): Same. (maybe_optimize_ubsan_ptr_ifn): Same. (can_remove_asan_check): Same. (maybe_optimize_asan_check_ifn): Same. (sanopt_optimize_walker): Same. * sched-deps.c (add_dependence_list): Same. (chain_to_prev_insn): Same. (add_insn_mem_dependence): Same. (create_insn_reg_set): Same. (maybe_extend_reg_info_p): Same. (sched_analyze_reg): Same. (sched_analyze_1): Same. (get_implicit_reg_pending_clobbers): Same. (chain_to_prev_insn_p): Same. (deps_analyze_insn): Same. (deps_start_bb): Same. (sched_free_deps): Same. (init_deps): Same. (init_deps_reg_last): Same. (free_deps): Same. * sched-ebb.c: Same. * sched-int.h: Same. * sched-rgn.c (add_branch_dependences): Same. (concat_insn_mem_list): Same. (deps_join): Same. (sched_rgn_compute_dependencies): Same. * sel-sched-ir.c (reset_target_context): Same. (copy_deps_context): Same. (init_id_from_df): Same. (has_dependence_p): Same. (change_loops_latches): Same. (bb_top_order_comparator): Same. (make_region_from_loop_preheader): Same. (sel_init_pipelining): Same. (get_loop_nest_for_rgn): Same. (make_regions_from_the_rest): Same. (sel_is_loop_preheader_p): Same. * sel-sched-ir.h (inner_loop_header_p): Same. (get_all_loop_exits): Same. * selftest.h: Same. * sese.c (sese_build_liveouts): Same. (sese_insert_phis_for_liveouts): Same. * sese.h (defined_in_sese_p): Same. * sreal.c (sreal::stream_out): Same. * sreal.h: Same. * streamer-hooks.h: Same. * target-globals.c (save_target_globals): Same. * target-globals.h: Same. * target.def: Same. * target.h: Same. 
* targhooks.c (default_has_ifunc_p): Same. (default_empty_mask_is_expensive): Same. (default_init_cost): Same. * targhooks.h: Same. * toplev.c: Same. * tree-affine.c (aff_combination_mult): Same. (aff_combination_expand): Same. (aff_combination_constant_multiple_p): Same. * tree-affine.h: Same. * tree-cfg.c (build_gimple_cfg): Same. (replace_loop_annotate_in_block): Same. (replace_uses_by): Same. (remove_bb): Same. (dump_cfg_stats): Same. (gimple_duplicate_sese_region): Same. (gimple_duplicate_sese_tail): Same. (move_block_to_fn): Same. (replace_block_vars_by_duplicates): Same. (move_sese_region_to_fn): Same. (print_loops_bb): Same. (print_loop): Same. (print_loops): Same. (debug): Same. (debug_loops): Same. * tree-cfg.h: Same. * tree-chrec.c (chrec_fold_plus_poly_poly): Same. (chrec_fold_multiply_poly_poly): Same. (chrec_evaluate): Same. (chrec_component_in_loop_num): Same. (reset_evolution_in_loop): Same. (is_multivariate_chrec): Same. (chrec_contains_symbols): Same. (nb_vars_in_chrec): Same. (chrec_convert_1): Same. (chrec_convert_aggressive): Same. * tree-chrec.h: Same. * tree-core.h: Same. * tree-data-ref.c (dump_data_dependence_relation): Same. (canonicalize_base_object_address): Same. (data_ref_compare_tree): Same. (prune_runtime_alias_test_list): Same. (get_segment_min_max): Same. (create_intersect_range_checks): Same. (conflict_fn_no_dependence): Same. (object_address_invariant_in_loop_p): Same. (analyze_ziv_subscript): Same. (analyze_siv_subscript_cst_affine): Same. (analyze_miv_subscript): Same. (analyze_overlapping_iterations): Same. (build_classic_dist_vector_1): Same. (add_other_self_distances): Same. (same_access_functions): Same. (build_classic_dir_vector): Same. (subscript_dependence_tester_1): Same. (subscript_dependence_tester): Same. (access_functions_are_affine_or_constant_p): Same. (get_references_in_stmt): Same. (loop_nest_has_data_refs): Same. (graphite_find_data_references_in_stmt): Same. (find_data_references_in_bb): Same. (get_base_for_alignment): Same. (find_loop_nest_1): Same. (find_loop_nest): Same. * tree-data-ref.h (dr_alignment): Same. (ddr_dependence_level): Same. * tree-if-conv.c (fold_build_cond_expr): Same. (add_to_predicate_list): Same. (add_to_dst_predicate_list): Same. (phi_convertible_by_degenerating_args): Same. (idx_within_array_bound): Same. (all_preds_critical_p): Same. (pred_blocks_visited_p): Same. (predicate_bbs): Same. (build_region): Same. (if_convertible_loop_p_1): Same. (is_cond_scalar_reduction): Same. (predicate_scalar_phi): Same. (remove_conditions_and_labels): Same. (combine_blocks): Same. (version_loop_for_if_conversion): Same. (versionable_outer_loop_p): Same. (ifcvt_local_dce): Same. (tree_if_conversion): Same. (pass_if_conversion::gate): Same. * tree-if-conv.h: Same. * tree-inline.c (maybe_move_debug_stmts_to_successors): Same. * tree-loop-distribution.c (bb_top_order_cmp): Same. (free_rdg): Same. (stmt_has_scalar_dependences_outside_loop): Same. (copy_loop_before): Same. (create_bb_after_loop): Same. (const_with_all_bytes_same): Same. (generate_memset_builtin): Same. (generate_memcpy_builtin): Same. (destroy_loop): Same. (build_rdg_partition_for_vertex): Same. (compute_access_range): Same. (data_ref_segment_size): Same. (latch_dominated_by_data_ref): Same. (compute_alias_check_pairs): Same. (fuse_memset_builtins): Same. (finalize_partitions): Same. (find_seed_stmts_for_distribution): Same. (prepare_perfect_loop_nest): Same. * tree-parloops.c (lambda_transform_legal_p): Same. (loop_parallel_p): Same. (reduc_stmt_res): Same. 
(add_field_for_name): Same. (create_call_for_reduction_1): Same. (replace_uses_in_bb_by): Same. (transform_to_exit_first_loop_alt): Same. (try_transform_to_exit_first_loop_alt): Same. (transform_to_exit_first_loop): Same. (num_phis): Same. (gen_parallel_loop): Same. (gather_scalar_reductions): Same. (get_omp_data_i_param): Same. (try_create_reduction_list): Same. (oacc_entry_exit_single_gang): Same. (parallelize_loops): Same. * tree-pass.h: Same. * tree-predcom.c (determine_offset): Same. (last_always_executed_block): Same. (split_data_refs_to_components): Same. (suitable_component_p): Same. (valid_initializer_p): Same. (find_looparound_phi): Same. (insert_looparound_copy): Same. (add_looparound_copies): Same. (determine_roots_comp): Same. (predcom_tmp_var): Same. (initialize_root_vars): Same. (initialize_root_vars_store_elim_1): Same. (initialize_root_vars_store_elim_2): Same. (finalize_eliminated_stores): Same. (initialize_root_vars_lm): Same. (remove_stmt): Same. (determine_unroll_factor): Same. (execute_pred_commoning_cbck): Same. (base_names_in_chain_on): Same. (combine_chains): Same. (pcom_stmt_dominates_stmt_p): Same. (try_combine_chains): Same. (prepare_initializers_chain_store_elim): Same. (prepare_initializers_chain): Same. (prepare_initializers): Same. (prepare_finalizers_chain): Same. (prepare_finalizers): Same. (insert_init_seqs): Same. * tree-scalar-evolution.c (loop_phi_node_p): Same. (compute_overall_effect_of_inner_loop): Same. (add_to_evolution_1): Same. (add_to_evolution): Same. (follow_ssa_edge_binary): Same. (follow_ssa_edge_expr): Same. (backedge_phi_arg_p): Same. (follow_ssa_edge_in_condition_phi_branch): Same. (follow_ssa_edge_in_condition_phi): Same. (follow_ssa_edge_inner_loop_phi): Same. (follow_ssa_edge): Same. (analyze_evolution_in_loop): Same. (analyze_initial_condition): Same. (interpret_loop_phi): Same. (interpret_condition_phi): Same. (interpret_rhs_expr): Same. (interpret_expr): Same. (interpret_gimple_assign): Same. (analyze_scalar_evolution_1): Same. (analyze_scalar_evolution): Same. (analyze_scalar_evolution_for_address_of): Same. (get_instantiated_value_entry): Same. (loop_closed_phi_def): Same. (instantiate_scev_name): Same. (instantiate_scev_poly): Same. (instantiate_scev_binary): Same. (instantiate_scev_convert): Same. (instantiate_scev_not): Same. (instantiate_scev_r): Same. (instantiate_scev): Same. (resolve_mixers): Same. (initialize_scalar_evolutions_analyzer): Same. (scev_reset_htab): Same. (scev_reset): Same. (derive_simple_iv_with_niters): Same. (simple_iv_with_niters): Same. (expression_expensive_p): Same. (final_value_replacement_loop): Same. * tree-scalar-evolution.h (block_before_loop): Same. * tree-ssa-address.h: Same. * tree-ssa-dce.c (find_obviously_necessary_stmts): Same. * tree-ssa-dom.c (edge_info::record_simple_equiv): Same. (record_edge_info): Same. * tree-ssa-live.c (var_map_base_fini): Same. (remove_unused_locals): Same. * tree-ssa-live.h: Same. * tree-ssa-loop-ch.c (should_duplicate_loop_header_p): Same. (pass_ch_vect::execute): Same. (pass_ch::process_loop_p): Same. * tree-ssa-loop-im.c (mem_ref_hasher::hash): Same. (movement_possibility): Same. (outermost_invariant_loop): Same. (stmt_cost): Same. (determine_max_movement): Same. (invariantness_dom_walker::before_dom_children): Same. (move_computations): Same. (may_move_till): Same. (force_move_till_op): Same. (force_move_till): Same. (memref_free): Same. (record_mem_ref_loc): Same. (set_ref_stored_in_loop): Same. (mark_ref_stored): Same. (sort_bbs_in_loop_postorder_cmp): Same. 
(sort_locs_in_loop_postorder_cmp): Same. (analyze_memory_references): Same. (mem_refs_may_alias_p): Same. (find_ref_loc_in_loop_cmp): Same. (rewrite_mem_ref_loc::operator): Same. (first_mem_ref_loc_1::operator): Same. (sm_set_flag_if_changed::operator): Same. (execute_sm_if_changed_flag_set): Same. (execute_sm): Same. (hoist_memory_references): Same. (ref_always_accessed::operator): Same. (refs_independent_p): Same. (record_dep_loop): Same. (ref_indep_loop_p_1): Same. (ref_indep_loop_p): Same. (can_sm_ref_p): Same. (find_refs_for_sm): Same. (loop_suitable_for_sm): Same. (store_motion_loop): Same. (store_motion): Same. (fill_always_executed_in): Same. * tree-ssa-loop-ivcanon.c (constant_after_peeling): Same. (estimated_unrolled_size): Same. (loop_edge_to_cancel): Same. (remove_exits_and_undefined_stmts): Same. (remove_redundant_iv_tests): Same. (unloop_loops): Same. (estimated_peeled_sequence_size): Same. (try_peel_loop): Same. (canonicalize_loop_induction_variables): Same. (canonicalize_induction_variables): Same. * tree-ssa-loop-ivopts.c (iv_inv_expr_hasher::equal): Same. (name_info): Same. (stmt_after_inc_pos): Same. (contains_abnormal_ssa_name_p): Same. (niter_for_exit): Same. (find_bivs): Same. (mark_bivs): Same. (find_givs_in_bb): Same. (find_induction_variables): Same. (find_interesting_uses_cond): Same. (outermost_invariant_loop_for_expr): Same. (idx_find_step): Same. (add_candidate_1): Same. (add_iv_candidate_derived_from_uses): Same. (alloc_use_cost_map): Same. (prepare_decl_rtl): Same. (generic_predict_doloop_p): Same. (computation_cost): Same. (determine_common_wider_type): Same. (get_computation_aff_1): Same. (get_use_type): Same. (determine_group_iv_cost_address): Same. (iv_period): Same. (difference_cannot_overflow_p): Same. (may_eliminate_iv): Same. (determine_set_costs): Same. (cheaper_cost_pair): Same. (compare_cost_pair): Same. (iv_ca_cand_for_group): Same. (iv_ca_recount_cost): Same. (iv_ca_set_remove_invs): Same. (iv_ca_set_no_cp): Same. (iv_ca_set_add_invs): Same. (iv_ca_set_cp): Same. (iv_ca_add_group): Same. (iv_ca_cost): Same. (iv_ca_compare_deps): Same. (iv_ca_delta_reverse): Same. (iv_ca_delta_commit): Same. (iv_ca_cand_used_p): Same. (iv_ca_delta_free): Same. (iv_ca_new): Same. (iv_ca_free): Same. (iv_ca_dump): Same. (iv_ca_extend): Same. (iv_ca_narrow): Same. (iv_ca_prune): Same. (cheaper_cost_with_cand): Same. (iv_ca_replace): Same. (try_add_cand_for): Same. (get_initial_solution): Same. (try_improve_iv_set): Same. (find_optimal_iv_set_1): Same. (create_new_iv): Same. (rewrite_use_compare): Same. (remove_unused_ivs): Same. (determine_scaling_factor): Same. * tree-ssa-loop-ivopts.h: Same. * tree-ssa-loop-manip.c (create_iv): Same. (compute_live_loop_exits): Same. (add_exit_phi): Same. (add_exit_phis): Same. (find_uses_to_rename_use): Same. (find_uses_to_rename_def): Same. (find_uses_to_rename_in_loop): Same. (rewrite_into_loop_closed_ssa): Same. (check_loop_closed_ssa_bb): Same. (split_loop_exit_edge): Same. (ip_end_pos): Same. (ip_normal_pos): Same. (copy_phi_node_args): Same. (gimple_duplicate_loop_to_header_edge): Same. (can_unroll_loop_p): Same. (determine_exit_conditions): Same. (scale_dominated_blocks_in_loop): Same. (niter_for_unrolled_loop): Same. (tree_transform_and_unroll_loop): Same. (rewrite_all_phi_nodes_with_iv): Same. * tree-ssa-loop-manip.h: Same. * tree-ssa-loop-niter.c (number_of_iterations_ne_max): Same. (number_of_iterations_ne): Same. (assert_no_overflow_lt): Same. (assert_loop_rolls_lt): Same. (number_of_iterations_lt): Same. 
(adjust_cond_for_loop_until_wrap): Same. (tree_simplify_using_condition): Same. (simplify_using_initial_conditions): Same. (simplify_using_outer_evolutions): Same. (loop_only_exit_p): Same. (ssa_defined_by_minus_one_stmt_p): Same. (number_of_iterations_popcount): Same. (number_of_iterations_exit): Same. (find_loop_niter): Same. (finite_loop_p): Same. (chain_of_csts_start): Same. (get_val_for): Same. (loop_niter_by_eval): Same. (derive_constant_upper_bound_ops): Same. (do_warn_aggressive_loop_optimizations): Same. (record_estimate): Same. (get_cst_init_from_scev): Same. (record_nonwrapping_iv): Same. (idx_infer_loop_bounds): Same. (infer_loop_bounds_from_ref): Same. (infer_loop_bounds_from_array): Same. (infer_loop_bounds_from_pointer_arith): Same. (infer_loop_bounds_from_signedness): Same. (bound_index): Same. (discover_iteration_bound_by_body_walk): Same. (maybe_lower_iteration_bound): Same. (estimate_numbers_of_iterations): Same. (estimated_loop_iterations): Same. (estimated_loop_iterations_int): Same. (max_loop_iterations): Same. (max_loop_iterations_int): Same. (likely_max_loop_iterations): Same. (likely_max_loop_iterations_int): Same. (estimated_stmt_executions_int): Same. (max_stmt_executions): Same. (likely_max_stmt_executions): Same. (estimated_stmt_executions): Same. (stmt_dominates_stmt_p): Same. (nowrap_type_p): Same. (loop_exits_before_overflow): Same. (scev_var_range_cant_overflow): Same. (scev_probably_wraps_p): Same. (free_numbers_of_iterations_estimates): Same. * tree-ssa-loop-niter.h: Same. * tree-ssa-loop-prefetch.c (release_mem_refs): Same. (idx_analyze_ref): Same. (analyze_ref): Same. (gather_memory_references_ref): Same. (mark_nontemporal_store): Same. (emit_mfence_after_loop): Same. (may_use_storent_in_loop_p): Same. (mark_nontemporal_stores): Same. (should_unroll_loop_p): Same. (volume_of_dist_vector): Same. (add_subscript_strides): Same. (self_reuse_distance): Same. (insn_to_prefetch_ratio_too_small_p): Same. * tree-ssa-loop-split.c (split_at_bb_p): Same. (patch_loop_exit): Same. (find_or_create_guard_phi): Same. (easy_exit_values): Same. (connect_loop_phis): Same. (connect_loops): Same. (compute_new_first_bound): Same. (split_loop): Same. (tree_ssa_split_loops): Same. * tree-ssa-loop-unswitch.c (tree_ssa_unswitch_loops): Same. (is_maybe_undefined): Same. (tree_may_unswitch_on): Same. (simplify_using_entry_checks): Same. (tree_unswitch_single_loop): Same. (tree_unswitch_loop): Same. (tree_unswitch_outer_loop): Same. (empty_bb_without_guard_p): Same. (used_outside_loop_p): Same. (get_vop_from_header): Same. (hoist_guard): Same. * tree-ssa-loop.c (gate_oacc_kernels): Same. (get_lsm_tmp_name): Same. * tree-ssa-loop.h: Same. * tree-ssa-reassoc.c (add_repeat_to_ops_vec): Same. (build_and_add_sum): Same. (no_side_effect_bb): Same. (get_ops): Same. (linearize_expr): Same. (should_break_up_subtract): Same. (linearize_expr_tree): Same. * tree-ssa-scopedtables.c: Same. * tree-ssa-scopedtables.h: Same. * tree-ssa-structalias.c (condense_visit): Same. (label_visit): Same. (dump_pred_graph): Same. (perform_var_substitution): Same. (move_complex_constraints): Same. (remove_preds_and_fake_succs): Same. * tree-ssa-threadupdate.c (dbds_continue_enumeration_p): Same. (determine_bb_domination_status): Same. (duplicate_thread_path): Same. (thread_through_all_blocks): Same. * tree-ssa-threadupdate.h: Same. * tree-streamer-in.c (streamer_read_string_cst): Same. (input_identifier): Same. (unpack_ts_type_common_value_fields): Same. (unpack_ts_block_value_fields): Same. 
(unpack_ts_translation_unit_decl_value_fields): Same. (unpack_ts_omp_clause_value_fields): Same. (streamer_read_tree_bitfields): Same. (streamer_alloc_tree): Same. (lto_input_ts_common_tree_pointers): Same. (lto_input_ts_vector_tree_pointers): Same. (lto_input_ts_poly_tree_pointers): Same. (lto_input_ts_complex_tree_pointers): Same. (lto_input_ts_decl_minimal_tree_pointers): Same. (lto_input_ts_decl_common_tree_pointers): Same. (lto_input_ts_decl_non_common_tree_pointers): Same. (lto_input_ts_decl_with_vis_tree_pointers): Same. (lto_input_ts_field_decl_tree_pointers): Same. (lto_input_ts_function_decl_tree_pointers): Same. (lto_input_ts_type_common_tree_pointers): Same. (lto_input_ts_type_non_common_tree_pointers): Same. (lto_input_ts_list_tree_pointers): Same. (lto_input_ts_vec_tree_pointers): Same. (lto_input_ts_exp_tree_pointers): Same. (lto_input_ts_block_tree_pointers): Same. (lto_input_ts_binfo_tree_pointers): Same. (lto_input_ts_constructor_tree_pointers): Same. (lto_input_ts_omp_clause_tree_pointers): Same. (streamer_read_tree_body): Same. * tree-streamer.h: Same. * tree-switch-conversion.c (bit_test_cluster::is_beneficial): Same. * tree-vect-data-refs.c (vect_get_smallest_scalar_type): Same. (vect_analyze_possibly_independent_ddr): Same. (vect_analyze_data_ref_dependence): Same. (vect_compute_data_ref_alignment): Same. (vect_enhance_data_refs_alignment): Same. (vect_analyze_data_ref_access): Same. (vect_check_gather_scatter): Same. (vect_find_stmt_data_reference): Same. (vect_create_addr_base_for_vector_ref): Same. (vect_setup_realignment): Same. (vect_supportable_dr_alignment): Same. * tree-vect-loop-manip.c (rename_variables_in_bb): Same. (adjust_phi_and_debug_stmts): Same. (vect_set_loop_mask): Same. (add_preheader_seq): Same. (vect_maybe_permute_loop_masks): Same. (vect_set_loop_masks_directly): Same. (vect_set_loop_condition_masked): Same. (vect_set_loop_condition_unmasked): Same. (slpeel_duplicate_current_defs_from_edges): Same. (slpeel_add_loop_guard): Same. (slpeel_can_duplicate_loop_p): Same. (create_lcssa_for_virtual_phi): Same. (iv_phi_p): Same. (vect_update_ivs_after_vectorizer): Same. (vect_gen_vector_loop_niters_mult_vf): Same. (slpeel_update_phi_nodes_for_loops): Same. (slpeel_update_phi_nodes_for_guard1): Same. (find_guard_arg): Same. (slpeel_update_phi_nodes_for_guard2): Same. (slpeel_update_phi_nodes_for_lcssa): Same. (vect_do_peeling): Same. (vect_create_cond_for_alias_checks): Same. (vect_loop_versioning): Same. * tree-vect-loop.c (vect_determine_vf_for_stmt): Same. (vect_inner_phi_in_double_reduction_p): Same. (vect_analyze_scalar_cycles_1): Same. (vect_fixup_scalar_cycles_with_patterns): Same. (vect_get_loop_niters): Same. (bb_in_loop_p): Same. (vect_get_max_nscalars_per_iter): Same. (vect_verify_full_masking): Same. (vect_compute_single_scalar_iteration_cost): Same. (vect_analyze_loop_form_1): Same. (vect_analyze_loop_form): Same. (vect_active_double_reduction_p): Same. (vect_analyze_loop_operations): Same. (neutral_op_for_slp_reduction): Same. (vect_is_simple_reduction): Same. (vect_model_reduction_cost): Same. (get_initial_def_for_reduction): Same. (get_initial_defs_for_reduction): Same. (vect_create_epilog_for_reduction): Same. (vectorize_fold_left_reduction): Same. (vectorizable_reduction): Same. (vectorizable_induction): Same. (vectorizable_live_operation): Same. (loop_niters_no_overflow): Same. (vect_get_loop_mask): Same. (vect_transform_loop_stmt): Same. (vect_transform_loop): Same. * tree-vect-patterns.c (vect_reassociating_reduction_p): Same. 
(vect_determine_precisions): Same. (vect_pattern_recog_1): Same. * tree-vect-slp.c (vect_analyze_slp_instance): Same. * tree-vect-stmts.c (stmt_vectype): Same. (process_use): Same. (vect_init_vector_1): Same. (vect_truncate_gather_scatter_offset): Same. (get_group_load_store_type): Same. (vect_build_gather_load_calls): Same. (vect_get_strided_load_store_ops): Same. (vectorizable_simd_clone_call): Same. (vectorizable_store): Same. (permute_vec_elements): Same. (vectorizable_load): Same. (vect_transform_stmt): Same. (supportable_widening_operation): Same. * tree-vectorizer.c (vec_info::replace_stmt): Same. (vec_info::free_stmt_vec_info): Same. (vect_free_loop_info_assumptions): Same. (vect_loop_vectorized_call): Same. (set_uid_loop_bbs): Same. (vectorize_loops): Same. * tree-vectorizer.h (STMT_VINFO_BB_VINFO): Same. * tree.c (add_tree_to_fld_list): Same. (fld_type_variant_equal_p): Same. (fld_decl_context): Same. (fld_incomplete_type_of): Same. (free_lang_data_in_binfo): Same. (need_assembler_name_p): Same. (find_decls_types_r): Same. (get_eh_types_for_runtime): Same. (find_decls_types_in_eh_region): Same. (find_decls_types_in_node): Same. (assign_assembler_name_if_needed): Same. * value-prof.c (stream_out_histogram_value): Same. * value-prof.h: Same. * var-tracking.c (use_narrower_mode): Same. (prepare_call_arguments): Same. (vt_expand_loc_callback): Same. (resolve_expansions_pending_recursion): Same. (vt_expand_loc): Same. * varasm.c (const_hash_1): Same. (compare_constant): Same. (tree_output_constant_def): Same. (simplify_subtraction): Same. (get_pool_constant): Same. (output_constant_pool_2): Same. (output_constant_pool_1): Same. (mark_constants_in_pattern): Same. (mark_constant_pool): Same. (get_section_anchor): Same. * vr-values.c (compare_range_with_value): Same. (vr_values::extract_range_from_phi_node): Same. * vr-values.h: Same. * web.c (unionfind_union): Same. * wide-int.h: Same. From-SVN: r273311
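For readers unfamiliar with the warning named in the title, the sketch below illustrates the pattern this commit normalizes: a type defined with one class-key (class) but referred to elsewhere with the other (struct). Both spellings are valid C++, since the class-keys are interchangeable when redeclaring a class type, but a -Wmismatched-tags-style diagnostic flags the inconsistency, which is why the diff systematically changes each use site to match the definition. The snippet is illustrative only and is not code from the commit; edge_with_aux and afdo_count are invented names, and edge_info here merely stands in for the class of the same name in gcc/auto-profile.c.

// Illustrative sketch only -- not code from this commit.
// 'edge_with_aux' and 'afdo_count' are made-up names for the example.

class edge_info          // defined with the 'class' key
{
public:
  int count;
};

// Mismatched class-key: still valid C++, but tools that implement a
// -Wmismatched-tags-style check warn that 'struct' disagrees with the
// definition above.
//   #define AFDO_EINFO(e) ((struct edge_info *) (e)->aux)

// Matching class-key, as applied throughout the diff below: same meaning,
// no warning.
#define AFDO_EINFO(e) ((class edge_info *) (e)->aux)

struct edge_with_aux { void *aux; };

int
afdo_count (edge_with_aux *e)
{
  return AFDO_EINFO (e)->count;  // cast goes through 'class edge_info *'
}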
This commit is contained in:
parent 18c0ed4b46
commit 99b1c316ec
1431  gcc/ChangeLog
File diff suppressed because it is too large
@ -104,7 +104,7 @@ namespace autofdo
|
||||
/* Intermediate edge info used when propagating AutoFDO profile information.
|
||||
We can't edge->count() directly since it's computed from edge's probability
|
||||
while probability is yet not decided during propagation. */
|
||||
#define AFDO_EINFO(e) ((struct edge_info *) e->aux)
|
||||
#define AFDO_EINFO(e) ((class edge_info *) e->aux)
|
||||
class edge_info
|
||||
{
|
||||
public:
|
||||
|
@ -123,7 +123,7 @@ struct GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb"))) basic_block_d
|
||||
PTR GTY ((skip (""))) aux;
|
||||
|
||||
/* Innermost loop containing the block. */
|
||||
struct loop *loop_father;
|
||||
class loop *loop_father;
|
||||
|
||||
/* The dominance and postdominance information node. */
|
||||
struct et_node * GTY ((skip (""))) dom[2];
|
||||
|
@ -775,7 +775,7 @@ bitmap_alloc (bitmap_obstack *bit_obstack MEM_STAT_DECL)
|
||||
bit_obstack = &bitmap_default_obstack;
|
||||
map = bit_obstack->heads;
|
||||
if (map)
|
||||
bit_obstack->heads = (struct bitmap_head *) map->first;
|
||||
bit_obstack->heads = (class bitmap_head *) map->first;
|
||||
else
|
||||
map = XOBNEW (&bit_obstack->obstack, bitmap_head);
|
||||
bitmap_initialize (map, bit_obstack PASS_MEM_STAT);
|
||||
|
@ -290,7 +290,7 @@ typedef unsigned long BITMAP_WORD;
|
||||
/* Obstack for allocating bitmaps and elements from. */
|
||||
struct bitmap_obstack {
|
||||
struct bitmap_element *elements;
|
||||
struct bitmap_head *heads;
|
||||
bitmap_head *heads;
|
||||
struct obstack obstack;
|
||||
};
|
||||
|
||||
|
@ -1400,7 +1400,7 @@ expand_builtin_prefetch (tree exp)
|
||||
|
||||
if (targetm.have_prefetch ())
|
||||
{
|
||||
struct expand_operand ops[3];
|
||||
class expand_operand ops[3];
|
||||
|
||||
create_address_operand (&ops[0], op0);
|
||||
create_integer_operand (&ops[1], INTVAL (op1));
|
||||
@ -2445,7 +2445,7 @@ expand_builtin_interclass_mathfn (tree exp, rtx target)
|
||||
|
||||
if (icode != CODE_FOR_nothing)
|
||||
{
|
||||
struct expand_operand ops[1];
|
||||
class expand_operand ops[1];
|
||||
rtx_insn *last = get_last_insn ();
|
||||
tree orig_arg = arg;
|
||||
|
||||
@ -2946,7 +2946,7 @@ expand_builtin_strlen (tree exp, rtx target,
|
||||
if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
|
||||
return NULL_RTX;
|
||||
|
||||
struct expand_operand ops[4];
|
||||
class expand_operand ops[4];
|
||||
rtx pat;
|
||||
tree len;
|
||||
tree src = CALL_EXPR_ARG (exp, 0);
|
||||
@ -3923,7 +3923,7 @@ expand_builtin_mempcpy_args (tree dest, tree src, tree len,
|
||||
static rtx
|
||||
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
|
||||
{
|
||||
struct expand_operand ops[3];
|
||||
class expand_operand ops[3];
|
||||
rtx dest_mem;
|
||||
rtx src_mem;
|
||||
|
||||
@ -4633,7 +4633,7 @@ expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
|
||||
if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
|
||||
target = NULL_RTX;
|
||||
|
||||
struct expand_operand ops[4];
|
||||
class expand_operand ops[4];
|
||||
create_output_operand (&ops[0], target, insn_mode);
|
||||
create_fixed_operand (&ops[1], arg1_rtx);
|
||||
create_fixed_operand (&ops[2], arg2_rtx);
|
||||
@ -5606,7 +5606,7 @@ expand_builtin___clear_cache (tree exp)
|
||||
|
||||
if (targetm.have_clear_cache ())
|
||||
{
|
||||
struct expand_operand ops[2];
|
||||
class expand_operand ops[2];
|
||||
|
||||
begin = CALL_EXPR_ARG (exp, 0);
|
||||
begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
|
||||
@ -6566,7 +6566,7 @@ expand_ifn_atomic_bit_test_and (gcall *call)
|
||||
machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
|
||||
enum rtx_code code;
|
||||
optab optab;
|
||||
struct expand_operand ops[5];
|
||||
class expand_operand ops[5];
|
||||
|
||||
gcc_assert (flag_inline_atomics);
|
||||
|
||||
@ -6874,7 +6874,7 @@ expand_builtin_thread_pointer (tree exp, rtx target)
|
||||
icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
|
||||
if (icode != CODE_FOR_nothing)
|
||||
{
|
||||
struct expand_operand op;
|
||||
class expand_operand op;
|
||||
/* If the target is not sutitable then create a new target. */
|
||||
if (target == NULL_RTX
|
||||
|| !REG_P (target)
|
||||
@ -6897,7 +6897,7 @@ expand_builtin_set_thread_pointer (tree exp)
|
||||
icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
|
||||
if (icode != CODE_FOR_nothing)
|
||||
{
|
||||
struct expand_operand op;
|
||||
class expand_operand op;
|
||||
rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
|
||||
Pmode, EXPAND_NORMAL);
|
||||
create_input_operand (&op, val, Pmode);
|
||||
|
@ -1,3 +1,10 @@
|
||||
2019-07-09 Martin Sebor <msebor@redhat.com>
|
||||
|
||||
PR c++/61339
|
||||
* c-opts.c (handle_deferred_opts): : Change class-key of PODs to struct
|
||||
and others to class.
|
||||
* c-pretty-print.h: Same.
|
||||
|
||||
2019-07-09 Martin Sebor <msebor@redhat.com>
|
||||
|
||||
PR c++/61339
|
||||
|
@ -1287,7 +1287,7 @@ handle_deferred_opts (void)
|
||||
if (!deps_seen)
|
||||
return;
|
||||
|
||||
struct mkdeps *deps = cpp_get_deps (parse_in);
|
||||
mkdeps *deps = cpp_get_deps (parse_in);
|
||||
|
||||
for (size_t i = 0; i < deferred_count; i++)
|
||||
{
|
||||
|
@ -36,7 +36,7 @@ enum pp_c_pretty_print_flags
|
||||
|
||||
/* The data type used to bundle information necessary for pretty-printing
|
||||
a C or C++ entity. */
|
||||
struct c_pretty_printer;
|
||||
class c_pretty_printer;
|
||||
|
||||
/* The type of a C pretty-printer 'member' function. */
|
||||
typedef void (*c_pretty_print_fn) (c_pretty_printer *, tree);
|
||||
|
@ -1,3 +1,13 @@
|
||||
2019-07-09 Martin Sebor <msebor@redhat.com>
|
||||
|
||||
PR c++/61339
|
||||
* c-decl.c (xref_tag): Change class-key of PODs to struct and others
|
||||
to class.
|
||||
(field_decl_cmp): Same.
|
||||
* c-parser.c (c_parser_struct_or_union_specifier): Same.
|
||||
* c-tree.h: Same.
|
||||
* gimple-parser.c (c_parser_gimple_compound_statement): Same.
|
||||
|
||||
2019-07-09 Martin Sebor <msebor@redhat.com>
|
||||
|
||||
PR c++/61339
|
||||
|
@ -592,7 +592,7 @@ public:
|
||||
|
||||
/* Information for the struct or union currently being parsed, or
|
||||
NULL if not parsing a struct or union. */
|
||||
static struct c_struct_parse_info *struct_parse_info;
|
||||
static class c_struct_parse_info *struct_parse_info;
|
||||
|
||||
/* Forward declarations. */
|
||||
static tree lookup_name_in_scope (tree, struct c_scope *);
|
||||
@ -7768,7 +7768,7 @@ xref_tag (enum tree_code code, tree name)
|
||||
|
||||
tree
|
||||
start_struct (location_t loc, enum tree_code code, tree name,
|
||||
struct c_struct_parse_info **enclosing_struct_parse_info)
|
||||
class c_struct_parse_info **enclosing_struct_parse_info)
|
||||
{
|
||||
/* If there is already a tag defined at this scope
|
||||
(as a forward reference), just return it. */
|
||||
@ -8183,7 +8183,7 @@ field_decl_cmp (const void *x_p, const void *y_p)
|
||||
|
||||
tree
|
||||
finish_struct (location_t loc, tree t, tree fieldlist, tree attributes,
|
||||
struct c_struct_parse_info *enclosing_struct_parse_info)
|
||||
class c_struct_parse_info *enclosing_struct_parse_info)
|
||||
{
|
||||
tree x;
|
||||
bool toplevel = file_scope == current_scope;
|
||||
|
@ -3145,7 +3145,7 @@ c_parser_struct_or_union_specifier (c_parser *parser)
|
||||
{
|
||||
/* Parse a struct or union definition. Start the scope of the
|
||||
tag before parsing components. */
|
||||
struct c_struct_parse_info *struct_info;
|
||||
class c_struct_parse_info *struct_info;
|
||||
tree type = start_struct (struct_loc, code, ident, &struct_info);
|
||||
tree postfix_attrs;
|
||||
/* We chain the components in reverse order, then put them in
|
||||
|
@@ -525,7 +525,7 @@ extern void gen_aux_info_record (tree, int, int, int);

 /* in c-decl.c */
 struct c_spot_bindings;
-struct c_struct_parse_info;
+class c_struct_parse_info;
 extern struct obstack parser_obstack;
 extern tree c_break_label;
 extern tree c_cont_label;
@@ -562,7 +562,7 @@ extern void finish_decl (tree, location_t, tree, tree, tree);
 extern tree finish_enum (tree, tree, tree);
 extern void finish_function (void);
 extern tree finish_struct (location_t, tree, tree, tree,
-struct c_struct_parse_info *);
+class c_struct_parse_info *);
 extern struct c_arg_info *build_arg_info (void);
 extern struct c_arg_info *get_parm_info (bool, tree);
 extern tree grokfield (location_t, struct c_declarator *,
@@ -586,7 +586,7 @@ extern bool start_function (struct c_declspecs *, struct c_declarator *, tree);
 extern tree start_decl (struct c_declarator *, struct c_declspecs *, bool,
 tree);
 extern tree start_struct (location_t, enum tree_code, tree,
-struct c_struct_parse_info **);
+class c_struct_parse_info **);
 extern void store_parm_decls (void);
 extern void store_parm_decls_from (struct c_arg_info *);
 extern void temp_store_parm_decls (tree, tree);
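A side note on why these header tweaks are safe (a sketch with a made-up type, not from the patch): in C++ the class-key used in a redeclaration does not change the type, so swapping struct for class in a forward declaration affects only the -Wmismatched-tags diagnostic, never semantics or ABI.

  // Hypothetical type; either redeclaration below refers to the same type.
  struct widget { int id; };   // the definition fixes the "real" key
  class widget;                // legal redeclaration, but mismatched key
  struct widget;               // matching key, no warning
  widget make () { return widget{7}; }
  int main () { return make ().id == 7 ? 0 : 1; }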
@@ -585,7 +585,7 @@ c_parser_gimple_compound_statement (gimple_parser &parser, gimple_seq *seq)
 profile_probability::always ());

 /* We leave the proper setting to fixup. */
-struct loop *loop_father = loops_for_fn (cfun)->tree_root;
+class loop *loop_father = loops_for_fn (cfun)->tree_root;
 /* If the new block is a loop header, allocate a loop
    struct. Fixup will take care of proper placement within
    the loop tree. */
@@ -598,7 +598,7 @@ c_parser_gimple_compound_statement (gimple_parser &parser, gimple_seq *seq)
 }
 else
 {
-struct loop *loop = alloc_loop ();
+class loop *loop = alloc_loop ();
 loop->num = is_loop_header_of;
 loop->header = bb;
 vec_safe_grow_cleared (loops_for_fn (cfun)->larray,
@@ -88,11 +88,11 @@ static void mark_set_regs (rtx, const_rtx, void *);
 static void mark_referenced_regs (rtx *, refmarker_fn *mark, void *mark_arg);
 static refmarker_fn mark_reg_as_referenced;
 static refmarker_fn replace_reg_with_saved_mem;
-static int insert_save (struct insn_chain *, int, HARD_REG_SET *,
+static int insert_save (class insn_chain *, int, HARD_REG_SET *,
 machine_mode *);
-static int insert_restore (struct insn_chain *, int, int, int,
+static int insert_restore (class insn_chain *, int, int, int,
 machine_mode *);
-static struct insn_chain *insert_one_insn (struct insn_chain *, int, int,
+static class insn_chain *insert_one_insn (class insn_chain *, int, int,
 rtx);
 static void add_stored_regs (rtx, const_rtx, void *);

@@ -419,7 +419,7 @@ setup_save_areas (void)
 HARD_REG_SET hard_regs_used;
 struct saved_hard_reg *saved_reg;
 rtx_insn *insn;
-struct insn_chain *chain, *next;
+class insn_chain *chain, *next;
 unsigned int regno;
 HARD_REG_SET hard_regs_to_save, used_regs, this_insn_sets;
 reg_set_iterator rsi;
@@ -744,7 +744,7 @@ setup_save_areas (void)
 void
 save_call_clobbered_regs (void)
 {
-struct insn_chain *chain, *next, *last = NULL;
+class insn_chain *chain, *next, *last = NULL;
 machine_mode save_mode [FIRST_PSEUDO_REGISTER];

 /* Computed in mark_set_regs, holds all registers set by the current
@@ -1174,14 +1174,14 @@ replace_reg_with_saved_mem (rtx *loc,
    Return the extra number of registers saved. */

 static int
-insert_restore (struct insn_chain *chain, int before_p, int regno,
+insert_restore (class insn_chain *chain, int before_p, int regno,
 int maxrestore, machine_mode *save_mode)
 {
 int i, k;
 rtx pat = NULL_RTX;
 int code;
 unsigned int numregs = 0;
-struct insn_chain *new_chain;
+class insn_chain *new_chain;
 rtx mem;

 /* A common failure mode if register status is not correct in the
@@ -1253,7 +1253,7 @@ insert_restore (struct insn_chain *chain, int before_p, int regno,
 /* Like insert_restore above, but save registers instead. */

 static int
-insert_save (struct insn_chain *chain, int regno,
+insert_save (class insn_chain *chain, int regno,
 HARD_REG_SET *to_save, machine_mode *save_mode)
 {
 int i;
@@ -1261,7 +1261,7 @@ insert_save (struct insn_chain *chain, int regno,
 rtx pat = NULL_RTX;
 int code;
 unsigned int numregs = 0;
-struct insn_chain *new_chain;
+class insn_chain *new_chain;
 rtx mem;

 /* A common failure mode if register status is not correct in the
@@ -1351,11 +1351,11 @@ add_used_regs (rtx *loc, void *data)
 }

 /* Emit a new caller-save insn and set the code. */
-static struct insn_chain *
-insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
+static class insn_chain *
+insert_one_insn (class insn_chain *chain, int before_p, int code, rtx pat)
 {
 rtx_insn *insn = chain->insn;
-struct insn_chain *new_chain;
+class insn_chain *new_chain;

 /* If INSN references CC0, put our insns in front of the insn that sets
    CC0. This is always safe, since the only way we could be passed an
@@ -1145,7 +1145,7 @@ get_bb_copy (basic_block bb)
    initialized so passes not needing this don't need to care. */

 void
-set_loop_copy (struct loop *loop, struct loop *copy)
+set_loop_copy (class loop *loop, class loop *copy)
 {
 if (!copy)
 copy_original_table_clear (loop_copy, loop->num);
@@ -1155,8 +1155,8 @@ set_loop_copy (struct loop *loop, struct loop *copy)

 /* Get the copy of LOOP. */

-struct loop *
-get_loop_copy (struct loop *loop)
+class loop *
+get_loop_copy (class loop *loop)
 {
 struct htab_bb_copy_original_entry *entry;
 struct htab_bb_copy_original_entry key;
@@ -122,8 +122,8 @@ extern void set_bb_original (basic_block, basic_block);
 extern basic_block get_bb_original (basic_block);
 extern void set_bb_copy (basic_block, basic_block);
 extern basic_block get_bb_copy (basic_block);
-void set_loop_copy (struct loop *, struct loop *);
-struct loop *get_loop_copy (struct loop *);
+void set_loop_copy (class loop *, class loop *);
+class loop *get_loop_copy (class loop *);

 /* Generic RAII class to allocate a bit from storage of integer type T.
    The allocated bit is accessible as mask with the single bit set
@@ -72,8 +72,8 @@ extern int rev_post_order_and_mark_dfs_back_seme (struct function *, edge,
 extern int dfs_enumerate_from (basic_block, int,
 bool (*)(const_basic_block, const void *),
 basic_block *, int, const void *);
-extern void compute_dominance_frontiers (struct bitmap_head *);
-extern bitmap compute_idf (bitmap, struct bitmap_head *);
+extern void compute_dominance_frontiers (class bitmap_head *);
+extern bitmap compute_idf (bitmap, class bitmap_head *);
 extern void bitmap_intersection_of_succs (sbitmap, sbitmap *, basic_block);
 extern void bitmap_intersection_of_preds (sbitmap, sbitmap *, basic_block);
 extern void bitmap_union_of_succs (sbitmap, sbitmap *, basic_block);
@@ -332,7 +332,7 @@ public:
 #define EOC ((size_t)-1)

 /* We have an array of such objects while deciding allocation. */
-static struct stack_var *stack_vars;
+static class stack_var *stack_vars;
 static size_t stack_vars_alloc;
 static size_t stack_vars_num;
 static hash_map<tree, size_t> *decl_to_stack_part;
@@ -426,7 +426,7 @@ alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
 static void
 add_stack_var (tree decl, bool really_expand)
 {
-struct stack_var *v;
+class stack_var *v;

 if (stack_vars_num >= stack_vars_alloc)
 {
@@ -435,7 +435,7 @@ add_stack_var (tree decl, bool really_expand)
 else
 stack_vars_alloc = 32;
 stack_vars
-= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
+= XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
 }
 if (!decl_to_stack_part)
 decl_to_stack_part = new hash_map<tree, size_t>;
@@ -474,8 +474,8 @@ add_stack_var (tree decl, bool really_expand)
 static void
 add_stack_var_conflict (size_t x, size_t y)
 {
-struct stack_var *a = &stack_vars[x];
-struct stack_var *b = &stack_vars[y];
+class stack_var *a = &stack_vars[x];
+class stack_var *b = &stack_vars[y];
 if (x == y)
 return;
 if (!a->conflicts)
@@ -491,8 +491,8 @@ add_stack_var_conflict (size_t x, size_t y)
 static bool
 stack_var_conflict_p (size_t x, size_t y)
 {
-struct stack_var *a = &stack_vars[x];
-struct stack_var *b = &stack_vars[y];
+class stack_var *a = &stack_vars[x];
+class stack_var *b = &stack_vars[y];
 if (x == y)
 return false;
 /* Partitions containing an SSA name result from gimple registers
@@ -607,7 +607,7 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
 unsigned i;
 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
 {
-struct stack_var *a = &stack_vars[i];
+class stack_var *a = &stack_vars[i];
 if (!a->conflicts)
 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
 bitmap_ior_into (a->conflicts, work);
@@ -853,7 +853,7 @@ update_alias_info_with_stack_vars (void)
 static void
 union_stack_vars (size_t a, size_t b)
 {
-struct stack_var *vb = &stack_vars[b];
+class stack_var *vb = &stack_vars[b];
 bitmap_iterator bi;
 unsigned u;

@@ -1045,7 +1045,7 @@ public:
    with that location. */

 static void
-expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
+expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
 {
 size_t si, i, j, n = stack_vars_num;
 poly_uint64 large_size = 0, large_alloc = 0;
@@ -2232,7 +2232,7 @@ expand_used_vars (void)
 /* Assign rtl to each variable based on these partitions. */
 if (stack_vars_num > 0)
 {
-struct stack_vars_data data;
+class stack_vars_data data;

 data.asan_base = NULL_RTX;
 data.asan_alignb = 0;
@@ -496,7 +496,7 @@ redirect_edge_and_branch_force (edge e, basic_block dest)
 {
 if (ret != NULL)
 {
-struct loop *loop
+class loop *loop
 = find_common_loop (single_pred (ret)->loop_father,
 single_succ (ret)->loop_father);
 add_bb_to_loop (ret, loop);
@@ -604,7 +604,7 @@ delete_basic_block (basic_block bb)

 if (current_loops != NULL)
 {
-struct loop *loop = bb->loop_father;
+class loop *loop = bb->loop_father;

 /* If we remove the header or the latch of a loop, mark the loop for
    removal. */
@@ -640,7 +640,7 @@ split_edge (edge e)
 profile_count count = e->count ();
 edge f;
 bool irr = (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
-struct loop *loop;
+class loop *loop;
 basic_block src = e->src, dest = e->dest;

 if (!cfg_hooks->split_edge)
@@ -870,7 +870,7 @@ make_forwarder_block (basic_block bb, bool (*redirect_edge_p) (edge),
 edge e, fallthru;
 edge_iterator ei;
 basic_block dummy, jump;
-struct loop *loop, *ploop, *cloop;
+class loop *loop, *ploop, *cloop;

 if (!cfg_hooks->make_forwarder_block)
 internal_error ("%s does not support make_forwarder_block",
@@ -1035,7 +1035,7 @@ force_nonfallthru (edge e)
 {
 basic_block pred = single_pred (ret);
 basic_block succ = single_succ (ret);
-struct loop *loop
+class loop *loop
 = find_common_loop (pred->loop_father, succ->loop_father);
 rescan_loop_exit (e, false, true);
 add_bb_to_loop (ret, loop);
@@ -1118,8 +1118,8 @@ duplicate_block (basic_block bb, edge e, basic_block after, copy_bb_data *id)
    of BB if the loop is not being copied. */
 if (current_loops != NULL)
 {
-struct loop *cloop = bb->loop_father;
-struct loop *copy = get_loop_copy (cloop);
+class loop *cloop = bb->loop_father;
+class loop *copy = get_loop_copy (cloop);
 /* If we copied the loop header block but not the loop
    we have created a loop with multiple entries. Ditch the loop,
    add the new block to the outer loop and arrange for a fixup. */
@@ -1228,7 +1228,7 @@ lv_flush_pending_stmts (edge e)
    a need to call the tree_duplicate_loop_to_header_edge rather
    than duplicate_loop_to_header_edge when we are in tree mode. */
 bool
-cfg_hook_duplicate_loop_to_header_edge (struct loop *loop, edge e,
+cfg_hook_duplicate_loop_to_header_edge (class loop *loop, edge e,
 unsigned int ndupl,
 sbitmap wont_exit, edge orig,
 vec<edge> *to_remove,
@@ -1336,7 +1336,7 @@ end:
 void
 copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
 edge *edges, unsigned num_edges, edge *new_edges,
-struct loop *base, basic_block after, bool update_dominance)
+class loop *base, basic_block after, bool update_dominance)
 {
 unsigned i, j;
 basic_block bb, new_bb, dom_bb;
@@ -166,7 +166,7 @@ struct cfg_hooks

 /* A hook for duplicating loop in CFG, currently this is used
    in loop versioning. */
-bool (*cfg_hook_duplicate_loop_to_header_edge) (struct loop *, edge,
+bool (*cfg_hook_duplicate_loop_to_header_edge) (class loop *, edge,
 unsigned, sbitmap,
 edge, vec<edge> *,
 int);
@@ -250,7 +250,7 @@ extern bool block_ends_with_condjump_p (const_basic_block bb);
 extern int flow_call_edges_add (sbitmap);
 extern void execute_on_growing_pred (edge);
 extern void execute_on_shrinking_pred (edge);
-extern bool cfg_hook_duplicate_loop_to_header_edge (struct loop *loop, edge,
+extern bool cfg_hook_duplicate_loop_to_header_edge (class loop *loop, edge,
 unsigned int ndupl,
 sbitmap wont_exit,
 edge orig,
@@ -266,7 +266,7 @@ extern void lv_add_condition_to_bb (basic_block, basic_block, basic_block,

 extern bool can_copy_bbs_p (basic_block *, unsigned);
 extern void copy_bbs (basic_block *, unsigned, basic_block *,
-edge *, unsigned, edge *, struct loop *,
+edge *, unsigned, edge *, class loop *,
 basic_block, bool);

 void profile_record_check_consistency (profile_record *);
gcc/cfgloop.c
@@ -59,7 +59,7 @@ flow_loops_cfg_dump (FILE *file)
 /* Return nonzero if the nodes of LOOP are a subset of OUTER. */

 bool
-flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
+flow_loop_nested_p (const class loop *outer, const class loop *loop)
 {
 unsigned odepth = loop_depth (outer);

@ -70,8 +70,8 @@ flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
|
||||
/* Returns the loop such that LOOP is nested DEPTH (indexed from zero)
|
||||
loops within LOOP. */
|
||||
|
||||
struct loop *
|
||||
superloop_at_depth (struct loop *loop, unsigned depth)
|
||||
class loop *
|
||||
superloop_at_depth (class loop *loop, unsigned depth)
|
||||
{
|
||||
unsigned ldepth = loop_depth (loop);
|
||||
|
||||
@ -86,7 +86,7 @@ superloop_at_depth (struct loop *loop, unsigned depth)
|
||||
/* Returns the list of the latch edges of LOOP. */
|
||||
|
||||
static vec<edge>
|
||||
get_loop_latch_edges (const struct loop *loop)
|
||||
get_loop_latch_edges (const class loop *loop)
|
||||
{
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
@ -105,8 +105,8 @@ get_loop_latch_edges (const struct loop *loop)
|
||||
using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
|
||||
|
||||
void
|
||||
flow_loop_dump (const struct loop *loop, FILE *file,
|
||||
void (*loop_dump_aux) (const struct loop *, FILE *, int),
|
||||
flow_loop_dump (const class loop *loop, FILE *file,
|
||||
void (*loop_dump_aux) (const class loop *, FILE *, int),
|
||||
int verbose)
|
||||
{
|
||||
basic_block *bbs;
|
||||
@ -160,9 +160,9 @@ flow_loop_dump (const struct loop *loop, FILE *file,
|
||||
using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
|
||||
|
||||
void
|
||||
flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
|
||||
flow_loops_dump (FILE *file, void (*loop_dump_aux) (const class loop *, FILE *, int), int verbose)
|
||||
{
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
|
||||
if (!current_loops || ! file)
|
||||
return;
|
||||
@ -181,7 +181,7 @@ flow_loops_dump (FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *,
|
||||
/* Free data allocated for LOOP. */
|
||||
|
||||
void
|
||||
flow_loop_free (struct loop *loop)
|
||||
flow_loop_free (class loop *loop)
|
||||
{
|
||||
struct loop_exit *exit, *next;
|
||||
|
||||
@ -229,7 +229,7 @@ flow_loops_free (struct loops *loops)
|
||||
Return the number of nodes within the loop. */
|
||||
|
||||
int
|
||||
flow_loop_nodes_find (basic_block header, struct loop *loop)
|
||||
flow_loop_nodes_find (basic_block header, class loop *loop)
|
||||
{
|
||||
vec<basic_block> stack = vNULL;
|
||||
int num_nodes = 1;
|
||||
@ -278,7 +278,7 @@ flow_loop_nodes_find (basic_block header, struct loop *loop)
|
||||
superloop is FATHER. */
|
||||
|
||||
static void
|
||||
establish_preds (struct loop *loop, struct loop *father)
|
||||
establish_preds (class loop *loop, class loop *father)
|
||||
{
|
||||
loop_p ploop;
|
||||
unsigned depth = loop_depth (father) + 1;
|
||||
@ -302,8 +302,8 @@ establish_preds (struct loop *loop, struct loop *father)
|
||||
of FATHERs siblings. */
|
||||
|
||||
void
|
||||
flow_loop_tree_node_add (struct loop *father, struct loop *loop,
|
||||
struct loop *after)
|
||||
flow_loop_tree_node_add (class loop *father, class loop *loop,
|
||||
class loop *after)
|
||||
{
|
||||
if (after)
|
||||
{
|
||||
@ -322,9 +322,9 @@ flow_loop_tree_node_add (struct loop *father, struct loop *loop,
|
||||
/* Remove LOOP from the loop hierarchy tree. */
|
||||
|
||||
void
|
||||
flow_loop_tree_node_remove (struct loop *loop)
|
||||
flow_loop_tree_node_remove (class loop *loop)
|
||||
{
|
||||
struct loop *prev, *father;
|
||||
class loop *prev, *father;
|
||||
|
||||
father = loop_outer (loop);
|
||||
|
||||
@ -343,10 +343,10 @@ flow_loop_tree_node_remove (struct loop *loop)
|
||||
|
||||
/* Allocates and returns new loop structure. */
|
||||
|
||||
struct loop *
|
||||
class loop *
|
||||
alloc_loop (void)
|
||||
{
|
||||
struct loop *loop = ggc_cleared_alloc<struct loop> ();
|
||||
class loop *loop = ggc_cleared_alloc<class loop> ();
|
||||
|
||||
loop->exits = ggc_cleared_alloc<loop_exit> ();
|
||||
loop->exits->next = loop->exits->prev = loop->exits;
|
||||
@ -365,7 +365,7 @@ void
|
||||
init_loops_structure (struct function *fn,
|
||||
struct loops *loops, unsigned num_loops)
|
||||
{
|
||||
struct loop *root;
|
||||
class loop *root;
|
||||
|
||||
memset (loops, 0, sizeof *loops);
|
||||
vec_alloc (loops->larray, num_loops);
|
||||
@ -460,7 +460,7 @@ flow_loops_find (struct loops *loops)
|
||||
basic_block header = BASIC_BLOCK_FOR_FN (cfun, rc_order[b]);
|
||||
if (bb_loop_header_p (header))
|
||||
{
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
|
||||
/* The current active loop tree has valid loop-fathers for
|
||||
header blocks. */
|
||||
@ -503,7 +503,7 @@ flow_loops_find (struct loops *loops)
|
||||
and assign basic-block ownership. */
|
||||
for (i = 0; i < larray.length (); ++i)
|
||||
{
|
||||
struct loop *loop = larray[i];
|
||||
class loop *loop = larray[i];
|
||||
basic_block header = loop->header;
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
@ -539,8 +539,8 @@ static int *sort_sibling_loops_cmp_rpo;
|
||||
static int
|
||||
sort_sibling_loops_cmp (const void *la_, const void *lb_)
|
||||
{
|
||||
const struct loop *la = *(const struct loop * const *)la_;
|
||||
const struct loop *lb = *(const struct loop * const *)lb_;
|
||||
const class loop *la = *(const class loop * const *)la_;
|
||||
const class loop *lb = *(const class loop * const *)lb_;
|
||||
return (sort_sibling_loops_cmp_rpo[la->header->index]
|
||||
- sort_sibling_loops_cmp_rpo[lb->header->index]);
|
||||
}
|
||||
@ -643,7 +643,7 @@ find_subloop_latch_edge_by_profile (vec<edge> latches)
|
||||
another edge. */
|
||||
|
||||
static edge
|
||||
find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
|
||||
find_subloop_latch_edge_by_ivs (class loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
|
||||
{
|
||||
edge e, latch = latches[0];
|
||||
unsigned i;
|
||||
@ -695,7 +695,7 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> la
|
||||
returns NULL. */
|
||||
|
||||
static edge
|
||||
find_subloop_latch_edge (struct loop *loop)
|
||||
find_subloop_latch_edge (class loop *loop)
|
||||
{
|
||||
vec<edge> latches = get_loop_latch_edges (loop);
|
||||
edge latch = NULL;
|
||||
@ -729,11 +729,11 @@ mfb_redirect_edges_in_set (edge e)
|
||||
/* Creates a subloop of LOOP with latch edge LATCH. */
|
||||
|
||||
static void
|
||||
form_subloop (struct loop *loop, edge latch)
|
||||
form_subloop (class loop *loop, edge latch)
|
||||
{
|
||||
edge_iterator ei;
|
||||
edge e, new_entry;
|
||||
struct loop *new_loop;
|
||||
class loop *new_loop;
|
||||
|
||||
mfb_reis_set = new hash_set<edge>;
|
||||
FOR_EACH_EDGE (e, ei, loop->header->preds)
|
||||
@ -759,7 +759,7 @@ form_subloop (struct loop *loop, edge latch)
|
||||
a new latch of LOOP. */
|
||||
|
||||
static void
|
||||
merge_latch_edges (struct loop *loop)
|
||||
merge_latch_edges (class loop *loop)
|
||||
{
|
||||
vec<edge> latches = get_loop_latch_edges (loop);
|
||||
edge latch, e;
|
||||
@ -792,7 +792,7 @@ merge_latch_edges (struct loop *loop)
|
||||
loops with single latch edge. */
|
||||
|
||||
static void
|
||||
disambiguate_multiple_latches (struct loop *loop)
|
||||
disambiguate_multiple_latches (class loop *loop)
|
||||
{
|
||||
edge e;
|
||||
|
||||
@ -836,7 +836,7 @@ disambiguate_multiple_latches (struct loop *loop)
|
||||
void
|
||||
disambiguate_loops_with_multiple_latches (void)
|
||||
{
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
|
||||
FOR_EACH_LOOP (loop, 0)
|
||||
{
|
||||
@ -847,9 +847,9 @@ disambiguate_loops_with_multiple_latches (void)
|
||||
|
||||
/* Return nonzero if basic block BB belongs to LOOP. */
|
||||
bool
|
||||
flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
|
||||
flow_bb_inside_loop_p (const class loop *loop, const_basic_block bb)
|
||||
{
|
||||
struct loop *source_loop;
|
||||
class loop *source_loop;
|
||||
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
@ -863,7 +863,7 @@ flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
|
||||
static bool
|
||||
glb_enum_p (const_basic_block bb, const void *glb_loop)
|
||||
{
|
||||
const struct loop *const loop = (const struct loop *) glb_loop;
|
||||
const class loop *const loop = (const class loop *) glb_loop;
|
||||
return (bb != loop->header
|
||||
&& dominated_by_p (CDI_DOMINATORS, bb, loop->header));
|
||||
}
|
||||
@ -876,7 +876,7 @@ glb_enum_p (const_basic_block bb, const void *glb_loop)
|
||||
returned. */
|
||||
|
||||
unsigned
|
||||
get_loop_body_with_size (const struct loop *loop, basic_block *body,
|
||||
get_loop_body_with_size (const class loop *loop, basic_block *body,
|
||||
unsigned max_size)
|
||||
{
|
||||
return dfs_enumerate_from (loop->header, 1, glb_enum_p,
|
||||
@ -888,7 +888,7 @@ get_loop_body_with_size (const struct loop *loop, basic_block *body,
|
||||
header != latch, latch is the 1-st block. */
|
||||
|
||||
basic_block *
|
||||
get_loop_body (const struct loop *loop)
|
||||
get_loop_body (const class loop *loop)
|
||||
{
|
||||
basic_block *body, bb;
|
||||
unsigned tv = 0;
|
||||
@ -918,7 +918,7 @@ get_loop_body (const struct loop *loop)
|
||||
array TOVISIT from index *TV. */
|
||||
|
||||
static void
|
||||
fill_sons_in_loop (const struct loop *loop, basic_block bb,
|
||||
fill_sons_in_loop (const class loop *loop, basic_block bb,
|
||||
basic_block *tovisit, int *tv)
|
||||
{
|
||||
basic_block son, postpone = NULL;
|
||||
@ -948,7 +948,7 @@ fill_sons_in_loop (const struct loop *loop, basic_block bb,
|
||||
the latch, then only blocks dominated by s are be after it. */
|
||||
|
||||
basic_block *
|
||||
get_loop_body_in_dom_order (const struct loop *loop)
|
||||
get_loop_body_in_dom_order (const class loop *loop)
|
||||
{
|
||||
basic_block *tovisit;
|
||||
int tv;
|
||||
@ -970,7 +970,7 @@ get_loop_body_in_dom_order (const struct loop *loop)
|
||||
/* Gets body of a LOOP sorted via provided BB_COMPARATOR. */
|
||||
|
||||
basic_block *
|
||||
get_loop_body_in_custom_order (const struct loop *loop,
|
||||
get_loop_body_in_custom_order (const class loop *loop,
|
||||
int (*bb_comparator) (const void *, const void *))
|
||||
{
|
||||
basic_block *bbs = get_loop_body (loop);
|
||||
@ -983,7 +983,7 @@ get_loop_body_in_custom_order (const struct loop *loop,
|
||||
/* Get body of a LOOP in breadth first sort order. */
|
||||
|
||||
basic_block *
|
||||
get_loop_body_in_bfs_order (const struct loop *loop)
|
||||
get_loop_body_in_bfs_order (const class loop *loop)
|
||||
{
|
||||
basic_block *blocks;
|
||||
basic_block bb;
|
||||
@ -1069,7 +1069,7 @@ void
|
||||
rescan_loop_exit (edge e, bool new_edge, bool removed)
|
||||
{
|
||||
struct loop_exit *exits = NULL, *exit;
|
||||
struct loop *aloop, *cloop;
|
||||
class loop *aloop, *cloop;
|
||||
|
||||
if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
|
||||
return;
|
||||
@ -1190,7 +1190,7 @@ release_recorded_exits (function *fn)
|
||||
/* Returns the list of the exit edges of a LOOP. */
|
||||
|
||||
vec<edge>
|
||||
get_loop_exit_edges (const struct loop *loop)
|
||||
get_loop_exit_edges (const class loop *loop)
|
||||
{
|
||||
vec<edge> edges = vNULL;
|
||||
edge e;
|
||||
@ -1226,7 +1226,7 @@ get_loop_exit_edges (const struct loop *loop)
|
||||
/* Counts the number of conditional branches inside LOOP. */
|
||||
|
||||
unsigned
|
||||
num_loop_branches (const struct loop *loop)
|
||||
num_loop_branches (const class loop *loop)
|
||||
{
|
||||
unsigned i, n;
|
||||
basic_block * body;
|
||||
@ -1245,7 +1245,7 @@ num_loop_branches (const struct loop *loop)
|
||||
|
||||
/* Adds basic block BB to LOOP. */
|
||||
void
|
||||
add_bb_to_loop (basic_block bb, struct loop *loop)
|
||||
add_bb_to_loop (basic_block bb, class loop *loop)
|
||||
{
|
||||
unsigned i;
|
||||
loop_p ploop;
|
||||
@ -1273,7 +1273,7 @@ void
|
||||
remove_bb_from_loops (basic_block bb)
|
||||
{
|
||||
unsigned i;
|
||||
struct loop *loop = bb->loop_father;
|
||||
class loop *loop = bb->loop_father;
|
||||
loop_p ploop;
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
@ -1295,8 +1295,8 @@ remove_bb_from_loops (basic_block bb)
|
||||
}
|
||||
|
||||
/* Finds nearest common ancestor in loop tree for given loops. */
|
||||
struct loop *
|
||||
find_common_loop (struct loop *loop_s, struct loop *loop_d)
|
||||
class loop *
|
||||
find_common_loop (class loop *loop_s, class loop *loop_d)
|
||||
{
|
||||
unsigned sdepth, ddepth;
|
||||
|
||||
@ -1322,7 +1322,7 @@ find_common_loop (struct loop *loop_s, struct loop *loop_d)
|
||||
/* Removes LOOP from structures and frees its data. */
|
||||
|
||||
void
|
||||
delete_loop (struct loop *loop)
|
||||
delete_loop (class loop *loop)
|
||||
{
|
||||
/* Remove the loop from structure. */
|
||||
flow_loop_tree_node_remove (loop);
|
||||
@ -1337,11 +1337,11 @@ delete_loop (struct loop *loop)
|
||||
/* Cancels the LOOP; it must be innermost one. */
|
||||
|
||||
static void
|
||||
cancel_loop (struct loop *loop)
|
||||
cancel_loop (class loop *loop)
|
||||
{
|
||||
basic_block *bbs;
|
||||
unsigned i;
|
||||
struct loop *outer = loop_outer (loop);
|
||||
class loop *outer = loop_outer (loop);
|
||||
|
||||
gcc_assert (!loop->inner);
|
||||
|
||||
@ -1356,7 +1356,7 @@ cancel_loop (struct loop *loop)
|
||||
|
||||
/* Cancels LOOP and all its subloops. */
|
||||
void
|
||||
cancel_loop_tree (struct loop *loop)
|
||||
cancel_loop_tree (class loop *loop)
|
||||
{
|
||||
while (loop->inner)
|
||||
cancel_loop_tree (loop->inner);
|
||||
@ -1385,7 +1385,7 @@ verify_loop_structure (void)
|
||||
{
|
||||
unsigned *sizes, i, j;
|
||||
basic_block bb, *bbs;
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
int err = 0;
|
||||
edge e;
|
||||
unsigned num = number_of_loops (cfun);
|
||||
@ -1727,14 +1727,14 @@ verify_loop_structure (void)
|
||||
|
||||
/* Returns latch edge of LOOP. */
|
||||
edge
|
||||
loop_latch_edge (const struct loop *loop)
|
||||
loop_latch_edge (const class loop *loop)
|
||||
{
|
||||
return find_edge (loop->latch, loop->header);
|
||||
}
|
||||
|
||||
/* Returns preheader edge of LOOP. */
|
||||
edge
|
||||
loop_preheader_edge (const struct loop *loop)
|
||||
loop_preheader_edge (const class loop *loop)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -1758,7 +1758,7 @@ loop_preheader_edge (const struct loop *loop)
|
||||
/* Returns true if E is an exit of LOOP. */
|
||||
|
||||
bool
|
||||
loop_exit_edge_p (const struct loop *loop, const_edge e)
|
||||
loop_exit_edge_p (const class loop *loop, const_edge e)
|
||||
{
|
||||
return (flow_bb_inside_loop_p (loop, e->src)
|
||||
&& !flow_bb_inside_loop_p (loop, e->dest));
|
||||
@ -1769,7 +1769,7 @@ loop_exit_edge_p (const struct loop *loop, const_edge e)
|
||||
is returned always. */
|
||||
|
||||
edge
|
||||
single_exit (const struct loop *loop)
|
||||
single_exit (const class loop *loop)
|
||||
{
|
||||
struct loop_exit *exit = loop->exits->next;
|
||||
|
||||
@ -1785,7 +1785,7 @@ single_exit (const struct loop *loop)
|
||||
/* Returns true when BB has an incoming edge exiting LOOP. */
|
||||
|
||||
bool
|
||||
loop_exits_to_bb_p (struct loop *loop, basic_block bb)
|
||||
loop_exits_to_bb_p (class loop *loop, basic_block bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -1800,7 +1800,7 @@ loop_exits_to_bb_p (struct loop *loop, basic_block bb)
|
||||
/* Returns true when BB has an outgoing edge exiting LOOP. */
|
||||
|
||||
bool
|
||||
loop_exits_from_bb_p (struct loop *loop, basic_block bb)
|
||||
loop_exits_from_bb_p (class loop *loop, basic_block bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
@ -1815,10 +1815,10 @@ loop_exits_from_bb_p (struct loop *loop, basic_block bb)
|
||||
/* Return location corresponding to the loop control condition if possible. */
|
||||
|
||||
dump_user_location_t
|
||||
get_loop_location (struct loop *loop)
|
||||
get_loop_location (class loop *loop)
|
||||
{
|
||||
rtx_insn *insn = NULL;
|
||||
struct niter_desc *desc = NULL;
|
||||
class niter_desc *desc = NULL;
|
||||
edge exit;
|
||||
|
||||
/* For a for or while loop, we would like to return the location
|
||||
@ -1869,7 +1869,7 @@ get_loop_location (struct loop *loop)
|
||||
I_BOUND times. */
|
||||
|
||||
void
|
||||
record_niter_bound (struct loop *loop, const widest_int &i_bound,
|
||||
record_niter_bound (class loop *loop, const widest_int &i_bound,
|
||||
bool realistic, bool upper)
|
||||
{
|
||||
/* Update the bounds only when there is no previous estimation, or when the
|
||||
@ -1920,7 +1920,7 @@ record_niter_bound (struct loop *loop, const widest_int &i_bound,
|
||||
on the number of iterations of LOOP could not be derived, returns -1. */
|
||||
|
||||
HOST_WIDE_INT
|
||||
get_estimated_loop_iterations_int (struct loop *loop)
|
||||
get_estimated_loop_iterations_int (class loop *loop)
|
||||
{
|
||||
widest_int nit;
|
||||
HOST_WIDE_INT hwi_nit;
|
||||
@ -1940,7 +1940,7 @@ get_estimated_loop_iterations_int (struct loop *loop)
|
||||
the number of execution of the latch by one. */
|
||||
|
||||
HOST_WIDE_INT
|
||||
max_stmt_executions_int (struct loop *loop)
|
||||
max_stmt_executions_int (class loop *loop)
|
||||
{
|
||||
HOST_WIDE_INT nit = get_max_loop_iterations_int (loop);
|
||||
HOST_WIDE_INT snit;
|
||||
@ -1959,7 +1959,7 @@ max_stmt_executions_int (struct loop *loop)
|
||||
the number of execution of the latch by one. */
|
||||
|
||||
HOST_WIDE_INT
|
||||
likely_max_stmt_executions_int (struct loop *loop)
|
||||
likely_max_stmt_executions_int (class loop *loop)
|
||||
{
|
||||
HOST_WIDE_INT nit = get_likely_max_loop_iterations_int (loop);
|
||||
HOST_WIDE_INT snit;
|
||||
@ -1978,7 +1978,7 @@ likely_max_stmt_executions_int (struct loop *loop)
|
||||
returns true. */
|
||||
|
||||
bool
|
||||
get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
|
||||
get_estimated_loop_iterations (class loop *loop, widest_int *nit)
|
||||
{
|
||||
/* Even if the bound is not recorded, possibly we can derrive one from
|
||||
profile. */
|
||||
@ -2002,7 +2002,7 @@ get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
|
||||
false, otherwise returns true. */
|
||||
|
||||
bool
|
||||
get_max_loop_iterations (const struct loop *loop, widest_int *nit)
|
||||
get_max_loop_iterations (const class loop *loop, widest_int *nit)
|
||||
{
|
||||
if (!loop->any_upper_bound)
|
||||
return false;
|
||||
@ -2016,7 +2016,7 @@ get_max_loop_iterations (const struct loop *loop, widest_int *nit)
|
||||
on the number of iterations of LOOP could not be derived, returns -1. */
|
||||
|
||||
HOST_WIDE_INT
|
||||
get_max_loop_iterations_int (const struct loop *loop)
|
||||
get_max_loop_iterations_int (const class loop *loop)
|
||||
{
|
||||
widest_int nit;
|
||||
HOST_WIDE_INT hwi_nit;
|
||||
@ -2036,7 +2036,7 @@ get_max_loop_iterations_int (const struct loop *loop)
|
||||
false, otherwise returns true. */
|
||||
|
||||
bool
|
||||
get_likely_max_loop_iterations (struct loop *loop, widest_int *nit)
|
||||
get_likely_max_loop_iterations (class loop *loop, widest_int *nit)
|
||||
{
|
||||
if (!loop->any_likely_upper_bound)
|
||||
return false;
|
||||
@ -2050,7 +2050,7 @@ get_likely_max_loop_iterations (struct loop *loop, widest_int *nit)
|
||||
on the number of iterations of LOOP could not be derived, returns -1. */
|
||||
|
||||
HOST_WIDE_INT
|
||||
get_likely_max_loop_iterations_int (struct loop *loop)
|
||||
get_likely_max_loop_iterations_int (class loop *loop)
|
||||
{
|
||||
widest_int nit;
|
||||
HOST_WIDE_INT hwi_nit;
|
||||
|
gcc/cfgloop.h
@@ -66,7 +66,7 @@ public:
 bool is_exit;

 /* The next bound in the list. */
-struct nb_iter_bound *next;
+class nb_iter_bound *next;
 };

 /* Description of the loop exit. */
@ -92,7 +92,7 @@ struct loop_exit_hasher : ggc_ptr_hash<loop_exit>
|
||||
static void remove (loop_exit *);
|
||||
};
|
||||
|
||||
typedef struct loop *loop_p;
|
||||
typedef class loop *loop_p;
|
||||
|
||||
/* An integer estimation of the number of iterations. Estimate_state
|
||||
describes what is the state of the estimation. */
|
||||
@ -142,10 +142,10 @@ public:
|
||||
vec<loop_p, va_gc> *superloops;
|
||||
|
||||
/* The first inner (child) loop or NULL if innermost loop. */
|
||||
struct loop *inner;
|
||||
class loop *inner;
|
||||
|
||||
/* Link to the next (sibling) loop. */
|
||||
struct loop *next;
|
||||
class loop *next;
|
||||
|
||||
/* Auxiliary info specific to a pass. */
|
||||
PTR GTY ((skip (""))) aux;
|
||||
@ -252,7 +252,7 @@ public:
|
||||
int orig_loop_num;
|
||||
|
||||
/* Upper bound on number of iterations of a loop. */
|
||||
struct nb_iter_bound *bounds;
|
||||
class nb_iter_bound *bounds;
|
||||
|
||||
/* Non-overflow control ivs of a loop. */
|
||||
struct control_iv *control_ivs;
|
||||
@ -261,7 +261,7 @@ public:
|
||||
struct loop_exit *exits;
|
||||
|
||||
/* Number of iteration analysis data for RTL. */
|
||||
struct niter_desc *simple_loop_desc;
|
||||
class niter_desc *simple_loop_desc;
|
||||
|
||||
/* For sanity checking during loop fixup we record here the former
|
||||
loop header for loops marked for removal. Note that this prevents
|
||||
@ -277,21 +277,21 @@ public:
|
||||
|
||||
/* Set C to the LOOP constraint. */
|
||||
static inline void
|
||||
loop_constraint_set (struct loop *loop, unsigned c)
|
||||
loop_constraint_set (class loop *loop, unsigned c)
|
||||
{
|
||||
loop->constraints |= c;
|
||||
}
|
||||
|
||||
/* Clear C from the LOOP constraint. */
|
||||
static inline void
|
||||
loop_constraint_clear (struct loop *loop, unsigned c)
|
||||
loop_constraint_clear (class loop *loop, unsigned c)
|
||||
{
|
||||
loop->constraints &= ~c;
|
||||
}
|
||||
|
||||
/* Check if C is set in the LOOP constraint. */
|
||||
static inline bool
|
||||
loop_constraint_set_p (struct loop *loop, unsigned c)
|
||||
loop_constraint_set_p (class loop *loop, unsigned c)
|
||||
{
|
||||
return (loop->constraints & c) == c;
|
||||
}
|
||||
@ -327,7 +327,7 @@ struct GTY (()) loops {
|
||||
hash_table<loop_exit_hasher> *GTY(()) exits;
|
||||
|
||||
/* Pointer to root of loop hierarchy tree. */
|
||||
struct loop *tree_root;
|
||||
class loop *tree_root;
|
||||
};
|
||||
|
||||
/* Loop recognition. */
|
||||
@ -337,12 +337,12 @@ extern struct loops *flow_loops_find (struct loops *);
|
||||
extern void disambiguate_loops_with_multiple_latches (void);
|
||||
extern void flow_loops_free (struct loops *);
|
||||
extern void flow_loops_dump (FILE *,
|
||||
void (*)(const struct loop *, FILE *, int), int);
|
||||
extern void flow_loop_dump (const struct loop *, FILE *,
|
||||
void (*)(const struct loop *, FILE *, int), int);
|
||||
struct loop *alloc_loop (void);
|
||||
extern void flow_loop_free (struct loop *);
|
||||
int flow_loop_nodes_find (basic_block, struct loop *);
|
||||
void (*)(const class loop *, FILE *, int), int);
|
||||
extern void flow_loop_dump (const class loop *, FILE *,
|
||||
void (*)(const class loop *, FILE *, int), int);
|
||||
class loop *alloc_loop (void);
|
||||
extern void flow_loop_free (class loop *);
|
||||
int flow_loop_nodes_find (basic_block, class loop *);
|
||||
unsigned fix_loop_structure (bitmap changed_bbs);
|
||||
bool mark_irreducible_loops (void);
|
||||
void release_recorded_exits (function *);
|
||||
@ -351,54 +351,54 @@ void rescan_loop_exit (edge, bool, bool);
|
||||
void sort_sibling_loops (function *);
|
||||
|
||||
/* Loop data structure manipulation/querying. */
|
||||
extern void flow_loop_tree_node_add (struct loop *, struct loop *,
|
||||
struct loop * = NULL);
|
||||
extern void flow_loop_tree_node_remove (struct loop *);
|
||||
extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
|
||||
extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
|
||||
extern struct loop * find_common_loop (struct loop *, struct loop *);
|
||||
struct loop *superloop_at_depth (struct loop *, unsigned);
|
||||
extern void flow_loop_tree_node_add (class loop *, class loop *,
|
||||
class loop * = NULL);
|
||||
extern void flow_loop_tree_node_remove (class loop *);
|
||||
extern bool flow_loop_nested_p (const class loop *, const class loop *);
|
||||
extern bool flow_bb_inside_loop_p (const class loop *, const_basic_block);
|
||||
extern class loop * find_common_loop (class loop *, class loop *);
|
||||
class loop *superloop_at_depth (class loop *, unsigned);
|
||||
struct eni_weights;
|
||||
extern int num_loop_insns (const struct loop *);
|
||||
extern int average_num_loop_insns (const struct loop *);
|
||||
extern unsigned get_loop_level (const struct loop *);
|
||||
extern bool loop_exit_edge_p (const struct loop *, const_edge);
|
||||
extern bool loop_exits_to_bb_p (struct loop *, basic_block);
|
||||
extern bool loop_exits_from_bb_p (struct loop *, basic_block);
|
||||
extern int num_loop_insns (const class loop *);
|
||||
extern int average_num_loop_insns (const class loop *);
|
||||
extern unsigned get_loop_level (const class loop *);
|
||||
extern bool loop_exit_edge_p (const class loop *, const_edge);
|
||||
extern bool loop_exits_to_bb_p (class loop *, basic_block);
|
||||
extern bool loop_exits_from_bb_p (class loop *, basic_block);
|
||||
extern void mark_loop_exit_edges (void);
|
||||
extern dump_user_location_t get_loop_location (struct loop *loop);
|
||||
extern dump_user_location_t get_loop_location (class loop *loop);
|
||||
|
||||
/* Loops & cfg manipulation. */
|
||||
extern basic_block *get_loop_body (const struct loop *);
|
||||
extern unsigned get_loop_body_with_size (const struct loop *, basic_block *,
|
||||
extern basic_block *get_loop_body (const class loop *);
|
||||
extern unsigned get_loop_body_with_size (const class loop *, basic_block *,
|
||||
unsigned);
|
||||
extern basic_block *get_loop_body_in_dom_order (const struct loop *);
|
||||
extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
|
||||
extern basic_block *get_loop_body_in_custom_order (const struct loop *,
|
||||
extern basic_block *get_loop_body_in_dom_order (const class loop *);
|
||||
extern basic_block *get_loop_body_in_bfs_order (const class loop *);
|
||||
extern basic_block *get_loop_body_in_custom_order (const class loop *,
|
||||
int (*) (const void *, const void *));
|
||||
|
||||
extern vec<edge> get_loop_exit_edges (const struct loop *);
|
||||
extern edge single_exit (const struct loop *);
|
||||
extern edge single_likely_exit (struct loop *loop);
|
||||
extern unsigned num_loop_branches (const struct loop *);
|
||||
extern vec<edge> get_loop_exit_edges (const class loop *);
|
||||
extern edge single_exit (const class loop *);
|
||||
extern edge single_likely_exit (class loop *loop);
|
||||
extern unsigned num_loop_branches (const class loop *);
|
||||
|
||||
extern edge loop_preheader_edge (const struct loop *);
|
||||
extern edge loop_latch_edge (const struct loop *);
|
||||
extern edge loop_preheader_edge (const class loop *);
|
||||
extern edge loop_latch_edge (const class loop *);
|
||||
|
||||
extern void add_bb_to_loop (basic_block, struct loop *);
|
||||
extern void add_bb_to_loop (basic_block, class loop *);
|
||||
extern void remove_bb_from_loops (basic_block);
|
||||
|
||||
extern void cancel_loop_tree (struct loop *);
|
||||
extern void delete_loop (struct loop *);
|
||||
extern void cancel_loop_tree (class loop *);
|
||||
extern void delete_loop (class loop *);
|
||||
|
||||
|
||||
extern void verify_loop_structure (void);
|
||||
|
||||
/* Loop analysis. */
|
||||
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
|
||||
gcov_type expected_loop_iterations_unbounded (const struct loop *,
|
||||
extern bool just_once_each_iteration_p (const class loop *, const_basic_block);
|
||||
gcov_type expected_loop_iterations_unbounded (const class loop *,
|
||||
bool *read_profile_p = NULL, bool by_profile_only = false);
|
||||
extern unsigned expected_loop_iterations (struct loop *);
|
||||
extern unsigned expected_loop_iterations (class loop *);
|
||||
extern rtx doloop_condition_get (rtx_insn *);
|
||||
|
||||
void mark_loop_for_removal (loop_p);
|
||||
@ -490,21 +490,21 @@ public:
|
||||
rtx niter_expr;
|
||||
};
|
||||
|
||||
extern void iv_analysis_loop_init (struct loop *);
|
||||
extern bool iv_analyze (rtx_insn *, scalar_int_mode, rtx, struct rtx_iv *);
|
||||
extern bool iv_analyze_result (rtx_insn *, rtx, struct rtx_iv *);
|
||||
extern void iv_analysis_loop_init (class loop *);
|
||||
extern bool iv_analyze (rtx_insn *, scalar_int_mode, rtx, class rtx_iv *);
|
||||
extern bool iv_analyze_result (rtx_insn *, rtx, class rtx_iv *);
|
||||
extern bool iv_analyze_expr (rtx_insn *, scalar_int_mode, rtx,
|
||||
struct rtx_iv *);
|
||||
extern rtx get_iv_value (struct rtx_iv *, rtx);
|
||||
class rtx_iv *);
|
||||
extern rtx get_iv_value (class rtx_iv *, rtx);
|
||||
extern bool biv_p (rtx_insn *, scalar_int_mode, rtx);
|
||||
extern void find_simple_exit (struct loop *, struct niter_desc *);
|
||||
extern void find_simple_exit (class loop *, class niter_desc *);
|
||||
extern void iv_analysis_done (void);
|
||||
|
||||
extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
|
||||
extern void free_simple_loop_desc (struct loop *loop);
|
||||
extern class niter_desc *get_simple_loop_desc (class loop *loop);
|
||||
extern void free_simple_loop_desc (class loop *loop);
|
||||
|
||||
static inline struct niter_desc *
|
||||
simple_loop_desc (struct loop *loop)
|
||||
static inline class niter_desc *
|
||||
simple_loop_desc (class loop *loop)
|
||||
{
|
||||
return loop->simple_loop_desc;
|
||||
}
|
||||
@ -513,7 +513,7 @@ simple_loop_desc (struct loop *loop)
|
||||
|
||||
/* Returns the loop with index NUM from FNs loop tree. */
|
||||
|
||||
static inline struct loop *
|
||||
static inline class loop *
|
||||
get_loop (struct function *fn, unsigned num)
|
||||
{
|
||||
return (*loops_for_fn (fn)->larray)[num];
|
||||
@ -522,7 +522,7 @@ get_loop (struct function *fn, unsigned num)
|
||||
/* Returns the number of superloops of LOOP. */
|
||||
|
||||
static inline unsigned
|
||||
loop_depth (const struct loop *loop)
|
||||
loop_depth (const class loop *loop)
|
||||
{
|
||||
return vec_safe_length (loop->superloops);
|
||||
}
|
||||
@ -530,8 +530,8 @@ loop_depth (const struct loop *loop)
|
||||
/* Returns the immediate superloop of LOOP, or NULL if LOOP is the outermost
|
||||
loop. */
|
||||
|
||||
static inline struct loop *
|
||||
loop_outer (const struct loop *loop)
|
||||
static inline class loop *
|
||||
loop_outer (const class loop *loop)
|
||||
{
|
||||
unsigned n = vec_safe_length (loop->superloops);
|
||||
|
||||
@ -544,7 +544,7 @@ loop_outer (const struct loop *loop)
|
||||
/* Returns true if LOOP has at least one exit edge. */
|
||||
|
||||
static inline bool
|
||||
loop_has_exit_edges (const struct loop *loop)
|
||||
loop_has_exit_edges (const class loop *loop)
|
||||
{
|
||||
return loop->exits->next->e != NULL;
|
||||
}
|
||||
@ -692,7 +692,7 @@ loop_iterator::next ()
|
||||
inline
|
||||
loop_iterator::loop_iterator (function *fn, loop_p *loop, unsigned flags)
|
||||
{
|
||||
struct loop *aloop;
|
||||
class loop *aloop;
|
||||
unsigned i;
|
||||
int mn;
|
||||
|
||||
@ -843,11 +843,11 @@ enum
|
||||
|
||||
extern void doloop_optimize_loops (void);
|
||||
extern void move_loop_invariants (void);
|
||||
extern vec<basic_block> get_loop_hot_path (const struct loop *loop);
|
||||
extern vec<basic_block> get_loop_hot_path (const class loop *loop);
|
||||
|
||||
/* Returns the outermost loop of the loop nest that contains LOOP.*/
|
||||
static inline struct loop *
|
||||
loop_outermost (struct loop *loop)
|
||||
static inline class loop *
|
||||
loop_outermost (class loop *loop)
|
||||
{
|
||||
unsigned n = vec_safe_length (loop->superloops);
|
||||
|
||||
@ -857,13 +857,13 @@ loop_outermost (struct loop *loop)
|
||||
return (*loop->superloops)[1];
|
||||
}
|
||||
|
||||
extern void record_niter_bound (struct loop *, const widest_int &, bool, bool);
|
||||
extern HOST_WIDE_INT get_estimated_loop_iterations_int (struct loop *);
|
||||
extern HOST_WIDE_INT get_max_loop_iterations_int (const struct loop *);
|
||||
extern HOST_WIDE_INT get_likely_max_loop_iterations_int (struct loop *);
|
||||
extern bool get_estimated_loop_iterations (struct loop *loop, widest_int *nit);
|
||||
extern bool get_max_loop_iterations (const struct loop *loop, widest_int *nit);
|
||||
extern bool get_likely_max_loop_iterations (struct loop *loop, widest_int *nit);
|
||||
extern void record_niter_bound (class loop *, const widest_int &, bool, bool);
|
||||
extern HOST_WIDE_INT get_estimated_loop_iterations_int (class loop *);
|
||||
extern HOST_WIDE_INT get_max_loop_iterations_int (const class loop *);
|
||||
extern HOST_WIDE_INT get_likely_max_loop_iterations_int (class loop *);
|
||||
extern bool get_estimated_loop_iterations (class loop *loop, widest_int *nit);
|
||||
extern bool get_max_loop_iterations (const class loop *loop, widest_int *nit);
|
||||
extern bool get_likely_max_loop_iterations (class loop *loop, widest_int *nit);
|
||||
extern int bb_loop_depth (const_basic_block);
|
||||
|
||||
/* Converts VAL to widest_int. */
|
||||
|
@ -41,7 +41,7 @@ struct target_cfgloop *this_target_cfgloop = &default_target_cfgloop;
|
||||
/* Checks whether BB is executed exactly once in each LOOP iteration. */
|
||||
|
||||
bool
|
||||
just_once_each_iteration_p (const struct loop *loop, const_basic_block bb)
|
||||
just_once_each_iteration_p (const class loop *loop, const_basic_block bb)
|
||||
{
|
||||
/* It must be executed at least once each iteration. */
|
||||
if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
|
||||
@ -81,7 +81,7 @@ mark_irreducible_loops (void)
|
||||
unsigned depth;
|
||||
struct graph *g;
|
||||
int num = number_of_loops (cfun);
|
||||
struct loop *cloop;
|
||||
class loop *cloop;
|
||||
bool irred_loop_found = false;
|
||||
int i;
|
||||
|
||||
@ -173,7 +173,7 @@ mark_irreducible_loops (void)
|
||||
|
||||
/* Counts number of insns inside LOOP. */
|
||||
int
|
||||
num_loop_insns (const struct loop *loop)
|
||||
num_loop_insns (const class loop *loop)
|
||||
{
|
||||
basic_block *bbs, bb;
|
||||
unsigned i, ninsns = 0;
|
||||
@ -197,7 +197,7 @@ num_loop_insns (const struct loop *loop)
|
||||
|
||||
/* Counts number of insns executed on average per iteration LOOP. */
|
||||
int
|
||||
average_num_loop_insns (const struct loop *loop)
|
||||
average_num_loop_insns (const class loop *loop)
|
||||
{
|
||||
basic_block *bbs, bb;
|
||||
unsigned i, binsns;
|
||||
@ -238,7 +238,7 @@ average_num_loop_insns (const struct loop *loop)
|
||||
return -1 in those scenarios. */
|
||||
|
||||
gcov_type
|
||||
expected_loop_iterations_unbounded (const struct loop *loop,
|
||||
expected_loop_iterations_unbounded (const class loop *loop,
|
||||
bool *read_profile_p,
|
||||
bool by_profile_only)
|
||||
{
|
||||
@ -310,7 +310,7 @@ expected_loop_iterations_unbounded (const struct loop *loop,
|
||||
by REG_BR_PROB_BASE. */
|
||||
|
||||
unsigned
|
||||
expected_loop_iterations (struct loop *loop)
|
||||
expected_loop_iterations (class loop *loop)
|
||||
{
|
||||
gcov_type expected = expected_loop_iterations_unbounded (loop);
|
||||
return (expected > REG_BR_PROB_BASE ? REG_BR_PROB_BASE : expected);
|
||||
@ -319,9 +319,9 @@ expected_loop_iterations (struct loop *loop)
|
||||
/* Returns the maximum level of nesting of subloops of LOOP. */
|
||||
|
||||
unsigned
|
||||
get_loop_level (const struct loop *loop)
|
||||
get_loop_level (const class loop *loop)
|
||||
{
|
||||
const struct loop *ploop;
|
||||
const class loop *ploop;
|
||||
unsigned mx = 0, l;
|
||||
|
||||
for (ploop = loop->inner; ploop; ploop = ploop->next)
|
||||
@ -463,7 +463,7 @@ mark_loop_exit_edges (void)
|
||||
to noreturn call. */
|
||||
|
||||
edge
|
||||
single_likely_exit (struct loop *loop)
|
||||
single_likely_exit (class loop *loop)
|
||||
{
|
||||
edge found = single_exit (loop);
|
||||
vec<edge> exits;
|
||||
@ -500,7 +500,7 @@ single_likely_exit (struct loop *loop)
|
||||
header != latch, latch is the 1-st block. */
|
||||
|
||||
vec<basic_block>
|
||||
get_loop_hot_path (const struct loop *loop)
|
||||
get_loop_hot_path (const class loop *loop)
|
||||
{
|
||||
basic_block bb = loop->header;
|
||||
vec<basic_block> path = vNULL;
|
||||
|
@ -32,13 +32,13 @@ along with GCC; see the file COPYING3. If not see
|
||||
#include "tree-ssa-loop-manip.h"
|
||||
#include "dumpfile.h"
|
||||
|
||||
static void copy_loops_to (struct loop **, int,
|
||||
struct loop *);
|
||||
static void copy_loops_to (class loop **, int,
|
||||
class loop *);
|
||||
static void loop_redirect_edge (edge, basic_block);
|
||||
static void remove_bbs (basic_block *, int);
|
||||
static bool rpe_enum_p (const_basic_block, const void *);
|
||||
static int find_path (edge, basic_block **);
|
||||
static void fix_loop_placements (struct loop *, bool *);
|
||||
static void fix_loop_placements (class loop *, bool *);
|
||||
static bool fix_bb_placement (basic_block);
|
||||
static void fix_bb_placements (basic_block, bool *, bitmap);
|
||||
|
||||
@ -89,7 +89,7 @@ fix_bb_placement (basic_block bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
struct loop *loop = current_loops->tree_root, *act;
|
||||
class loop *loop = current_loops->tree_root, *act;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
@ -122,12 +122,12 @@ fix_bb_placement (basic_block bb)
|
||||
invalidate the information about irreducible regions. */
|
||||
|
||||
static bool
|
||||
fix_loop_placement (struct loop *loop, bool *irred_invalidated)
|
||||
fix_loop_placement (class loop *loop, bool *irred_invalidated)
|
||||
{
|
||||
unsigned i;
|
||||
edge e;
|
||||
vec<edge> exits = get_loop_exit_edges (loop);
|
||||
struct loop *father = current_loops->tree_root, *act;
|
||||
class loop *father = current_loops->tree_root, *act;
|
||||
bool ret = false;
|
||||
|
||||
FOR_EACH_VEC_ELT (exits, i, e)
|
||||
@ -182,7 +182,7 @@ fix_bb_placements (basic_block from,
|
||||
bitmap loop_closed_ssa_invalidated)
|
||||
{
|
||||
basic_block *queue, *qtop, *qbeg, *qend;
|
||||
struct loop *base_loop, *target_loop;
|
||||
class loop *base_loop, *target_loop;
|
||||
edge e;
|
||||
|
||||
/* We pass through blocks back-reachable from FROM, testing whether some
|
||||
@ -255,7 +255,7 @@ fix_bb_placements (basic_block from,
|
||||
FOR_EACH_EDGE (e, ei, from->preds)
|
||||
{
|
||||
basic_block pred = e->src;
|
||||
struct loop *nca;
|
||||
class loop *nca;
|
||||
|
||||
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
|
||||
*irred_invalidated = true;
@@ -307,7 +307,7 @@ remove_path (edge e, bool *irred_invalidated,
int i, nrem, n_bord_bbs;
bool local_irred_invalidated = false;
edge_iterator ei;
-struct loop *l, *f;
+class loop *l, *f;

if (! irred_invalidated)
irred_invalidated = &local_irred_invalidated;
@@ -427,7 +427,7 @@ remove_path (edge e, bool *irred_invalidated,
/* Creates place for a new LOOP in loops structure of FN. */

void
-place_new_loop (struct function *fn, struct loop *loop)
+place_new_loop (struct function *fn, class loop *loop)
{
loop->num = number_of_loops (fn);
vec_safe_push (loops_for_fn (fn)->larray, loop);
@@ -438,11 +438,11 @@ place_new_loop (struct function *fn, struct loop *loop)
outer. */

void
-add_loop (struct loop *loop, struct loop *outer)
+add_loop (class loop *loop, class loop *outer)
{
basic_block *bbs;
int i, n;
-struct loop *subloop;
+class loop *subloop;
edge e;
edge_iterator ei;

@@ -490,7 +490,7 @@ add_loop (struct loop *loop, struct loop *outer)
/* Scale profile of loop by P. */

void
-scale_loop_frequencies (struct loop *loop, profile_probability p)
+scale_loop_frequencies (class loop *loop, profile_probability p)
{
basic_block *bbs;

@@ -508,7 +508,7 @@ scale_loop_frequencies (struct loop *loop, profile_probability p)
they need to be scaled synchronously. */

void
-scale_loop_profile (struct loop *loop, profile_probability p,
+scale_loop_profile (class loop *loop, profile_probability p,
gcov_type iteration_bound)
{
edge e, preheader_e;
@@ -618,7 +618,7 @@ scale_loop_profile (struct loop *loop, profile_probability p,
/* Recompute dominance information for basic blocks outside LOOP. */

static void
-update_dominators_in_loop (struct loop *loop)
+update_dominators_in_loop (class loop *loop)
{
vec<basic_block> dom_bbs = vNULL;
basic_block *body;
@@ -763,17 +763,17 @@ create_empty_if_region_on_edge (edge entry_edge, tree condition)
should be used only when the UPPER_BOUND expression is a loop
invariant. */

-struct loop *
+class loop *
create_empty_loop_on_edge (edge entry_edge,
tree initial_value,
tree stride, tree upper_bound,
tree iv,
tree *iv_before,
tree *iv_after,
-struct loop *outer)
+class loop *outer)
{
basic_block loop_header, loop_latch, succ_bb, pred_bb;
-struct loop *loop;
+class loop *loop;
gimple_stmt_iterator gsi;
gimple_seq stmts;
gcond *cond_expr;
@@ -857,7 +857,7 @@ create_empty_loop_on_edge (edge entry_edge,
Returns the newly created loop. Frequencies and counts in the new loop
are scaled by FALSE_SCALE and in the old one by TRUE_SCALE. */

-struct loop *
+class loop *
loopify (edge latch_edge, edge header_edge,
basic_block switch_bb, edge true_edge, edge false_edge,
bool redirect_all_edges, profile_probability true_scale,
@@ -865,8 +865,8 @@ loopify (edge latch_edge, edge header_edge,
{
basic_block succ_bb = latch_edge->dest;
basic_block pred_bb = header_edge->src;
-struct loop *loop = alloc_loop ();
-struct loop *outer = loop_outer (succ_bb->loop_father);
+class loop *loop = alloc_loop ();
+class loop *outer = loop_outer (succ_bb->loop_father);
profile_count cnt;

loop->header = header_edge->dest;
@@ -923,11 +923,11 @@ loopify (edge latch_edge, edge header_edge,
basic blocks that had non-trivial update on their loop_father.*/

void
-unloop (struct loop *loop, bool *irred_invalidated,
+unloop (class loop *loop, bool *irred_invalidated,
bitmap loop_closed_ssa_invalidated)
{
basic_block *body;
-struct loop *ploop;
+class loop *ploop;
unsigned i, n;
basic_block latch = loop->latch;
bool dummy = false;
@@ -978,9 +978,9 @@ unloop (struct loop *loop, bool *irred_invalidated,
invalidate the information about irreducible regions. */

static void
-fix_loop_placements (struct loop *loop, bool *irred_invalidated)
+fix_loop_placements (class loop *loop, bool *irred_invalidated)
{
-struct loop *outer;
+class loop *outer;

while (loop_outer (loop))
{
@@ -1003,7 +1003,7 @@ fix_loop_placements (struct loop *loop, bool *irred_invalidated)
the loop into its duplicate. */

void
-copy_loop_info (struct loop *loop, struct loop *target)
+copy_loop_info (class loop *loop, class loop *target)
{
gcc_checking_assert (!target->any_upper_bound && !target->any_estimate);
target->any_upper_bound = loop->any_upper_bound;
@@ -1031,10 +1031,10 @@ copy_loop_info (struct loop *loop, struct loop *target)
created loop into loops structure. If AFTER is non-null
the new loop is added at AFTER->next, otherwise in front of TARGETs
sibling list. */
-struct loop *
-duplicate_loop (struct loop *loop, struct loop *target, struct loop *after)
+class loop *
+duplicate_loop (class loop *loop, class loop *target, class loop *after)
{
-struct loop *cloop;
+class loop *cloop;
cloop = alloc_loop ();
place_new_loop (cfun, cloop);

@@ -1053,9 +1053,9 @@ duplicate_loop (struct loop *loop, struct loop *target, struct loop *after)
newly created loops into loop tree at the end of TARGETs sibling
list in the original order. */
void
-duplicate_subloops (struct loop *loop, struct loop *target)
+duplicate_subloops (class loop *loop, class loop *target)
{
-struct loop *aloop, *cloop, *tail;
+class loop *aloop, *cloop, *tail;

for (tail = target->inner; tail && tail->next; tail = tail->next)
;
@@ -1072,9 +1072,9 @@ duplicate_subloops (struct loop *loop, struct loop *target)
into TARGET loop, placing newly created loops into loop tree adding
them to TARGETs sibling list at the end in order. */
static void
-copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
+copy_loops_to (class loop **copied_loops, int n, class loop *target)
{
-struct loop *aloop, *tail;
+class loop *aloop, *tail;
int i;

for (tail = target->inner; tail && tail->next; tail = tail->next)
@@ -1100,7 +1100,7 @@ loop_redirect_edge (edge e, basic_block dest)

/* Check whether LOOP's body can be duplicated. */
bool
-can_duplicate_loop_p (const struct loop *loop)
+can_duplicate_loop_p (const class loop *loop)
{
int ret;
basic_block *bbs = get_loop_body (loop);
@@ -1124,13 +1124,13 @@ can_duplicate_loop_p (const struct loop *loop)
impossible. */

bool
-duplicate_loop_to_header_edge (struct loop *loop, edge e,
+duplicate_loop_to_header_edge (class loop *loop, edge e,
unsigned int ndupl, sbitmap wont_exit,
edge orig, vec<edge> *to_remove,
int flags)
{
-struct loop *target, *aloop;
-struct loop **orig_loops;
+class loop *target, *aloop;
+class loop **orig_loops;
unsigned n_orig_loops;
basic_block header = loop->header, latch = loop->latch;
basic_block *new_bbs, *bbs, *first_active;
@@ -1276,7 +1276,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
n_orig_loops = 0;
for (aloop = loop->inner; aloop; aloop = aloop->next)
n_orig_loops++;
-orig_loops = XNEWVEC (struct loop *, n_orig_loops);
+orig_loops = XNEWVEC (class loop *, n_orig_loops);
for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
orig_loops[i] = aloop;

@@ -1453,7 +1453,7 @@ mfb_keep_just (edge e)
/* True when a candidate preheader BLOCK has predecessors from LOOP. */

static bool
-has_preds_from_loop (basic_block block, struct loop *loop)
+has_preds_from_loop (basic_block block, class loop *loop)
{
edge e;
edge_iterator ei;
@@ -1473,7 +1473,7 @@ has_preds_from_loop (basic_block block, struct loop *loop)
The function also updates dominators. */

basic_block
-create_preheader (struct loop *loop, int flags)
+create_preheader (class loop *loop, int flags)
{
edge e;
basic_block dummy;
@@ -1573,7 +1573,7 @@ create_preheader (struct loop *loop, int flags)
void
create_preheaders (int flags)
{
-struct loop *loop;
+class loop *loop;

if (!current_loops)
return;
@@ -1588,7 +1588,7 @@ create_preheaders (int flags)
void
force_single_succ_latches (void)
{
-struct loop *loop;
+class loop *loop;
edge e;

FOR_EACH_LOOP (loop, 0)
@@ -1677,8 +1677,8 @@ lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
If PLACE_AFTER is true, we place the new loop after LOOP in the
instruction stream, otherwise it is placed before LOOP. */

-struct loop *
-loop_version (struct loop *loop,
+class loop *
+loop_version (class loop *loop,
void *cond_expr, basic_block *condition_bb,
profile_probability then_prob, profile_probability else_prob,
profile_probability then_scale, profile_probability else_scale,
@@ -1687,7 +1687,7 @@ loop_version (struct loop *loop,
basic_block first_head, second_head;
edge entry, latch_edge, true_edge, false_edge;
int irred_flag;
-struct loop *nloop;
+class loop *nloop;
basic_block cond_bb;

/* Record entry and latch edges for the loop */
@@ -35,30 +35,30 @@ enum
extern edge mfb_kj_edge;

extern bool remove_path (edge, bool * = NULL, bitmap = NULL);
-extern void place_new_loop (struct function *, struct loop *);
-extern void add_loop (struct loop *, struct loop *);
-extern void scale_loop_frequencies (struct loop *, profile_probability);
-extern void scale_loop_profile (struct loop *, profile_probability, gcov_type);
+extern void place_new_loop (struct function *, class loop *);
+extern void add_loop (class loop *, class loop *);
+extern void scale_loop_frequencies (class loop *, profile_probability);
+extern void scale_loop_profile (class loop *, profile_probability, gcov_type);
extern edge create_empty_if_region_on_edge (edge, tree);
-extern struct loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
-tree *, tree *, struct loop *);
-extern struct loop *loopify (edge, edge,
+extern class loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
+tree *, tree *, class loop *);
+extern class loop *loopify (edge, edge,
basic_block, edge, edge, bool,
profile_probability, profile_probability);
-extern void unloop (struct loop *, bool *, bitmap);
-extern void copy_loop_info (struct loop *loop, struct loop *target);
-extern struct loop * duplicate_loop (struct loop *, struct loop *,
-struct loop * = NULL);
-extern void duplicate_subloops (struct loop *, struct loop *);
-extern bool can_duplicate_loop_p (const struct loop *loop);
-extern bool duplicate_loop_to_header_edge (struct loop *, edge,
+extern void unloop (class loop *, bool *, bitmap);
+extern void copy_loop_info (class loop *loop, class loop *target);
+extern class loop * duplicate_loop (class loop *, class loop *,
+class loop * = NULL);
+extern void duplicate_subloops (class loop *, class loop *);
+extern bool can_duplicate_loop_p (const class loop *loop);
+extern bool duplicate_loop_to_header_edge (class loop *, edge,
unsigned, sbitmap, edge,
vec<edge> *, int);
extern bool mfb_keep_just (edge);
-basic_block create_preheader (struct loop *, int);
+basic_block create_preheader (class loop *, int);
extern void create_preheaders (int);
extern void force_single_succ_latches (void);
-struct loop * loop_version (struct loop *, void *,
+class loop * loop_version (class loop *, void *,
basic_block *,
profile_probability, profile_probability,
profile_probability, profile_probability, bool);
gcc/cgraph.h
@@ -912,9 +912,8 @@ struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
/* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */

-class GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
+struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
{
public:
friend class symbol_table;

/* Remove the node from cgraph and all inline clones inlined into it.
@@ -1506,7 +1505,7 @@ struct cgraph_node_set_def
typedef cgraph_node_set_def *cgraph_node_set;
typedef struct varpool_node_set_def *varpool_node_set;

-class varpool_node;
+struct varpool_node;

/* A varpool node set is a collection of varpool nodes. A varpool node
can appear in multiple sets. */
@@ -1620,7 +1619,7 @@ public:

/* LTO streaming. */
void stream_out (struct output_block *) const;
-void stream_in (struct lto_input_block *, struct data_in *data_in);
+void stream_in (class lto_input_block *, class data_in *data_in);

private:
bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
@@ -1679,7 +1678,7 @@ class GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
for_user)) cgraph_edge
{
public:
-friend class cgraph_node;
+friend struct cgraph_node;
friend class symbol_table;

/* Remove the edge in the cgraph. */
@@ -2078,9 +2077,9 @@ struct asmname_hasher : ggc_ptr_hash <symtab_node>
class GTY((tag ("SYMTAB"))) symbol_table
{
public:
-friend class symtab_node;
-friend class cgraph_node;
-friend class cgraph_edge;
+friend struct symtab_node;
+friend struct cgraph_node;
+friend struct cgraph_edge;

symbol_table (): cgraph_max_uid (1), cgraph_max_summary_id (0),
edges_max_uid (1), edges_max_summary_id (0)
@@ -36,7 +36,7 @@ along with GCC; see the file COPYING3. If not see
struct record_reference_ctx
{
bool only_vars;
-class varpool_node *varpool_node;
+struct varpool_node *varpool_node;
};

/* Walk tree and record all calls and references to functions/variables.

@@ -7829,7 +7829,7 @@ make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
For memory, assume that the desired extraction_mode and pos_mode
are the same as for a register operation, since at present we don't
have named patterns for aligned memory structures. */
-struct extraction_insn insn;
+class extraction_insn insn;
unsigned int inner_size;
if (GET_MODE_BITSIZE (inner_mode).is_constant (&inner_size)
&& get_best_reg_extraction_insn (&insn, pattern, inner_size, mode))

@@ -152,7 +152,7 @@ const xlogue_layout xlogue_layout::s_instances[XLOGUE_SET_COUNT] = {

/* Return an appropriate const instance of xlogue_layout based upon values
in cfun->machine and crtl. */
-const struct xlogue_layout &
+const class xlogue_layout &
xlogue_layout::get_instance ()
{
enum xlogue_stub_sets stub_set;

@@ -84,7 +84,7 @@ public:
return STUB_INDEX_OFFSET + m_stack_align_off_in;
}

-static const struct xlogue_layout &get_instance ();
+static const class xlogue_layout &get_instance ();
static unsigned count_stub_managed_regs ();
static bool is_stub_managed_reg (unsigned regno, unsigned count);

@@ -7689,7 +7689,7 @@ ix86_emit_outlined_ms2sysv_save (const struct ix86_frame &frame)
rtx_insn *insn;
rtx sym, addr;
rtx rax = gen_rtx_REG (word_mode, AX_REG);
-const struct xlogue_layout &xlogue = xlogue_layout::get_instance ();
+const class xlogue_layout &xlogue = xlogue_layout::get_instance ();

/* AL should only be live with sysv_abi. */
gcc_assert (!ix86_eax_live_at_start_p ());
@@ -8492,7 +8492,7 @@ ix86_emit_outlined_ms2sysv_restore (const struct ix86_frame &frame,
rtx sym, tmp;
rtx rsi = gen_rtx_REG (word_mode, SI_REG);
rtx r10 = NULL_RTX;
-const struct xlogue_layout &xlogue = xlogue_layout::get_instance ();
+const class xlogue_layout &xlogue = xlogue_layout::get_instance ();
HOST_WIDE_INT stub_ptr_offset = xlogue.get_stub_ptr_offset ();
HOST_WIDE_INT rsi_offset = frame.stack_realign_offset + stub_ptr_offset;
rtx rsi_frame_load = NULL_RTX;
@@ -21503,7 +21503,7 @@ ix86_noce_conversion_profitable_p (rtx_insn *seq, struct noce_if_info *if_info)
/* Implement targetm.vectorize.init_cost. */

static void *
-ix86_init_cost (struct loop *)
+ix86_init_cost (class loop *)
{
unsigned *cost = XNEWVEC (unsigned, 3);
cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
@@ -21514,7 +21514,7 @@ ix86_init_cost (struct loop *)

static unsigned
ix86_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
-struct _stmt_vec_info *stmt_info, int misalign,
+class _stmt_vec_info *stmt_info, int misalign,
enum vect_cost_model_location where)
{
unsigned *cost = (unsigned *) data;
@@ -21942,7 +21942,7 @@ ix86_simd_clone_usable (struct cgraph_node *node)
(value 32 is used) as a heuristic. */

static unsigned
-ix86_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
+ix86_loop_unroll_adjust (unsigned nunroll, class loop *loop)
{
basic_block *bbs;
rtx_insn *insn;

@@ -482,7 +482,8 @@ AC_ARG_ENABLE(build-format-warnings,
AS_IF([test $enable_build_format_warnings = no],
[wf_opt=-Wno-format],[wf_opt=])
ACX_PROG_CXX_WARNING_OPTS(
-m4_quote(m4_do([-W -Wall -Wno-narrowing -Wwrite-strings ],
+m4_quote(m4_do([-W -Wall -Wclass-is-pod -Wmismatched-tags ],
+[-Wno-narrowing -Wstruct-not-pod -Wwrite-strings ],
[-Wcast-qual -Wno-error=format-diag $wf_opt])),
[loose_warn])
ACX_PROG_CC_WARNING_OPTS(
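Note: the configure.ac hunk above only wires the new class-key warnings into the options probed for building GCC itself. As a rough illustration of the kind of inconsistency -Wmismatched-tags is meant to catch (a minimal sketch; the type name below is made up and the diagnostic behavior is an assumption based on the option's name), a declaration and a definition of the same type using different class-keys would be flagged even though the code is valid C++:

// Hypothetical example of the tag mismatch targeted by -Wmismatched-tags.
class loop_demo;              // forward declaration uses 'class' ...

struct loop_demo              // ... while the definition uses 'struct':
{                             // valid C++, but the class-keys disagree.
  int num;
};

// Both declarations name the same type, so ordinary use is unaffected;
// the warning only asks for a consistent class-key across the tree.
loop_demo *
make_loop_demo ()
{
  return new loop_demo ();
}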
@@ -47,9 +47,9 @@ typedef int64_t gcov_type;
typedef uint64_t gcov_type_unsigned;

struct bitmap_obstack;
-struct bitmap_head;
-typedef struct bitmap_head *bitmap;
-typedef const struct bitmap_head *const_bitmap;
+class bitmap_head;
+typedef class bitmap_head *bitmap;
+typedef const class bitmap_head *const_bitmap;
struct simple_bitmap_def;
typedef struct simple_bitmap_def *sbitmap;
typedef const struct simple_bitmap_def *const_sbitmap;
@@ -65,7 +65,7 @@ template<typename> class opt_mode;
typedef opt_mode<scalar_mode> opt_scalar_mode;
typedef opt_mode<scalar_int_mode> opt_scalar_int_mode;
typedef opt_mode<scalar_float_mode> opt_scalar_float_mode;
-template<typename> class pod_mode;
+template<typename> struct pod_mode;
typedef pod_mode<scalar_mode> scalar_mode_pod;
typedef pod_mode<scalar_int_mode> scalar_int_mode_pod;
typedef pod_mode<fixed_size_mode> fixed_size_mode_pod;
@@ -73,19 +73,19 @@ typedef pod_mode<fixed_size_mode> fixed_size_mode_pod;
/* Subclasses of rtx_def, using indentation to show the class
hierarchy, along with the relevant invariant.
Where possible, keep this list in the same order as in rtl.def. */
-class rtx_def;
-class rtx_expr_list; /* GET_CODE (X) == EXPR_LIST */
-class rtx_insn_list; /* GET_CODE (X) == INSN_LIST */
-class rtx_sequence; /* GET_CODE (X) == SEQUENCE */
-class rtx_insn;
-class rtx_debug_insn; /* DEBUG_INSN_P (X) */
-class rtx_nonjump_insn; /* NONJUMP_INSN_P (X) */
-class rtx_jump_insn; /* JUMP_P (X) */
-class rtx_call_insn; /* CALL_P (X) */
-class rtx_jump_table_data; /* JUMP_TABLE_DATA_P (X) */
-class rtx_barrier; /* BARRIER_P (X) */
-class rtx_code_label; /* LABEL_P (X) */
-class rtx_note; /* NOTE_P (X) */
+struct rtx_def;
+struct rtx_expr_list; /* GET_CODE (X) == EXPR_LIST */
+struct rtx_insn_list; /* GET_CODE (X) == INSN_LIST */
+struct rtx_sequence; /* GET_CODE (X) == SEQUENCE */
+struct rtx_insn;
+struct rtx_debug_insn; /* DEBUG_INSN_P (X) */
+struct rtx_nonjump_insn; /* NONJUMP_INSN_P (X) */
+struct rtx_jump_insn; /* JUMP_P (X) */
+struct rtx_call_insn; /* CALL_P (X) */
+struct rtx_jump_table_data; /* JUMP_TABLE_DATA_P (X) */
+struct rtx_barrier; /* BARRIER_P (X) */
+struct rtx_code_label; /* LABEL_P (X) */
+struct rtx_note; /* NOTE_P (X) */

struct rtvec_def;
typedef struct rtvec_def *rtvec;
@@ -138,9 +138,9 @@ struct gomp_teams;
/* Subclasses of symtab_node, using indentation to show the class
hierarchy. */

-class symtab_node;
+struct symtab_node;
struct cgraph_node;
-class varpool_node;
+struct varpool_node;

union section;
typedef union section section;
@@ -151,7 +151,7 @@ struct cl_option;
struct cl_decoded_option;
struct cl_option_handlers;
struct diagnostic_context;
-struct pretty_printer;
+class pretty_printer;

/* Address space number for named address space support. */
typedef unsigned char addr_space_t;
@@ -298,9 +298,9 @@ enum warn_strict_overflow_code
set yet). */
typedef int alias_set_type;

-struct edge_def;
-typedef struct edge_def *edge;
-typedef const struct edge_def *const_edge;
+class edge_def;
+typedef class edge_def *edge;
+typedef const class edge_def *const_edge;
struct basic_block_def;
typedef struct basic_block_def *basic_block;
typedef const struct basic_block_def *const_basic_block;
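Note: for forward declarations like the ones flipped above, the class-key carries no layout or linkage meaning in standard C++; 'struct X;' and 'class X;' declare the same type, and only the key used at the definition decides default access. A minimal sketch under that assumption (the names below are illustrative, not from the tree):

// A declaration-only header can use either key without changing the type.
struct edge_demo;                       // could equally be 'class edge_demo;'
typedef struct edge_demo *edge_demo_p;  // typedefs behave the same either way

// The definition elsewhere fixes default access; with 'class' the members
// below would need an explicit 'public:'.
struct edge_demo
{
  int src;
  int dest;
};

edge_demo_p
make_edge_demo ()
{
  return new edge_demo ();
}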
@@ -1,3 +1,10 @@
+2019-07-09  Martin Sebor  <msebor@redhat.com>

+PR c++/61339
+* cp-tree.h: Change class-key of PODs to struct and others to class.
+* search.c: Same.
+* semantics.c (finalize_nrv_r): Same.

2019-07-09  Martin Sebor  <msebor@redhat.com>

PR c++/61339

@@ -1275,7 +1275,7 @@ tree
lookup_member_fuzzy (tree xbasetype, tree name, bool want_type_p)
{
tree type = NULL_TREE, basetype_path = NULL_TREE;
-struct lookup_field_fuzzy_info lffi (want_type_p);
+class lookup_field_fuzzy_info lffi (want_type_p);

/* rval_binfo is the binfo associated with the found member, note,
this can be set with useful information, even when rval is not

@@ -4395,7 +4395,7 @@ public:
static tree
finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
{
-struct nrv_data *dp = (struct nrv_data *)data;
+class nrv_data *dp = (class nrv_data *)data;
tree_node **slot;

/* No need to walk into types. There wouldn't be any need to walk into
@@ -4453,7 +4453,7 @@ finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
void
finalize_nrv (tree *tp, tree var, tree result)
{
-struct nrv_data data;
+class nrv_data data;

/* Copy name from VAR to RESULT. */
DECL_NAME (result) = DECL_NAME (var);
@@ -33,7 +33,7 @@ along with GCC; see the file COPYING3. If not see
IB. Write the length to RLEN. */

static const char *
-string_for_index (struct data_in *data_in, unsigned int loc, unsigned int *rlen)
+string_for_index (class data_in *data_in, unsigned int loc, unsigned int *rlen)
{
unsigned int len;
const char *result;
@@ -62,8 +62,8 @@ string_for_index (struct data_in *data_in, unsigned int loc, unsigned int *rlen)
IB. Write the length to RLEN. */

const char *
-streamer_read_indexed_string (struct data_in *data_in,
-struct lto_input_block *ib, unsigned int *rlen)
+streamer_read_indexed_string (class data_in *data_in,
+class lto_input_block *ib, unsigned int *rlen)
{
return string_for_index (data_in, streamer_read_uhwi (ib), rlen);
}
@@ -72,7 +72,7 @@ streamer_read_indexed_string (struct data_in *data_in,
/* Read a NULL terminated string from the string table in DATA_IN. */

const char *
-streamer_read_string (struct data_in *data_in, struct lto_input_block *ib)
+streamer_read_string (class data_in *data_in, class lto_input_block *ib)
{
unsigned int len;
const char *ptr;
@@ -91,7 +91,7 @@ streamer_read_string (struct data_in *data_in, struct lto_input_block *ib)
Write the length to RLEN. */

const char *
-bp_unpack_indexed_string (struct data_in *data_in,
+bp_unpack_indexed_string (class data_in *data_in,
struct bitpack_d *bp, unsigned int *rlen)
{
return string_for_index (data_in, bp_unpack_var_len_unsigned (bp), rlen);
@@ -101,7 +101,7 @@ bp_unpack_indexed_string (struct data_in *data_in,
/* Read a NULL terminated string from the string table in DATA_IN. */

const char *
-bp_unpack_string (struct data_in *data_in, struct bitpack_d *bp)
+bp_unpack_string (class data_in *data_in, struct bitpack_d *bp)
{
unsigned int len;
const char *ptr;
@@ -119,7 +119,7 @@ bp_unpack_string (struct data_in *data_in, struct bitpack_d *bp)
/* Read an unsigned HOST_WIDE_INT number from IB. */

unsigned HOST_WIDE_INT
-streamer_read_uhwi (struct lto_input_block *ib)
+streamer_read_uhwi (class lto_input_block *ib)
{
unsigned HOST_WIDE_INT result;
int shift;
@@ -154,7 +154,7 @@ streamer_read_uhwi (struct lto_input_block *ib)
/* Read a HOST_WIDE_INT number from IB. */

HOST_WIDE_INT
-streamer_read_hwi (struct lto_input_block *ib)
+streamer_read_hwi (class lto_input_block *ib)
{
HOST_WIDE_INT result = 0;
int shift = 0;
@@ -178,7 +178,7 @@ streamer_read_hwi (struct lto_input_block *ib)
/* Read gcov_type value from IB. */

gcov_type
-streamer_read_gcov_count (struct lto_input_block *ib)
+streamer_read_gcov_count (class lto_input_block *ib)
{
gcov_type ret = streamer_read_hwi (ib);
return ret;
@@ -188,7 +188,7 @@ streamer_read_gcov_count (struct lto_input_block *ib)
input block IB. */

wide_int
-streamer_read_wide_int (struct lto_input_block *ib)
+streamer_read_wide_int (class lto_input_block *ib)
{
HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
int i;
@@ -203,7 +203,7 @@ streamer_read_wide_int (struct lto_input_block *ib)
input block IB. */

widest_int
-streamer_read_widest_int (struct lto_input_block *ib)
+streamer_read_widest_int (class lto_input_block *ib)
{
HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
int i;
@@ -73,18 +73,18 @@ void streamer_write_wide_int (struct output_block *, const wide_int &);
void streamer_write_widest_int (struct output_block *, const widest_int &);

/* In data-streamer-in.c */
-const char *streamer_read_string (struct data_in *, struct lto_input_block *);
-const char *streamer_read_indexed_string (struct data_in *,
-struct lto_input_block *,
+const char *streamer_read_string (class data_in *, class lto_input_block *);
+const char *streamer_read_indexed_string (class data_in *,
+class lto_input_block *,
unsigned int *);
-const char *bp_unpack_indexed_string (struct data_in *, struct bitpack_d *,
+const char *bp_unpack_indexed_string (class data_in *, struct bitpack_d *,
unsigned int *);
-const char *bp_unpack_string (struct data_in *, struct bitpack_d *);
-unsigned HOST_WIDE_INT streamer_read_uhwi (struct lto_input_block *);
-HOST_WIDE_INT streamer_read_hwi (struct lto_input_block *);
-gcov_type streamer_read_gcov_count (struct lto_input_block *);
-wide_int streamer_read_wide_int (struct lto_input_block *);
-widest_int streamer_read_widest_int (struct lto_input_block *);
+const char *bp_unpack_string (class data_in *, struct bitpack_d *);
+unsigned HOST_WIDE_INT streamer_read_uhwi (class lto_input_block *);
+HOST_WIDE_INT streamer_read_hwi (class lto_input_block *);
+gcov_type streamer_read_gcov_count (class lto_input_block *);
+wide_int streamer_read_wide_int (class lto_input_block *);
+widest_int streamer_read_widest_int (class lto_input_block *);

/* Returns a new bit-packing context for bit-packing into S. */
static inline struct bitpack_d
@@ -149,7 +149,7 @@ streamer_write_bitpack (struct bitpack_d *bp)

/* Returns a new bit-packing context for bit-unpacking from IB. */
static inline struct bitpack_d
-streamer_read_bitpack (struct lto_input_block *ib)
+streamer_read_bitpack (class lto_input_block *ib)
{
struct bitpack_d bp;
bp.word = streamer_read_uhwi (ib);
@@ -174,7 +174,7 @@ bp_unpack_value (struct bitpack_d *bp, unsigned nbits)
if (pos + nbits > BITS_PER_BITPACK_WORD)
{
bp->word = val
-= streamer_read_uhwi ((struct lto_input_block *)bp->stream);
+= streamer_read_uhwi ((class lto_input_block *)bp->stream);
bp->pos = nbits;
return val & mask;
}
@@ -218,7 +218,7 @@ streamer_write_char_stream (struct lto_output_stream *obs, char c)
/* Read byte from the input block. */

static inline unsigned char
-streamer_read_uchar (struct lto_input_block *ib)
+streamer_read_uchar (class lto_input_block *ib)
{
if (ib->p >= ib->len)
lto_section_overrun (ib);
@@ -248,7 +248,7 @@ streamer_write_hwi_in_range (struct lto_output_stream *obs,
to be compile time constant. PURPOSE is used for error reporting. */

static inline HOST_WIDE_INT
-streamer_read_hwi_in_range (struct lto_input_block *ib,
+streamer_read_hwi_in_range (class lto_input_block *ib,
const char *purpose,
HOST_WIDE_INT min,
HOST_WIDE_INT max)
@@ -337,7 +337,7 @@ streamer_write_record_start (struct output_block *ob, enum LTO_tags tag)
/* Return the next tag in the input block IB. */

static inline enum LTO_tags
-streamer_read_record_start (struct lto_input_block *ib)
+streamer_read_record_start (class lto_input_block *ib)
{
return streamer_read_enum (ib, LTO_tags, LTO_NUM_TAGS);
}
@@ -215,7 +215,7 @@ create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
{
int regno = REGNO (SET_DEST (set));
df_ref first_def;
-struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
+class df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

first_def = df_bb_regno_first_def_find (g->bb, regno);
gcc_assert (first_def);
@@ -288,7 +288,7 @@ add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)

if (flag_checking && DF_REF_ID (last_def) != DF_REF_ID (first_def))
{
-struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
+class df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
gcc_assert (!bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)));
}

@@ -369,7 +369,7 @@ static void
build_inter_loop_deps (ddg_ptr g)
{
unsigned rd_num;
-struct df_rd_bb_info *rd_bb_info;
+class df_rd_bb_info *rd_bb_info;
bitmap_iterator bi;

rd_bb_info = DF_RD_BB_INFO (g->bb);
@@ -475,7 +475,7 @@ build_intra_loop_deps (ddg_ptr g)
{
int i;
/* Hold the dependency analysis state during dependency calculations. */
-struct deps_desc tmp_deps;
+class deps_desc tmp_deps;
rtx_insn *head, *tail;

/* Build the dependence information, using the sched_analyze function. */
@@ -407,7 +407,7 @@ bitmap_obstack df_bitmap_obstack;
Functions to create, destroy and manipulate an instance of df.
----------------------------------------------------------------------------*/

-struct df_d *df;
+class df_d *df;

/* Add PROBLEM (and any dependent problems) to the DF instance. */

@@ -684,7 +684,7 @@ static unsigned int
rest_of_handle_df_initialize (void)
{
gcc_assert (!df);
-df = XCNEW (struct df_d);
+df = XCNEW (class df_d);
df->changeable_flags = 0;

bitmap_obstack_initialize (&df_bitmap_obstack);
@@ -1293,7 +1293,7 @@ df_analyze (void)
Returns the number of blocks which is always loop->num_nodes. */

static int
-loop_post_order_compute (int *post_order, struct loop *loop)
+loop_post_order_compute (int *post_order, class loop *loop)
{
edge_iterator *stack;
int sp;
@@ -1354,7 +1354,7 @@ loop_post_order_compute (int *post_order, struct loop *loop)
by LOOP. Returns the number of blocks which is always loop->num_nodes. */

static void
-loop_inverted_post_order_compute (vec<int> *post_order, struct loop *loop)
+loop_inverted_post_order_compute (vec<int> *post_order, class loop *loop)
{
basic_block bb;
edge_iterator *stack;
@@ -1419,7 +1419,7 @@ loop_inverted_post_order_compute (vec<int> *post_order, struct loop *loop)
/* Analyze dataflow info for the basic blocks contained in LOOP. */

void
-df_analyze_loop (struct loop *loop)
+df_analyze_loop (class loop *loop)
{
free (df->postorder);

@ -162,7 +162,7 @@ static void
|
||||
df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
|
||||
void *vbb_info)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
|
||||
class df_rd_bb_info *bb_info = (class df_rd_bb_info *) vbb_info;
|
||||
if (bb_info)
|
||||
{
|
||||
bitmap_clear (&bb_info->kill);
|
||||
@ -182,17 +182,17 @@ df_rd_alloc (bitmap all_blocks)
|
||||
{
|
||||
unsigned int bb_index;
|
||||
bitmap_iterator bi;
|
||||
struct df_rd_problem_data *problem_data;
|
||||
class df_rd_problem_data *problem_data;
|
||||
|
||||
if (df_rd->problem_data)
|
||||
{
|
||||
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
problem_data = (class df_rd_problem_data *) df_rd->problem_data;
|
||||
bitmap_clear (&problem_data->sparse_invalidated_by_call);
|
||||
bitmap_clear (&problem_data->dense_invalidated_by_call);
|
||||
}
|
||||
else
|
||||
{
|
||||
problem_data = XNEW (struct df_rd_problem_data);
|
||||
problem_data = XNEW (class df_rd_problem_data);
|
||||
df_rd->problem_data = problem_data;
|
||||
|
||||
bitmap_obstack_initialize (&problem_data->rd_bitmaps);
|
||||
@ -209,7 +209,7 @@ df_rd_alloc (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
|
||||
/* When bitmaps are already initialized, just clear them. */
|
||||
if (bb_info->kill.obstack)
|
||||
@ -283,7 +283,7 @@ df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
|
||||
of kill sets. */
|
||||
|
||||
static void
|
||||
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
|
||||
df_rd_bb_local_compute_process_def (class df_rd_bb_info *bb_info,
|
||||
df_ref def,
|
||||
int top_flag)
|
||||
{
|
||||
@ -340,7 +340,7 @@ static void
|
||||
df_rd_bb_local_compute (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
|
||||
bitmap_clear (&seen_in_block);
|
||||
@ -390,8 +390,8 @@ df_rd_local_compute (bitmap all_blocks)
|
||||
unsigned int bb_index;
|
||||
bitmap_iterator bi;
|
||||
unsigned int regno;
|
||||
struct df_rd_problem_data *problem_data
|
||||
= (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
class df_rd_problem_data *problem_data
|
||||
= (class df_rd_problem_data *) df_rd->problem_data;
|
||||
bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
|
||||
bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
|
||||
|
||||
@ -435,7 +435,7 @@ df_rd_init_solution (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
|
||||
bitmap_copy (&bb_info->out, &bb_info->gen);
|
||||
bitmap_clear (&bb_info->in);
|
||||
@ -456,8 +456,8 @@ df_rd_confluence_n (edge e)
|
||||
|
||||
if (e->flags & EDGE_EH)
|
||||
{
|
||||
struct df_rd_problem_data *problem_data
|
||||
= (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
class df_rd_problem_data *problem_data
|
||||
= (class df_rd_problem_data *) df_rd->problem_data;
|
||||
bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
|
||||
bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
|
||||
bitmap_iterator bi;
|
||||
@ -485,7 +485,7 @@ df_rd_confluence_n (edge e)
|
||||
static bool
|
||||
df_rd_transfer_function (int bb_index)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
unsigned int regno;
|
||||
bitmap_iterator bi;
|
||||
bitmap in = &bb_info->in;
|
||||
@ -499,12 +499,12 @@ df_rd_transfer_function (int bb_index)
|
||||
changed = bitmap_ior_and_compl (out, gen, in, kill);
|
||||
else
|
||||
{
|
||||
struct df_rd_problem_data *problem_data;
|
||||
class df_rd_problem_data *problem_data;
|
||||
bitmap_head tmp;
|
||||
|
||||
/* Note that TMP is _not_ a temporary bitmap if we end up replacing
|
||||
OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
|
||||
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
problem_data = (class df_rd_problem_data *) df_rd->problem_data;
|
||||
bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
|
||||
|
||||
bitmap_and_compl (&tmp, in, kill);
|
||||
@ -528,7 +528,7 @@ df_rd_transfer_function (int bb_index)
|
||||
basic block, and mask out DEFs of registers that are not live.
|
||||
Computing the mask looks costly, but the benefit of the pruning
|
||||
outweighs the cost. */
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
|
||||
bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
|
||||
unsigned int regno;
|
||||
@ -550,8 +550,8 @@ df_rd_transfer_function (int bb_index)
|
||||
static void
|
||||
df_rd_free (void)
|
||||
{
|
||||
struct df_rd_problem_data *problem_data
|
||||
= (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
class df_rd_problem_data *problem_data
|
||||
= (class df_rd_problem_data *) df_rd->problem_data;
|
||||
|
||||
if (problem_data)
|
||||
{
|
||||
@ -571,8 +571,8 @@ df_rd_free (void)
|
||||
static void
|
||||
df_rd_start_dump (FILE *file)
|
||||
{
|
||||
struct df_rd_problem_data *problem_data
|
||||
= (struct df_rd_problem_data *) df_rd->problem_data;
|
||||
class df_rd_problem_data *problem_data
|
||||
= (class df_rd_problem_data *) df_rd->problem_data;
|
||||
unsigned int m = DF_REG_SIZE (df);
|
||||
unsigned int regno;
|
||||
|
||||
@ -644,7 +644,7 @@ df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
|
||||
static void
|
||||
df_rd_top_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
|
||||
if (!bb_info)
|
||||
return;
|
||||
|
||||
@ -659,7 +659,7 @@ df_rd_top_dump (basic_block bb, FILE *file)
|
||||
static void
|
||||
df_rd_bottom_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
|
||||
if (!bb_info)
|
||||
return;
|
||||
|
||||
@ -692,7 +692,7 @@ static const struct df_problem problem_RD =
|
||||
NULL, /* Incremental solution verify start. */
|
||||
NULL, /* Incremental solution verify end. */
|
||||
NULL, /* Dependent problem. */
|
||||
sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
|
||||
sizeof (class df_rd_bb_info),/* Size of entry of block_info array. */
|
||||
TV_DF_RD, /* Timing variable. */
|
||||
true /* Reset blocks on dropping out of blocks_to_analyze. */
|
||||
};
|
||||
@ -734,7 +734,7 @@ static void
|
||||
df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
|
||||
void *vbb_info)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
|
||||
class df_lr_bb_info *bb_info = (class df_lr_bb_info *) vbb_info;
|
||||
if (bb_info)
|
||||
{
|
||||
bitmap_clear (&bb_info->use);
|
||||
@ -770,7 +770,7 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
|
||||
/* When bitmaps are already initialized, just clear them. */
|
||||
if (bb_info->use.obstack)
|
||||
@ -801,7 +801,7 @@ df_lr_reset (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
gcc_assert (bb_info);
|
||||
bitmap_clear (&bb_info->in);
|
||||
bitmap_clear (&bb_info->out);
|
||||
@ -815,7 +815,7 @@ static void
|
||||
df_lr_bb_local_compute (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
df_ref def, use;
|
||||
|
||||
@ -930,7 +930,7 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
|
||||
{
|
||||
/* The exit block is special for this problem and its bits are
|
||||
computed from thin air. */
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
|
||||
bitmap_copy (&bb_info->use, df->exit_block_uses);
|
||||
}
|
||||
else
|
||||
@ -951,7 +951,7 @@ df_lr_init (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
bitmap_copy (&bb_info->in, &bb_info->use);
|
||||
bitmap_clear (&bb_info->out);
|
||||
}
|
||||
@ -997,7 +997,7 @@ df_lr_confluence_n (edge e)
|
||||
static bool
|
||||
df_lr_transfer_function (int bb_index)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
|
||||
bitmap in = &bb_info->in;
|
||||
bitmap out = &bb_info->out;
|
||||
bitmap use = &bb_info->use;
|
||||
@ -1069,7 +1069,7 @@ df_lr_free (void)
|
||||
static void
|
||||
df_lr_top_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
struct df_lr_problem_data *problem_data;
|
||||
if (!bb_info)
|
||||
return;
|
||||
@ -1097,7 +1097,7 @@ df_lr_top_dump (basic_block bb, FILE *file)
|
||||
static void
|
||||
df_lr_bottom_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
struct df_lr_problem_data *problem_data;
|
||||
if (!bb_info)
|
||||
return;
|
||||
@ -1214,7 +1214,7 @@ static const struct df_problem problem_LR =
|
||||
df_lr_verify_solution_start,/* Incremental solution verify start. */
|
||||
df_lr_verify_solution_end, /* Incremental solution verify end. */
|
||||
NULL, /* Dependent problem. */
|
||||
sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
|
||||
sizeof (class df_lr_bb_info),/* Size of entry of block_info array. */
|
||||
TV_DF_LR, /* Timing variable. */
|
||||
false /* Reset blocks on dropping out of blocks_to_analyze. */
|
||||
};
|
||||
@ -1254,7 +1254,7 @@ df_lr_verify_transfer_functions (void)
|
||||
|
||||
FOR_ALL_BB_FN (bb, cfun)
|
||||
{
|
||||
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
class df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
|
||||
bitmap_set_bit (&all_blocks, bb->index);
|
||||
|
||||
if (bb_info)
|
||||
@ -1340,7 +1340,7 @@ static void
|
||||
df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
|
||||
void *vbb_info)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
|
||||
class df_live_bb_info *bb_info = (class df_live_bb_info *) vbb_info;
|
||||
if (bb_info)
|
||||
{
|
||||
bitmap_clear (&bb_info->gen);
|
||||
@ -1378,7 +1378,7 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
|
||||
/* When bitmaps are already initialized, just clear them. */
|
||||
if (bb_info->kill.obstack)
|
||||
@ -1408,7 +1408,7 @@ df_live_reset (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
gcc_assert (bb_info);
|
||||
bitmap_clear (&bb_info->in);
|
||||
bitmap_clear (&bb_info->out);
|
||||
@ -1422,7 +1422,7 @@ static void
|
||||
df_live_bb_local_compute (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
df_ref def;
|
||||
int luid = 0;
|
||||
@ -1498,8 +1498,8 @@ df_live_init (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
|
||||
/* No register may reach a location where it is not used. Thus
|
||||
we trim the rr result to the places where it is used. */
|
||||
@ -1528,8 +1528,8 @@ df_live_confluence_n (edge e)
|
||||
static bool
|
||||
df_live_transfer_function (int bb_index)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
bitmap in = &bb_info->in;
|
||||
bitmap out = &bb_info->out;
|
||||
bitmap gen = &bb_info->gen;
|
||||
@ -1560,8 +1560,8 @@ df_live_finalize (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
|
||||
class df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
|
||||
class df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
|
||||
|
||||
/* No register may reach a location where it is not used. Thus
|
||||
we trim the rr result to the places where it is used. */
|
||||
@ -1601,7 +1601,7 @@ df_live_free (void)
|
||||
static void
|
||||
df_live_top_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
struct df_live_problem_data *problem_data;
|
||||
|
||||
if (!bb_info)
|
||||
@ -1630,7 +1630,7 @@ df_live_top_dump (basic_block bb, FILE *file)
|
||||
static void
|
||||
df_live_bottom_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
struct df_live_problem_data *problem_data;
|
||||
|
||||
if (!bb_info)
|
||||
@ -1742,7 +1742,7 @@ static const struct df_problem problem_LIVE =
|
||||
df_live_verify_solution_start,/* Incremental solution verify start. */
|
||||
df_live_verify_solution_end, /* Incremental solution verify end. */
|
||||
&problem_LR, /* Dependent problem. */
|
||||
sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
|
||||
sizeof (class df_live_bb_info),/* Size of entry of block_info array. */
|
||||
TV_DF_LIVE, /* Timing variable. */
|
||||
false /* Reset blocks on dropping out of blocks_to_analyze. */
|
||||
};
|
||||
@ -1797,7 +1797,7 @@ df_live_verify_transfer_functions (void)
|
||||
|
||||
FOR_ALL_BB_FN (bb, cfun)
|
||||
{
|
||||
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
class df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
|
||||
bitmap_set_bit (&all_blocks, bb->index);
|
||||
|
||||
if (bb_info)
|
||||
@ -1859,7 +1859,7 @@ static void
|
||||
df_mir_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
|
||||
void *vbb_info)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = (struct df_mir_bb_info *) vbb_info;
|
||||
class df_mir_bb_info *bb_info = (class df_mir_bb_info *) vbb_info;
|
||||
if (bb_info)
|
||||
{
|
||||
bitmap_clear (&bb_info->gen);
|
||||
@ -1896,7 +1896,7 @@ df_mir_alloc (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
|
||||
/* When bitmaps are already initialized, just clear them. */
|
||||
if (bb_info->kill.obstack)
|
||||
@ -1929,7 +1929,7 @@ df_mir_reset (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
|
||||
gcc_assert (bb_info);
|
||||
|
||||
@ -1947,7 +1947,7 @@ static void
|
||||
df_mir_bb_local_compute (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
int luid = 0;
|
||||
|
||||
@ -2011,7 +2011,7 @@ df_mir_init (bitmap all_blocks)
|
||||
static void
|
||||
df_mir_confluence_0 (basic_block bb)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
|
||||
bitmap_clear (&bb_info->in);
|
||||
}
|
||||
@ -2039,7 +2039,7 @@ df_mir_confluence_n (edge e)
|
||||
static bool
|
||||
df_mir_transfer_function (int bb_index)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
|
||||
bitmap in = &bb_info->in;
|
||||
bitmap out = &bb_info->out;
|
||||
bitmap gen = &bb_info->gen;
|
||||
@ -2074,7 +2074,7 @@ df_mir_free (void)
|
||||
static void
|
||||
df_mir_top_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
|
||||
if (!bb_info)
|
||||
return;
|
||||
@ -2092,7 +2092,7 @@ df_mir_top_dump (basic_block bb, FILE *file)
|
||||
static void
|
||||
df_mir_bottom_dump (basic_block bb, FILE *file)
|
||||
{
|
||||
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
class df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
|
||||
|
||||
if (!bb_info)
|
||||
return;
|
||||
@ -2193,7 +2193,7 @@ static const struct df_problem problem_MIR =
|
||||
df_mir_verify_solution_start, /* Incremental solution verify start. */
|
||||
df_mir_verify_solution_end, /* Incremental solution verify end. */
|
||||
NULL, /* Dependent problem. */
|
||||
sizeof (struct df_mir_bb_info),/* Size of entry of block_info array. */
|
||||
sizeof (class df_mir_bb_info),/* Size of entry of block_info array. */
|
||||
TV_DF_MIR, /* Timing variable. */
|
||||
false /* Reset blocks on dropping out of blocks_to_analyze. */
|
||||
};
|
||||
@ -2456,7 +2456,7 @@ static void
|
||||
df_chain_create_bb (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
class df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
bitmap_head cpy;
|
||||
|
||||
@ -2711,7 +2711,7 @@ static void
|
||||
df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
|
||||
void *vbb_info)
|
||||
{
|
||||
struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
|
||||
class df_word_lr_bb_info *bb_info = (class df_word_lr_bb_info *) vbb_info;
|
||||
if (bb_info)
|
||||
{
|
||||
bitmap_clear (&bb_info->use);
|
||||
@ -2754,7 +2754,7 @@ df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
|
||||
{
|
||||
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
|
||||
/* When bitmaps are already initialized, just clear them. */
|
||||
if (bb_info->use.obstack)
|
||||
@ -2785,7 +2785,7 @@ df_word_lr_reset (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
gcc_assert (bb_info);
|
||||
bitmap_clear (&bb_info->in);
|
||||
bitmap_clear (&bb_info->out);
|
||||
@ -2851,7 +2851,7 @@ static void
|
||||
df_word_lr_bb_local_compute (unsigned int bb_index)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
|
||||
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
rtx_insn *insn;
|
||||
df_ref def, use;
|
||||
|
||||
@ -2918,7 +2918,7 @@ df_word_lr_init (bitmap all_blocks)
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
|
||||
{
|
||||
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
|
||||
bitmap_copy (&bb_info->in, &bb_info->use);
|
||||
bitmap_clear (&bb_info->out);
|
||||
}
|
||||
@ -2942,7 +2942,7 @@ df_word_lr_confluence_n (edge e)
|
||||
static bool
|
||||
df_word_lr_transfer_function (int bb_index)
|
||||
{
|
||||
-  struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
+  class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
   bitmap in = &bb_info->in;
   bitmap out = &bb_info->out;
   bitmap use = &bb_info->use;
@@ -2979,7 +2979,7 @@ df_word_lr_free (void)
 static void
 df_word_lr_top_dump (basic_block bb, FILE *file)
 {
-  struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
+  class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
   if (!bb_info)
     return;

@@ -2997,7 +2997,7 @@ df_word_lr_top_dump (basic_block bb, FILE *file)
 static void
 df_word_lr_bottom_dump (basic_block bb, FILE *file)
 {
-  struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
+  class df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
   if (!bb_info)
     return;

@@ -3032,7 +3032,7 @@ static const struct df_problem problem_WORD_LR =
   NULL, /* Incremental solution verify start. */
   NULL, /* Incremental solution verify end. */
   NULL, /* Dependent problem. */
-  sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
+  sizeof (class df_word_lr_bb_info),/* Size of entry of block_info array. */
   TV_DF_WORD_LR, /* Timing variable. */
   false /* Reset blocks on dropping out of blocks_to_analyze. */
 };
@@ -4348,7 +4348,7 @@ static void
 df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
                     void *vbb_info)
 {
-  struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
+  class df_md_bb_info *bb_info = (class df_md_bb_info *) vbb_info;
   if (bb_info)
     {
       bitmap_clear (&bb_info->kill);
@@ -4383,7 +4383,7 @@ df_md_alloc (bitmap all_blocks)

   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
     {
-      struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
+      class df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
       /* When bitmaps are already initialized, just clear them. */
       if (bb_info->init.obstack)
         {
@@ -4452,7 +4452,7 @@ df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
 }

 static void
-df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
+df_md_bb_local_compute_process_def (class df_md_bb_info *bb_info,
                                     df_ref def,
                                     int top_flag)
 {
@@ -4493,7 +4493,7 @@ static void
 df_md_bb_local_compute (unsigned int bb_index)
 {
   basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
-  struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
+  class df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
   rtx_insn *insn;

   /* Artificials are only hard regs. */
@@ -4571,7 +4571,7 @@ df_md_reset (bitmap all_blocks)

   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
     {
-      struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
+      class df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
       gcc_assert (bb_info);
       bitmap_clear (&bb_info->in);
       bitmap_clear (&bb_info->out);
@@ -4582,7 +4582,7 @@ static bool
 df_md_transfer_function (int bb_index)
 {
   basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
-  struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
+  class df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
   bitmap in = &bb_info->in;
   bitmap out = &bb_info->out;
   bitmap gen = &bb_info->gen;
@@ -4610,7 +4610,7 @@ df_md_init (bitmap all_blocks)

   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
     {
-      struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
+      class df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);

       bitmap_copy (&bb_info->in, &bb_info->init);
       df_md_transfer_function (bb_index);
@@ -4620,7 +4620,7 @@ df_md_init (bitmap all_blocks)
 static void
 df_md_confluence_0 (basic_block bb)
 {
-  struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
+  class df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
   bitmap_copy (&bb_info->in, &bb_info->init);
 }

@@ -4667,7 +4667,7 @@ df_md_free (void)
 static void
 df_md_top_dump (basic_block bb, FILE *file)
 {
-  struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
+  class df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
   if (!bb_info)
     return;

@@ -4686,7 +4686,7 @@ df_md_top_dump (basic_block bb, FILE *file)
 static void
 df_md_bottom_dump (basic_block bb, FILE *file)
 {
-  struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
+  class df_md_bb_info *bb_info = df_md_get_bb_info (bb->index);
   if (!bb_info)
     return;

@@ -4718,7 +4718,7 @@ static const struct df_problem problem_MD =
   NULL, /* Incremental solution verify start. */
   NULL, /* Incremental solution verify end. */
   NULL, /* Dependent problem. */
-  sizeof (struct df_md_bb_info),/* Size of entry of block_info array. */
+  sizeof (class df_md_bb_info),/* Size of entry of block_info array. */
   TV_DF_MD, /* Timing variable. */
   false /* Reset blocks on dropping out of blocks_to_analyze. */
 };

@@ -53,25 +53,25 @@ public:
   auto_vec<df_mw_hardreg *, 32> mw_vec;
 };

-static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
+static void df_ref_record (enum df_ref_class, class df_collection_rec *,
                            rtx, rtx *,
                            basic_block, struct df_insn_info *,
                            enum df_ref_type, int ref_flags);
-static void df_def_record_1 (struct df_collection_rec *, rtx *,
+static void df_def_record_1 (class df_collection_rec *, rtx *,
                              basic_block, struct df_insn_info *,
                              int ref_flags);
-static void df_defs_record (struct df_collection_rec *, rtx,
+static void df_defs_record (class df_collection_rec *, rtx,
                             basic_block, struct df_insn_info *,
                             int ref_flags);
-static void df_uses_record (struct df_collection_rec *,
+static void df_uses_record (class df_collection_rec *,
                             rtx *, enum df_ref_type,
                             basic_block, struct df_insn_info *,
                             int ref_flags);

 static void df_install_ref_incremental (df_ref);
-static void df_insn_refs_collect (struct df_collection_rec*,
+static void df_insn_refs_collect (class df_collection_rec*,
                                   basic_block, struct df_insn_info *);
-static void df_canonize_collection_rec (struct df_collection_rec *);
+static void df_canonize_collection_rec (class df_collection_rec *);

 static void df_get_regular_block_artificial_uses (bitmap);
 static void df_get_eh_block_artificial_uses (bitmap);
@@ -84,13 +84,13 @@ static void df_grow_ref_info (struct df_ref_info *, unsigned int);
 static void df_ref_chain_delete_du_chain (df_ref);
 static void df_ref_chain_delete (df_ref);

-static void df_refs_add_to_chains (struct df_collection_rec *,
+static void df_refs_add_to_chains (class df_collection_rec *,
                                    basic_block, rtx_insn *, unsigned int);

-static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
+static bool df_insn_refs_verify (class df_collection_rec *, basic_block,
                                  rtx_insn *, bool);
-static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
-static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
+static void df_entry_block_defs_collect (class df_collection_rec *, bitmap);
+static void df_exit_block_uses_collect (class df_collection_rec *, bitmap);
 static void df_install_ref (df_ref, struct df_reg_info *,
                             struct df_ref_info *, bool);

@@ -983,7 +983,7 @@ df_insn_delete (rtx_insn *insn)
 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */

 static void
-df_free_collection_rec (struct df_collection_rec *collection_rec)
+df_free_collection_rec (class df_collection_rec *collection_rec)
 {
   unsigned int ix;
   struct df_scan_problem_data *problem_data
@@ -1014,7 +1014,7 @@ df_insn_rescan (rtx_insn *insn)
   unsigned int uid = INSN_UID (insn);
   struct df_insn_info *insn_info = NULL;
   basic_block bb = BLOCK_FOR_INSN (insn);
-  struct df_collection_rec collection_rec;
+  class df_collection_rec collection_rec;

   if ((!df) || (!INSN_P (insn)))
     return false;
@@ -1976,7 +1976,7 @@ df_notes_rescan (rtx_insn *insn)
 {
   basic_block bb = BLOCK_FOR_INSN (insn);
   rtx note;
-  struct df_collection_rec collection_rec;
+  class df_collection_rec collection_rec;
   unsigned int i;

   df_mw_hardreg_chain_delete_eq_uses (insn_info);
@@ -2269,7 +2269,7 @@ df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
 /* Sort and remove duplicates from the COLLECTION_REC. */

 static void
-df_canonize_collection_rec (struct df_collection_rec *collection_rec)
+df_canonize_collection_rec (class df_collection_rec *collection_rec)
 {
   df_sort_and_compress_refs (&collection_rec->def_vec);
   df_sort_and_compress_refs (&collection_rec->use_vec);
@@ -2405,7 +2405,7 @@ df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
    chains and update other necessary information. */

 static void
-df_refs_add_to_chains (struct df_collection_rec *collection_rec,
+df_refs_add_to_chains (class df_collection_rec *collection_rec,
                        basic_block bb, rtx_insn *insn, unsigned int flags)
 {
   if (insn)
@@ -2467,7 +2467,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,

 static df_ref
 df_ref_create_structure (enum df_ref_class cl,
-                         struct df_collection_rec *collection_rec,
+                         class df_collection_rec *collection_rec,
                          rtx reg, rtx *loc,
                          basic_block bb, struct df_insn_info *info,
                          enum df_ref_type ref_type,
@@ -2553,7 +2553,7 @@ df_ref_create_structure (enum df_ref_class cl,

 static void
 df_ref_record (enum df_ref_class cl,
-               struct df_collection_rec *collection_rec,
+               class df_collection_rec *collection_rec,
                rtx reg, rtx *loc,
                basic_block bb, struct df_insn_info *insn_info,
                enum df_ref_type ref_type,
@@ -2625,7 +2625,7 @@ df_ref_record (enum df_ref_class cl,
    Any change here has to be matched in df_find_hard_reg_defs_1. */

 static void
-df_def_record_1 (struct df_collection_rec *collection_rec,
+df_def_record_1 (class df_collection_rec *collection_rec,
                  rtx *loc, basic_block bb, struct df_insn_info *insn_info,
                  int flags)
 {
@@ -2690,7 +2690,7 @@ df_def_record_1 (struct df_collection_rec *collection_rec,
    here has to be matched in df_find_hard_reg_defs. */

 static void
-df_defs_record (struct df_collection_rec *collection_rec,
+df_defs_record (class df_collection_rec *collection_rec,
                 rtx x, basic_block bb, struct df_insn_info *insn_info,
                 int flags)
 {
@@ -2796,7 +2796,7 @@ df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
 /* Process all the registers used in the rtx at address LOC. */

 static void
-df_uses_record (struct df_collection_rec *collection_rec,
+df_uses_record (class df_collection_rec *collection_rec,
                 rtx *loc, enum df_ref_type ref_type,
                 basic_block bb, struct df_insn_info *insn_info,
                 int flags)
@@ -3055,7 +3055,7 @@ df_uses_record (struct df_collection_rec *collection_rec,
 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */

 static void
-df_get_conditional_uses (struct df_collection_rec *collection_rec)
+df_get_conditional_uses (class df_collection_rec *collection_rec)
 {
   unsigned int ix;
   df_ref ref;
@@ -3079,7 +3079,7 @@ df_get_conditional_uses (struct df_collection_rec *collection_rec)
 /* Get call's extra defs and uses (track caller-saved registers). */

 static void
-df_get_call_refs (struct df_collection_rec *collection_rec,
+df_get_call_refs (class df_collection_rec *collection_rec,
                   basic_block bb,
                   struct df_insn_info *insn_info,
                   int flags)
@@ -3162,7 +3162,7 @@ df_get_call_refs (struct df_collection_rec *collection_rec,
    and reg chains. */

 static void
-df_insn_refs_collect (struct df_collection_rec *collection_rec,
+df_insn_refs_collect (class df_collection_rec *collection_rec,
                       basic_block bb, struct df_insn_info *insn_info)
 {
   rtx note;
@@ -3258,7 +3258,7 @@ df_recompute_luids (basic_block bb)
    to COLLECTION_REC. */

 static void
-df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
+df_bb_refs_collect (class df_collection_rec *collection_rec, basic_block bb)
 {
   collection_rec->def_vec.truncate (0);
   collection_rec->use_vec.truncate (0);
@@ -3558,7 +3558,7 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
    reference to include. */

 static void
-df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
+df_entry_block_defs_collect (class df_collection_rec *collection_rec,
                              bitmap entry_block_defs)
 {
   unsigned int i;
@@ -3580,7 +3580,7 @@ df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
 static void
 df_record_entry_block_defs (bitmap entry_block_defs)
 {
-  struct df_collection_rec collection_rec;
+  class df_collection_rec collection_rec;
   df_entry_block_defs_collect (&collection_rec, entry_block_defs);

   /* Process bb_refs chain */
@@ -3715,7 +3715,7 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
    It uses df->exit_block_uses to determine register to include. */

 static void
-df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
+df_exit_block_uses_collect (class df_collection_rec *collection_rec, bitmap exit_block_uses)
 {
   unsigned int i;
   bitmap_iterator bi;
@@ -3744,7 +3744,7 @@ df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exi
 static void
 df_record_exit_block_uses (bitmap exit_block_uses)
 {
-  struct df_collection_rec collection_rec;
+  class df_collection_rec collection_rec;
   df_exit_block_uses_collect (&collection_rec, exit_block_uses);

   /* Process bb_refs chain */
@@ -4052,7 +4052,7 @@ df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
    If ABORT_IF_FAIL is set, this function never returns false. */

 static bool
-df_insn_refs_verify (struct df_collection_rec *collection_rec,
+df_insn_refs_verify (class df_collection_rec *collection_rec,
                      basic_block bb,
                      rtx_insn *insn,
                      bool abort_if_fail)
@@ -4093,7 +4093,7 @@ df_bb_verify (basic_block bb)
 {
   rtx_insn *insn;
   struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
-  struct df_collection_rec collection_rec;
+  class df_collection_rec collection_rec;

   gcc_assert (bb_info);

diff --git a/gcc/df.h b/gcc/df.h
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3. If not see
 #include "timevar.h"

 struct dataflow;
-struct df_d;
+class df_d;
 struct df_problem;
 struct df_link;
 struct df_insn_info;
@@ -935,7 +935,7 @@ public:
 /* This is used for debugging and for the dumpers to find the latest
    instance so that the df info can be added to the dumps. This
    should not be used by regular code. */
-extern struct df_d *df;
+extern class df_d *df;
 #define df_scan (df->problems_by_index[DF_SCAN])
 #define df_rd (df->problems_by_index[DF_RD])
 #define df_lr (df->problems_by_index[DF_LR])
@@ -968,7 +968,7 @@ extern void df_remove_problem (struct dataflow *);
 extern void df_finish_pass (bool);
 extern void df_analyze_problem (struct dataflow *, bitmap, int *, int);
 extern void df_analyze ();
-extern void df_analyze_loop (struct loop *);
+extern void df_analyze_loop (class loop *);
 extern int df_get_n_blocks (enum df_flow_dir);
 extern int *df_get_postorder (enum df_flow_dir);
 extern void df_simple_dataflow (enum df_flow_dir, df_init_function,
@@ -1103,56 +1103,56 @@ df_scan_get_bb_info (unsigned int index)
     return NULL;
 }

-static inline struct df_rd_bb_info *
+static inline class df_rd_bb_info *
 df_rd_get_bb_info (unsigned int index)
 {
   if (index < df_rd->block_info_size)
-    return &((struct df_rd_bb_info *) df_rd->block_info)[index];
+    return &((class df_rd_bb_info *) df_rd->block_info)[index];
   else
     return NULL;
 }

-static inline struct df_lr_bb_info *
+static inline class df_lr_bb_info *
 df_lr_get_bb_info (unsigned int index)
 {
   if (index < df_lr->block_info_size)
-    return &((struct df_lr_bb_info *) df_lr->block_info)[index];
+    return &((class df_lr_bb_info *) df_lr->block_info)[index];
   else
     return NULL;
 }

-static inline struct df_md_bb_info *
+static inline class df_md_bb_info *
 df_md_get_bb_info (unsigned int index)
 {
   if (index < df_md->block_info_size)
-    return &((struct df_md_bb_info *) df_md->block_info)[index];
+    return &((class df_md_bb_info *) df_md->block_info)[index];
   else
     return NULL;
 }

-static inline struct df_live_bb_info *
+static inline class df_live_bb_info *
 df_live_get_bb_info (unsigned int index)
 {
   if (index < df_live->block_info_size)
-    return &((struct df_live_bb_info *) df_live->block_info)[index];
+    return &((class df_live_bb_info *) df_live->block_info)[index];
   else
     return NULL;
 }

-static inline struct df_word_lr_bb_info *
+static inline class df_word_lr_bb_info *
 df_word_lr_get_bb_info (unsigned int index)
 {
   if (index < df_word_lr->block_info_size)
-    return &((struct df_word_lr_bb_info *) df_word_lr->block_info)[index];
+    return &((class df_word_lr_bb_info *) df_word_lr->block_info)[index];
   else
     return NULL;
 }

-static inline struct df_mir_bb_info *
+static inline class df_mir_bb_info *
 df_mir_get_bb_info (unsigned int index)
 {
   if (index < df_mir->block_info_size)
-    return &((struct df_mir_bb_info *) df_mir->block_info)[index];
+    return &((class df_mir_bb_info *) df_mir->block_info)[index];
   else
     return NULL;
 }

@@ -4301,7 +4301,7 @@ with machine mode @var{mode}. The default version of this
 hook returns true for both @code{ptr_mode} and @code{Pmode}.
 @end deftypefn

-@deftypefn {Target Hook} bool TARGET_REF_MAY_ALIAS_ERRNO (struct ao_ref *@var{ref})
+@deftypefn {Target Hook} bool TARGET_REF_MAY_ALIAS_ERRNO (ao_ref *@var{ref})
 Define this to return nonzero if the memory reference @var{ref} may alias with the system C library errno location. The default version of this hook assumes the system C library errno location is either a declaration of type int or accessed by dereferencing a pointer to int.
 @end deftypefn

@@ -6052,11 +6052,11 @@ type @code{internal_fn}) should be considered expensive when the mask is
 all zeros. GCC can then try to branch around the instruction instead.
 @end deftypefn

-@deftypefn {Target Hook} {void *} TARGET_VECTORIZE_INIT_COST (struct loop *@var{loop_info})
+@deftypefn {Target Hook} {void *} TARGET_VECTORIZE_INIT_COST (class loop *@var{loop_info})
 This hook should initialize target-specific data structures in preparation for modeling the costs of vectorizing a loop or basic block. The default allocates three unsigned integers for accumulating costs for the prologue, body, and epilogue of the loop or basic block. If @var{loop_info} is non-NULL, it identifies the loop being vectorized; otherwise a single block is being vectorized.
 @end deftypefn

-@deftypefn {Target Hook} unsigned TARGET_VECTORIZE_ADD_STMT_COST (void *@var{data}, int @var{count}, enum vect_cost_for_stmt @var{kind}, struct _stmt_vec_info *@var{stmt_info}, int @var{misalign}, enum vect_cost_model_location @var{where})
+@deftypefn {Target Hook} unsigned TARGET_VECTORIZE_ADD_STMT_COST (void *@var{data}, int @var{count}, enum vect_cost_for_stmt @var{kind}, class _stmt_vec_info *@var{stmt_info}, int @var{misalign}, enum vect_cost_model_location @var{where})
 This hook should update the target-specific @var{data} in response to adding @var{count} copies of the given @var{kind} of statement to a loop or basic block. The default adds the builtin vectorizer cost for the copies of the statement to the accumulator specified by @var{where}, (the prologue, body, or epilogue) and returns the amount added. The return value should be viewed as a tentative cost that may later be revised.
 @end deftypefn

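The two vectorizer cost hooks above are easiest to read together. As a rough, hedged sketch of the behaviour the documentation describes for the defaults (three unsigned accumulators, one per cost-model location), a target implementation could start from the fragment below; the function names, the XCNEWVEC allocation, and the per-statement cost heuristic are illustrative assumptions, not GCC's actual default code.

/* Minimal sketch of the documented default behaviour: one unsigned
   accumulator per vect_cost_model_location (prologue, body, epilogue).
   Illustrative only -- not the real default implementation.  */

static void *
example_init_cost (class loop *loop_info ATTRIBUTE_UNUSED)
{
  return XCNEWVEC (unsigned, 3);
}

static unsigned
example_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
                       class _stmt_vec_info *stmt_info ATTRIBUTE_UNUSED,
                       int misalign ATTRIBUTE_UNUSED,
                       enum vect_cost_model_location where)
{
  unsigned *costs = (unsigned *) data;
  /* Made-up cost: treat every non-scalar statement as twice as
     expensive as a scalar one.  */
  unsigned stmt_cost = (kind == scalar_stmt ? 1 : 2) * count;
  costs[where] += stmt_cost;
  return stmt_cost;
}
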
@@ -11610,7 +11610,7 @@ function version at run-time for a given set of function versions.
 body must be generated.
 @end deftypefn

-@deftypefn {Target Hook} bool TARGET_PREDICT_DOLOOP_P (struct loop *@var{loop})
+@deftypefn {Target Hook} bool TARGET_PREDICT_DOLOOP_P (class loop *@var{loop})
 Return true if we can predict it is possible to use a low-overhead loop
 for a particular loop. The parameter @var{loop} is a pointer to the loop.
 This target hook is required only when the target supports low-overhead
@@ -11815,7 +11815,7 @@ This function prepares to emit a conditional comparison within a sequence
 @var{bit_code} is @code{AND} or @code{IOR}, which is the op on the compares.
 @end deftypefn

-@deftypefn {Target Hook} unsigned TARGET_LOOP_UNROLL_ADJUST (unsigned @var{nunroll}, struct loop *@var{loop})
+@deftypefn {Target Hook} unsigned TARGET_LOOP_UNROLL_ADJUST (unsigned @var{nunroll}, class loop *@var{loop})
 This target hook returns a new value for the number of times @var{loop}
 should be unrolled. The parameter @var{nunroll} is the number of times
 the loop is to be unrolled. The parameter @var{loop} is a pointer to
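For the unroll-adjust hook documented just above, a hypothetical target implementation might look like the sketch below; only the signature comes from the @deftypefn line, while the function name and the num_nodes-based threshold are invented for illustration.

/* Hypothetical TARGET_LOOP_UNROLL_ADJUST sketch: cap the unroll factor
   for loops with many basic blocks.  The threshold is illustrative.  */
static unsigned
example_loop_unroll_adjust (unsigned nunroll, class loop *loop)
{
  if (loop->num_nodes > 8)
    return MIN (nunroll, 2);
  return nunroll;
}
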
@@ -278,7 +278,7 @@ public:
   } positions_needed;

   /* The next store info for this insn. */
-  struct store_info *next;
+  class store_info *next;

   /* The right hand side of the store. This is used if there is a
      subsequent reload of the mems address somewhere later in the
@@ -326,9 +326,9 @@ public:
   rtx mem;

   /* The next read_info for this insn. */
-  struct read_info_type *next;
+  class read_info_type *next;
 };
-typedef struct read_info_type *read_info_t;
+typedef class read_info_type *read_info_t;

 static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");

@@ -1509,7 +1509,7 @@ record_store (rtx body, bb_info_t bb_info)
   while (ptr)
     {
       insn_info_t next = ptr->next_local_store;
-      struct store_info *s_info = ptr->store_rec;
+      class store_info *s_info = ptr->store_rec;
       bool del = true;

       /* Skip the clobbers. We delete the active insn if this insn

@@ -648,7 +648,7 @@ extern void dump_combine_total_stats (FILE *);
 /* In cfghooks.c */
 extern void dump_bb (FILE *, basic_block, int, dump_flags_t);

-struct opt_pass;
+class opt_pass;

 namespace gcc {

@@ -351,7 +351,7 @@ const_fixed_hasher::equal (rtx x, rtx y)
 /* Return true if the given memory attributes are equal. */

 bool
-mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
+mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
 {
   if (p == q)
     return true;
@@ -1924,7 +1924,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 {
   poly_int64 apply_bitpos = 0;
   tree type;
-  struct mem_attrs attrs, *defattrs, *refattrs;
+  class mem_attrs attrs, *defattrs, *refattrs;
   addr_space_t as;

   /* It can happen that type_for_mode was given a mode for which there
@@ -2334,7 +2334,7 @@ change_address (rtx memref, machine_mode mode, rtx addr)
 {
   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
   machine_mode mmode = GET_MODE (new_rtx);
-  struct mem_attrs *defattrs;
+  class mem_attrs *defattrs;

   mem_attrs attrs (*get_mem_attrs (memref));
   defattrs = mode_mem_attrs[(int) mmode];
@@ -2378,7 +2378,7 @@ adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
   rtx addr = XEXP (memref, 0);
   rtx new_rtx;
   scalar_int_mode address_mode;
-  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
+  class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
   unsigned HOST_WIDE_INT max_align;
 #ifdef POINTERS_EXTEND_UNSIGNED
   scalar_int_mode pointer_mode
@@ -2524,7 +2524,7 @@ offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
 {
   rtx new_rtx, addr = XEXP (memref, 0);
   machine_mode address_mode;
-  struct mem_attrs *defattrs;
+  class mem_attrs *defattrs;

   mem_attrs attrs (*get_mem_attrs (memref));
   address_mode = get_address_mode (memref);

@@ -20,8 +20,8 @@ along with GCC; see the file COPYING3. If not see
 #ifndef GCC_EMIT_RTL_H
 #define GCC_EMIT_RTL_H

-struct temp_slot;
-typedef struct temp_slot *temp_slot_p;
+class temp_slot;
+typedef class temp_slot *temp_slot_p;

 /* Information mainlined about RTL representation of incoming arguments. */
 struct GTY(()) incoming_args {
@@ -110,7 +110,7 @@ struct GTY(()) rtl_data {
   vec<rtx, va_gc> *x_stack_slot_list;

   /* List of empty areas in the stack frame. */
-  struct frame_space *frame_space_list;
+  class frame_space *frame_space_list;

   /* Place after which to insert the tail_recursion_label if we need one. */
   rtx_note *x_stack_check_probe_note;
@@ -136,7 +136,7 @@ struct GTY(()) rtl_data {
   vec<temp_slot_p, va_gc> *x_used_temp_slots;

   /* List of available temp slots. */
-  struct temp_slot *x_avail_temp_slots;
+  class temp_slot *x_avail_temp_slots;

   /* Current nesting level for temporaries. */
   int x_temp_slot_level;
@@ -319,7 +319,7 @@ extern GTY(()) struct rtl_data x_rtl;
 #define crtl (&x_rtl)

 /* Return whether two MEM_ATTRs are equal. */
-bool mem_attrs_eq_p (const struct mem_attrs *, const struct mem_attrs *);
+bool mem_attrs_eq_p (const class mem_attrs *, const class mem_attrs *);

 /* Set the alias set of MEM to SET. */
 extern void set_mem_alias_set (rtx, alias_set_type);

@@ -1015,7 +1015,7 @@ dw2_build_landing_pads (void)
       make_single_succ_edge (bb, bb->next_bb, e_flags);
       if (current_loops)
         {
-          struct loop *loop = bb->next_bb->loop_father;
+          class loop *loop = bb->next_bb->loop_father;
           /* If we created a pre-header block, add the new block to the
              outer loop, otherwise to the loop itself. */
           if (bb->next_bb == loop->header)
@@ -1389,7 +1389,7 @@ sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
       make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
       if (current_loops)
         {
-          struct loop *loop = bb->next_bb->loop_father;
+          class loop *loop = bb->next_bb->loop_father;
           /* If we created a pre-header block, add the new block to the
              outer loop, otherwise to the loop itself. */
           if (bb->next_bb == loop->header)
@@ -1427,7 +1427,7 @@ sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
       make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
       if (current_loops)
         {
-          struct loop *loop = bb->next_bb->loop_father;
+          class loop *loop = bb->next_bb->loop_father;
           /* If we created a pre-header block, add the new block to the
              outer loop, otherwise to the loop itself. */
           if (bb->next_bb == loop->header)

@@ -1489,7 +1489,7 @@ allocate_dynamic_stack_space (rtx size, unsigned size_align,
      stack pointer, such as acquiring the space by calling malloc(). */
   if (targetm.have_allocate_stack ())
     {
-      struct expand_operand ops[2];
+      class expand_operand ops[2];
       /* We don't have to check against the predicate for operand 0 since
          TARGET is known to be a pseudo of the proper mode, which must
          be valid for the operand. */
@@ -1620,7 +1620,7 @@ emit_stack_probe (rtx address)
 {
   if (targetm.have_probe_stack_address ())
     {
-      struct expand_operand ops[1];
+      class expand_operand ops[1];
       insn_code icode = targetm.code_for_probe_stack_address;
       create_address_operand (ops, address);
       maybe_legitimize_operands (icode, 0, 1, ops);
@@ -1680,7 +1680,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
   /* Next see if we have an insn to check the stack. */
   else if (targetm.have_check_stack ())
     {
-      struct expand_operand ops[1];
+      class expand_operand ops[1];
       rtx addr = memory_address (Pmode,
                                  gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                  stack_pointer_rtx,

diff --git a/gcc/expmed.c b/gcc/expmed.c
@@ -599,7 +599,7 @@ store_bit_field_using_insv (const extraction_insn *insv, rtx op0,
                             unsigned HOST_WIDE_INT bitnum,
                             rtx value, scalar_int_mode value_mode)
 {
-  struct expand_operand ops[4];
+  class expand_operand ops[4];
   rtx value1;
   rtx xop0 = op0;
   rtx_insn *last = get_last_insn ();
@@ -759,7 +759,7 @@ store_bit_field_1 (rtx str_rtx, poly_uint64 bitsize, poly_uint64 bitnum,
       && known_eq (bitsize, GET_MODE_BITSIZE (innermode))
       && multiple_p (bitnum, GET_MODE_BITSIZE (innermode), &pos))
     {
-      struct expand_operand ops[3];
+      class expand_operand ops[3];
       enum insn_code icode = optab_handler (vec_set_optab, outermode);

       create_fixed_operand (&ops[0], op0);
@@ -870,7 +870,7 @@ store_integral_bit_field (rtx op0, opt_scalar_int_mode op0_mode,
       && known_eq (bitsize, GET_MODE_BITSIZE (fieldmode))
       && optab_handler (movstrict_optab, fieldmode) != CODE_FOR_nothing)
     {
-      struct expand_operand ops[2];
+      class expand_operand ops[2];
       enum insn_code icode = optab_handler (movstrict_optab, fieldmode);
       rtx arg0 = op0;
       unsigned HOST_WIDE_INT subreg_off;
@@ -1499,7 +1499,7 @@ extract_bit_field_using_extv (const extraction_insn *extv, rtx op0,
                               int unsignedp, rtx target,
                               machine_mode mode, machine_mode tmode)
 {
-  struct expand_operand ops[4];
+  class expand_operand ops[4];
   rtx spec_target = target;
   rtx spec_target_subreg = 0;
   scalar_int_mode ext_mode = extv->field_mode;
@@ -1655,7 +1655,7 @@ extract_bit_field_1 (rtx str_rtx, poly_uint64 bitsize, poly_uint64 bitnum,
           != CODE_FOR_nothing)
       && multiple_p (bitnum, GET_MODE_BITSIZE (tmode), &pos))
     {
-      struct expand_operand ops[3];
+      class expand_operand ops[3];
       machine_mode outermode = new_mode;
       machine_mode innermode = tmode;
       enum insn_code icode
@@ -1722,7 +1722,7 @@ extract_bit_field_1 (rtx str_rtx, poly_uint64 bitsize, poly_uint64 bitnum,
       && known_eq (bitsize, GET_MODE_BITSIZE (innermode))
       && multiple_p (bitnum, GET_MODE_BITSIZE (innermode), &pos))
     {
-      struct expand_operand ops[3];
+      class expand_operand ops[3];

       create_output_operand (&ops[0], target, innermode);
       ops[0].target = 1;
@@ -5428,7 +5428,7 @@ emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
              int unsignedp, rtx x, rtx y, int normalizep,
              machine_mode target_mode)
 {
-  struct expand_operand ops[4];
+  class expand_operand ops[4];
   rtx op0, comparison, subtarget;
   rtx_insn *last;
   scalar_int_mode result_mode = targetm.cstore_mode (icode);

diff --git a/gcc/expr.c b/gcc/expr.c
@@ -1769,7 +1769,7 @@ emit_block_move_via_cpymem (rtx x, rtx y, rtx size, unsigned int align,
           || max_size <= (GET_MODE_MASK (mode) >> 1)
           || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
     {
-      struct expand_operand ops[9];
+      class expand_operand ops[9];
       unsigned int nops;

       /* ??? When called via emit_block_move_for_call, it'd be
@@ -1932,7 +1932,7 @@ expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
     target = NULL_RTX;

-  struct expand_operand ops[5];
+  class expand_operand ops[5];
   create_output_operand (&ops[0], target, insn_mode);
   create_fixed_operand (&ops[1], arg1_rtx);
   create_fixed_operand (&ops[2], arg2_rtx);
@@ -3137,7 +3137,7 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
           || max_size <= (GET_MODE_MASK (mode) >> 1)
           || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
     {
-      struct expand_operand ops[9];
+      class expand_operand ops[9];
       unsigned int nops;

       nops = insn_data[(int) code].n_generator_args;
@@ -4181,7 +4181,7 @@ emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
   icode = optab_handler (push_optab, mode);
   if (icode != CODE_FOR_nothing)
     {
-      struct expand_operand ops[1];
+      class expand_operand ops[1];

       create_input_operand (&ops[0], x, mode);
       if (maybe_expand_insn (icode, 1, ops))
@@ -5027,7 +5027,7 @@ expand_assignment (tree to, tree from, bool nontemporal)

       if (icode != CODE_FOR_nothing)
         {
-          struct expand_operand ops[2];
+          class expand_operand ops[2];

           create_fixed_operand (&ops[0], mem);
           create_input_operand (&ops[1], reg, mode);
@@ -5456,7 +5456,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 bool
 emit_storent_insn (rtx to, rtx from)
 {
-  struct expand_operand ops[2];
+  class expand_operand ops[2];
   machine_mode mode = GET_MODE (to);
   enum insn_code code = optab_handler (storent_optab, mode);

@@ -6759,7 +6759,7 @@ store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
               != CODE_FOR_nothing)
           && (elt = uniform_vector_p (exp)))
         {
-          struct expand_operand ops[2];
+          class expand_operand ops[2];
           create_output_operand (&ops[0], target, mode);
           create_input_operand (&ops[1], expand_normal (elt), eltmode);
           expand_insn (icode, 2, ops);
@@ -9554,7 +9554,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
           && mode == TYPE_MODE (TREE_TYPE (treeop0))
           && SCALAR_INT_MODE_P (mode))
         {
-          struct expand_operand eops[4];
+          class expand_operand eops[4];
           machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
           expand_operands (treeop0, treeop1,
                            subtarget, &op0, &op1, EXPAND_NORMAL);
@@ -10292,7 +10292,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
             && ((icode = optab_handler (movmisalign_optab, mode))
                 != CODE_FOR_nothing))
           {
-            struct expand_operand ops[2];
+            class expand_operand ops[2];

             /* We've already validated the memory, and we're creating a
                new pseudo destination. The predicates really can't fail,
@@ -10374,7 +10374,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
         if ((icode = optab_handler (movmisalign_optab, mode))
             != CODE_FOR_nothing)
           {
-            struct expand_operand ops[2];
+            class expand_operand ops[2];

             /* We've already validated the memory, and we're creating a
                new pseudo destination. The predicates really can't fail,
@@ -12180,7 +12180,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
             rtx table_label, rtx default_label, rtx fallback_label,
             profile_probability default_probability)
 {
-  struct expand_operand ops[5];
+  class expand_operand ops[5];
   scalar_int_mode index_mode = SImode;
   rtx op1, op2, index;

@@ -56,9 +56,9 @@ public:
   enum excess_precision x_flag_excess_precision;
 };

-extern struct target_flag_state default_target_flag_state;
+extern class target_flag_state default_target_flag_state;
 #if SWITCHABLE_TARGET
-extern struct target_flag_state *this_target_flag_state;
+extern class target_flag_state *this_target_flag_state;
 #else
 #define this_target_flag_state (&default_target_flag_state)
 #endif

@@ -133,7 +133,7 @@ vec<tree, va_gc> *types_used_by_cur_var_decl;

 /* Forward declarations. */

-static struct temp_slot *find_temp_slot_from_address (rtx);
+static class temp_slot *find_temp_slot_from_address (rtx);
 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
 static void pad_below (struct args_size *, machine_mode, tree);
 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
@@ -345,7 +345,7 @@ try_fit_stack_local (poly_int64 start, poly_int64 length,
 static void
 add_frame_space (poly_int64 start, poly_int64 end)
 {
-  struct frame_space *space = ggc_alloc<frame_space> ();
+  class frame_space *space = ggc_alloc<frame_space> ();
   space->next = crtl->frame_space_list;
   crtl->frame_space_list = space;
   space->start = start;
@@ -441,11 +441,11 @@ assign_stack_local_1 (machine_mode mode, poly_int64 size,
     {
       if (kind & ASLK_RECORD_PAD)
         {
-          struct frame_space **psp;
+          class frame_space **psp;

           for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
             {
-              struct frame_space *space = *psp;
+              class frame_space *space = *psp;
               if (!try_fit_stack_local (space->start, space->length, size,
                                         alignment, &slot_offset))
                 continue;
@@ -559,9 +559,9 @@ assign_stack_local (machine_mode mode, poly_int64 size, int align)
 class GTY(()) temp_slot {
 public:
   /* Points to next temporary slot. */
-  struct temp_slot *next;
+  class temp_slot *next;
   /* Points to previous temporary slot. */
-  struct temp_slot *prev;
+  class temp_slot *prev;
   /* The rtx to used to reference the slot. */
   rtx slot;
   /* The size, in units, of the slot. */
@@ -589,7 +589,7 @@ public:
 struct GTY((for_user)) temp_slot_address_entry {
   hashval_t hash;
   rtx address;
-  struct temp_slot *temp_slot;
+  class temp_slot *temp_slot;
 };

 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
@@ -606,7 +606,7 @@ static size_t n_temp_slots_in_use;
 /* Removes temporary slot TEMP from LIST. */

 static void
-cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
+cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
 {
   if (temp->next)
     temp->next->prev = temp->prev;
@@ -621,7 +621,7 @@ cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
 /* Inserts temporary slot TEMP to LIST. */

 static void
-insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
+insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
 {
   temp->next = *list;
   if (*list)
@@ -632,7 +632,7 @@ insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)

 /* Returns the list of used temp slots at LEVEL. */

-static struct temp_slot **
+static class temp_slot **
 temp_slots_at_level (int level)
 {
   if (level >= (int) vec_safe_length (used_temp_slots))
@@ -655,7 +655,7 @@ max_slot_level (void)
 /* Moves temporary slot TEMP to LEVEL. */

 static void
-move_slot_to_level (struct temp_slot *temp, int level)
+move_slot_to_level (class temp_slot *temp, int level)
 {
   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
   insert_slot_to_list (temp, temp_slots_at_level (level));
@@ -665,7 +665,7 @@ move_slot_to_level (struct temp_slot *temp, int level)
 /* Make temporary slot TEMP available. */

 static void
-make_slot_available (struct temp_slot *temp)
+make_slot_available (class temp_slot *temp)
 {
   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
   insert_slot_to_list (temp, &avail_temp_slots);
@@ -701,7 +701,7 @@ temp_address_hasher::equal (temp_slot_address_entry *t1,

 /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
 static void
-insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
+insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
 {
   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
   t->address = address;
@@ -735,10 +735,10 @@ remove_unused_temp_slot_addresses (void)

 /* Find the temp slot corresponding to the object at address X. */

-static struct temp_slot *
+static class temp_slot *
 find_temp_slot_from_address (rtx x)
 {
-  struct temp_slot *p;
+  class temp_slot *p;
   struct temp_slot_address_entry tmp, *t;

   /* First try the easy way:
@@ -787,7 +787,7 @@ rtx
 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
 {
   unsigned int align;
-  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
+  class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
   rtx slot;

   gcc_assert (known_size_p (size));
@@ -1031,7 +1031,7 @@ assign_temp (tree type_or_decl, int memory_required,
 static void
 combine_temp_slots (void)
 {
-  struct temp_slot *p, *q, *next, *next_q;
+  class temp_slot *p, *q, *next, *next_q;
   int num_slots;

   /* We can't combine slots, because the information about which slot
@@ -1095,7 +1095,7 @@ combine_temp_slots (void)
 void
 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
 {
-  struct temp_slot *p;
+  class temp_slot *p;

   if (rtx_equal_p (old_rtx, new_rtx))
     return;
@@ -1149,7 +1149,7 @@ update_temp_slot_address (rtx old_rtx, rtx new_rtx)
 void
 preserve_temp_slots (rtx x)
 {
-  struct temp_slot *p = 0, *next;
+  class temp_slot *p = 0, *next;

   if (x == 0)
     return;
@@ -1189,7 +1189,7 @@ preserve_temp_slots (rtx x)
 void
 free_temp_slots (void)
 {
-  struct temp_slot *p, *next;
+  class temp_slot *p, *next;
   bool some_available = false;

   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)

@@ -186,7 +186,7 @@ struct GTY(()) function_subsections {
 class GTY(()) frame_space
 {
 public:
-  struct frame_space *next;
+  class frame_space *next;

   poly_int64 start;
   poly_int64 length;
@@ -243,7 +243,7 @@ struct GTY(()) function {
   char *pass_startwith;

   /* The stack usage of this function. */
-  struct stack_usage *su;
+  class stack_usage *su;

   /* Value histograms attached to particular statements. */
   htab_t GTY((skip)) value_histograms;

@@ -224,8 +224,8 @@ edge
 single_def_use_dom_walker::before_dom_children (basic_block bb)
 {
   int bb_index = bb->index;
-  struct df_md_bb_info *md_bb_info = df_md_get_bb_info (bb_index);
-  struct df_lr_bb_info *lr_bb_info = df_lr_get_bb_info (bb_index);
+  class df_md_bb_info *md_bb_info = df_md_get_bb_info (bb_index);
+  class df_lr_bb_info *lr_bb_info = df_lr_get_bb_info (bb_index);
   rtx_insn *insn;

   bitmap_copy (local_md, &md_bb_info->in);

@@ -181,7 +181,7 @@ class maybe_range_label_for_tree_type_mismatch : public range_label
   tree m_other_expr;
 };

-struct op_location_t;
+class op_location_t;

 /* A subclass of rich_location for showing problems with binary operations.

diff --git a/gcc/gcov.c b/gcc/gcov.c
@@ -76,17 +76,17 @@ using namespace std;

 /* This is the size of the buffer used to read in source file lines. */

-struct function_info;
-struct block_info;
-struct source_info;
+class function_info;
+class block_info;
+class source_info;

 /* Describes an arc between two basic blocks. */

 struct arc_info
 {
   /* source and destination blocks. */
-  struct block_info *src;
-  struct block_info *dst;
+  class block_info *src;
+  class block_info *dst;

   /* transition counts. */
   gcov_type count;
@@ -178,7 +178,7 @@ public:

   /* Temporary chain for solving graph, and for chaining blocks on one
      line. */
-  struct block_info *chain;
+  class block_info *chain;

 };

@@ -297,7 +297,7 @@ public:
   vector<line_info> lines;

   /* Next function. */
-  struct function_info *next;
+  class function_info *next;

   /* Get demangled name of a function. The demangled name
      is converted when it is used for the first time. */
136
gcc/genattrtab.c
136
gcc/genattrtab.c
@ -136,7 +136,7 @@ static struct obstack *temp_obstack = &obstack2;
|
||||
class insn_def
|
||||
{
|
||||
public:
|
||||
struct insn_def *next; /* Next insn in chain. */
|
||||
class insn_def *next; /* Next insn in chain. */
|
||||
rtx def; /* The DEFINE_... */
|
||||
int insn_code; /* Instruction number. */
|
||||
int insn_index; /* Expression number in file, for errors. */
|
||||
@ -152,7 +152,7 @@ public:
|
||||
struct insn_ent
|
||||
{
|
||||
struct insn_ent *next; /* Next in chain. */
|
||||
struct insn_def *def; /* Instruction definition. */
|
||||
class insn_def *def; /* Instruction definition. */
|
||||
};
|
||||
|
||||
/* Each value of an attribute (either constant or computed) is assigned a
|
||||
@ -175,7 +175,7 @@ class attr_desc
|
||||
public:
|
||||
char *name; /* Name of attribute. */
|
||||
const char *enum_name; /* Enum name for DEFINE_ENUM_NAME. */
|
||||
struct attr_desc *next; /* Next attribute. */
|
||||
class attr_desc *next; /* Next attribute. */
|
||||
struct attr_value *first_value; /* First value of this attribute. */
|
||||
struct attr_value *default_val; /* Default value for this attribute. */
|
||||
file_location loc; /* Where in the .md files it occurs. */
|
||||
@ -190,7 +190,7 @@ class delay_desc
|
||||
{
|
||||
public:
|
||||
rtx def; /* DEFINE_DELAY expression. */
|
||||
struct delay_desc *next; /* Next DEFINE_DELAY. */
|
||||
class delay_desc *next; /* Next DEFINE_DELAY. */
|
||||
file_location loc; /* Where in the .md files it occurs. */
|
||||
int num; /* Number of DEFINE_DELAY, starting at 1. */
|
||||
};
|
||||
@ -199,7 +199,7 @@ struct attr_value_list
|
||||
{
|
||||
struct attr_value *av;
|
||||
struct insn_ent *ie;
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
struct attr_value_list *next;
|
||||
};
|
||||
|
||||
@ -207,9 +207,9 @@ struct attr_value_list
|
||||
|
||||
/* This one is indexed by the first character of the attribute name. */
|
||||
#define MAX_ATTRS_INDEX 256
|
||||
static struct attr_desc *attrs[MAX_ATTRS_INDEX];
|
||||
static struct insn_def *defs;
|
||||
static struct delay_desc *delays;
|
||||
static class attr_desc *attrs[MAX_ATTRS_INDEX];
|
||||
static class insn_def *defs;
|
||||
static class delay_desc *delays;
|
||||
struct attr_value_list **insn_code_values;
|
||||
|
||||
/* Other variables. */
|
||||
@ -260,7 +260,7 @@ static char *attr_string (const char *, int);
|
||||
static char *attr_printf (unsigned int, const char *, ...)
|
||||
ATTRIBUTE_PRINTF_2;
|
||||
static rtx make_numeric_value (int);
|
||||
static struct attr_desc *find_attr (const char **, int);
|
||||
static class attr_desc *find_attr (const char **, int);
|
||||
static rtx mk_attr_alt (alternative_mask);
|
||||
static char *next_comma_elt (const char **);
|
||||
static rtx insert_right_side (enum rtx_code, rtx, rtx, int, int);
|
||||
@ -278,15 +278,15 @@ static rtx copy_rtx_unchanging (rtx);
|
||||
static bool attr_alt_subset_p (rtx, rtx);
|
||||
static bool attr_alt_subset_of_compl_p (rtx, rtx);
|
||||
static void clear_struct_flag (rtx);
|
||||
static void write_attr_valueq (FILE *, struct attr_desc *, const char *);
|
||||
static struct attr_value *find_most_used (struct attr_desc *);
|
||||
static void write_attr_set (FILE *, struct attr_desc *, int, rtx,
|
||||
static void write_attr_valueq (FILE *, class attr_desc *, const char *);
|
||||
static struct attr_value *find_most_used (class attr_desc *);
|
||||
static void write_attr_set (FILE *, class attr_desc *, int, rtx,
|
||||
const char *, const char *, rtx,
|
||||
int, int, unsigned int);
|
||||
static void write_attr_case (FILE *, struct attr_desc *,
|
||||
static void write_attr_case (FILE *, class attr_desc *,
|
||||
struct attr_value *,
|
||||
int, const char *, const char *, int, rtx);
|
||||
static void write_attr_value (FILE *, struct attr_desc *, rtx);
|
||||
static void write_attr_value (FILE *, class attr_desc *, rtx);
|
||||
static void write_upcase (FILE *, const char *);
|
||||
static void write_indent (FILE *, int);
|
||||
static rtx identity_fn (rtx);
|
||||
@ -847,7 +847,7 @@ check_attr_test (file_location loc, rtx exp, attr_desc *attr)
|
||||
Return a perhaps modified replacement expression for the value. */
|
||||
|
||||
static rtx
|
||||
check_attr_value (file_location loc, rtx exp, struct attr_desc *attr)
|
||||
check_attr_value (file_location loc, rtx exp, class attr_desc *attr)
|
||||
{
|
||||
struct attr_value *av;
|
||||
const char *p;
|
||||
@ -957,7 +957,7 @@ check_attr_value (file_location loc, rtx exp, struct attr_desc *attr)
|
||||
|
||||
case ATTR:
|
||||
{
|
||||
struct attr_desc *attr2 = find_attr (&XSTR (exp, 0), 0);
|
||||
class attr_desc *attr2 = find_attr (&XSTR (exp, 0), 0);
|
||||
if (attr2 == NULL)
|
||||
error_at (loc, "unknown attribute `%s' in ATTR",
|
||||
XSTR (exp, 0));
|
||||
@ -991,7 +991,7 @@ check_attr_value (file_location loc, rtx exp, struct attr_desc *attr)
|
||||
It becomes a COND with each test being (eq_attr "alternative" "n") */
|
||||
|
||||
static rtx
|
||||
convert_set_attr_alternative (rtx exp, struct insn_def *id)
|
||||
convert_set_attr_alternative (rtx exp, class insn_def *id)
|
||||
{
|
||||
int num_alt = id->num_alternatives;
|
||||
rtx condexp;
|
||||
@ -1027,7 +1027,7 @@ convert_set_attr_alternative (rtx exp, struct insn_def *id)
|
||||
list of values is given, convert to SET_ATTR_ALTERNATIVE first. */
|
||||
|
||||
static rtx
|
||||
convert_set_attr (rtx exp, struct insn_def *id)
|
||||
convert_set_attr (rtx exp, class insn_def *id)
|
||||
{
|
||||
rtx newexp;
|
||||
const char *name_ptr;
|
||||
@ -1061,8 +1061,8 @@ convert_set_attr (rtx exp, struct insn_def *id)
|
||||
static void
|
||||
check_defs (void)
|
||||
{
|
||||
struct insn_def *id;
|
||||
struct attr_desc *attr;
|
||||
class insn_def *id;
|
||||
class attr_desc *attr;
|
||||
int i;
|
||||
rtx value;
|
||||
|
||||
@ -1119,7 +1119,7 @@ check_defs (void)
|
||||
value. LOC is the location to use for error reporting. */
|
||||
|
||||
static rtx
|
||||
make_canonical (file_location loc, struct attr_desc *attr, rtx exp)
|
||||
make_canonical (file_location loc, class attr_desc *attr, rtx exp)
|
||||
{
|
||||
int i;
|
||||
rtx newexp;
|
||||
@ -1226,7 +1226,7 @@ copy_boolean (rtx exp)
|
||||
alternatives. LOC is the location to use for error reporting. */
|
||||
|
||||
static struct attr_value *
|
||||
get_attr_value (file_location loc, rtx value, struct attr_desc *attr,
|
||||
get_attr_value (file_location loc, rtx value, class attr_desc *attr,
|
||||
int insn_code)
|
||||
{
|
||||
struct attr_value *av;
|
||||
@ -1276,7 +1276,7 @@ get_attr_value (file_location loc, rtx value, struct attr_desc *attr,
|
||||
static void
|
||||
expand_delays (void)
|
||||
{
|
||||
struct delay_desc *delay;
|
||||
class delay_desc *delay;
|
||||
rtx condexp;
|
||||
rtx newexp;
|
||||
int i;
|
||||
@ -1362,11 +1362,11 @@ expand_delays (void)
|
||||
the attribute. */
|
||||
|
||||
static void
|
||||
fill_attr (struct attr_desc *attr)
|
||||
fill_attr (class attr_desc *attr)
|
||||
{
|
||||
struct attr_value *av;
|
||||
struct insn_ent *ie;
|
||||
struct insn_def *id;
|
||||
class insn_def *id;
|
||||
int i;
|
||||
rtx value;
|
||||
|
||||
@ -1491,7 +1491,7 @@ make_length_attrs (void)
|
||||
static rtx (*const address_fn[]) (rtx)
|
||||
= {max_fn, min_fn, one_fn, identity_fn};
|
||||
size_t i;
|
||||
struct attr_desc *length_attr, *new_attr;
|
||||
class attr_desc *length_attr, *new_attr;
|
||||
struct attr_value *av, *new_av;
|
||||
struct insn_ent *ie, *new_ie;
|
||||
|
||||
@ -1565,7 +1565,7 @@ min_fn (rtx exp)
|
||||
static void
|
||||
write_length_unit_log (FILE *outf)
|
||||
{
|
||||
struct attr_desc *length_attr = find_attr (&length_str, 0);
|
||||
class attr_desc *length_attr = find_attr (&length_str, 0);
|
||||
struct attr_value *av;
|
||||
struct insn_ent *ie;
|
||||
unsigned int length_unit_log, length_or;
|
||||
@ -1924,7 +1924,7 @@ make_alternative_compare (alternative_mask mask)
|
||||
corresponding to INSN_CODE and INSN_INDEX. */
|
||||
|
||||
static rtx
|
||||
evaluate_eq_attr (rtx exp, struct attr_desc *attr, rtx value,
|
||||
evaluate_eq_attr (rtx exp, class attr_desc *attr, rtx value,
|
||||
int insn_code, int insn_index)
|
||||
{
|
||||
rtx orexp, andexp;
|
||||
@ -2417,7 +2417,7 @@ static rtx
|
||||
simplify_test_exp (rtx exp, int insn_code, int insn_index)
|
||||
{
|
||||
rtx left, right;
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
struct attr_value *av;
|
||||
struct insn_ent *ie;
|
||||
struct attr_value_list *iv;
|
||||
@ -2758,7 +2758,7 @@ simplify_test_exp (rtx exp, int insn_code, int insn_index)
|
||||
otherwise return 0. */
|
||||
|
||||
static int
|
||||
tests_attr_p (rtx p, struct attr_desc *attr)
|
||||
tests_attr_p (rtx p, class attr_desc *attr)
|
||||
{
|
||||
const char *fmt;
|
||||
int i, ie, j, je;
|
||||
@ -2799,18 +2799,18 @@ tests_attr_p (rtx p, struct attr_desc *attr)
|
||||
attr_desc pointers), and return the size of that array. */
|
||||
|
||||
static int
|
||||
get_attr_order (struct attr_desc ***ret)
|
||||
get_attr_order (class attr_desc ***ret)
|
||||
{
|
||||
int i, j;
|
||||
int num = 0;
|
||||
struct attr_desc *attr;
|
||||
struct attr_desc **all, **sorted;
|
||||
class attr_desc *attr;
|
||||
class attr_desc **all, **sorted;
|
||||
char *handled;
|
||||
for (i = 0; i < MAX_ATTRS_INDEX; i++)
|
||||
for (attr = attrs[i]; attr; attr = attr->next)
|
||||
num++;
|
||||
all = XNEWVEC (struct attr_desc *, num);
|
||||
sorted = XNEWVEC (struct attr_desc *, num);
|
||||
all = XNEWVEC (class attr_desc *, num);
|
||||
sorted = XNEWVEC (class attr_desc *, num);
|
||||
handled = XCNEWVEC (char, num);
|
||||
num = 0;
|
||||
for (i = 0; i < MAX_ATTRS_INDEX; i++)
|
||||
@ -2858,7 +2858,7 @@ get_attr_order (struct attr_desc ***ret)
|
||||
if (DEBUG)
|
||||
for (j = 0; j < num; j++)
|
||||
{
|
||||
struct attr_desc *attr2;
|
||||
class attr_desc *attr2;
|
||||
struct attr_value *av;
|
||||
|
||||
attr = sorted[j];
|
||||
@ -2889,14 +2889,14 @@ get_attr_order (struct attr_desc ***ret)
|
||||
static void
|
||||
optimize_attrs (int num_insn_codes)
|
||||
{
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
struct attr_value *av;
|
||||
struct insn_ent *ie;
|
||||
rtx newexp;
|
||||
int i;
|
||||
struct attr_value_list *ivbuf;
|
||||
struct attr_value_list *iv;
|
||||
struct attr_desc **topsort;
|
||||
class attr_desc **topsort;
|
||||
int topnum;
|
||||
|
||||
/* For each insn code, make a list of all the insn_ent's for it,
|
||||
@ -3044,7 +3044,7 @@ clear_struct_flag (rtx x)
|
||||
/* Add attribute value NAME to the beginning of ATTR's list. */
|
||||
|
||||
static void
|
||||
add_attr_value (struct attr_desc *attr, const char *name)
|
||||
add_attr_value (class attr_desc *attr, const char *name)
|
||||
{
|
||||
struct attr_value *av;
|
||||
|
||||
@ -3064,7 +3064,7 @@ gen_attr (md_rtx_info *info)
|
||||
{
|
||||
struct enum_type *et;
|
||||
struct enum_value *ev;
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
const char *name_ptr;
|
||||
char *p;
|
||||
rtx def = info->def;
|
||||
@ -3195,10 +3195,10 @@ compares_alternatives_p (rtx exp)
|
||||
static void
|
||||
gen_insn (md_rtx_info *info)
|
||||
{
|
||||
struct insn_def *id;
|
||||
class insn_def *id;
|
||||
rtx def = info->def;
|
||||
|
||||
id = oballoc (struct insn_def);
|
||||
id = oballoc (class insn_def);
|
||||
id->next = defs;
|
||||
defs = id;
|
||||
id->def = def;
|
||||
@ -3243,7 +3243,7 @@ gen_insn (md_rtx_info *info)
|
||||
static void
|
||||
gen_delay (md_rtx_info *info)
|
||||
{
|
||||
struct delay_desc *delay;
|
||||
class delay_desc *delay;
|
||||
int i;
|
||||
|
||||
rtx def = info->def;
|
||||
@ -3262,7 +3262,7 @@ gen_delay (md_rtx_info *info)
|
||||
have_annul_false = 1;
|
||||
}
|
||||
|
||||
delay = oballoc (struct delay_desc);
|
||||
delay = oballoc (class delay_desc);
|
||||
delay->def = def;
|
||||
delay->num = ++num_delays;
|
||||
delay->next = delays;
|
||||
@ -3289,7 +3289,7 @@ find_attrs_to_cache (rtx exp, bool create)
|
||||
{
|
||||
int i;
|
||||
const char *name;
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
|
||||
if (exp == NULL)
|
||||
return;
|
||||
@ -3369,7 +3369,7 @@ write_test_expr (FILE *outf, rtx exp, unsigned int attrs_cached, int flags,
|
||||
{
|
||||
int comparison_operator = 0;
|
||||
RTX_CODE code;
|
||||
struct attr_desc *attr;
|
||||
class attr_desc *attr;
|
||||
|
||||
if (emit_parens)
|
||||
fprintf (outf, "(");
|
||||
@ -4042,7 +4042,7 @@ walk_attr_value (rtx exp)
|
||||
/* Write out a function to obtain the attribute for a given INSN. */
|
||||
|
||||
static void
|
||||
write_attr_get (FILE *outf, struct attr_desc *attr)
|
||||
write_attr_get (FILE *outf, class attr_desc *attr)
|
||||
{
|
||||
struct attr_value *av, *common_av;
|
||||
int i, j;
|
||||
@ -4099,7 +4099,7 @@ write_attr_get (FILE *outf, struct attr_desc *attr)
|
||||
if ((attrs_seen_more_than_once & (1U << i)) != 0)
|
||||
{
|
||||
const char *name = cached_attrs[i];
|
||||
struct attr_desc *cached_attr;
|
||||
class attr_desc *cached_attr;
|
||||
if (i != j)
|
||||
cached_attrs[j] = name;
|
||||
cached_attr = find_attr (&name, 0);
|
||||
@@ -4163,7 +4163,7 @@ eliminate_known_true (rtx known_true, rtx exp, int insn_code, int insn_index)
 and ";"). */

 static void
-write_attr_set (FILE *outf, struct attr_desc *attr, int indent, rtx value,
+write_attr_set (FILE *outf, class attr_desc *attr, int indent, rtx value,
 const char *prefix, const char *suffix, rtx known_true,
 int insn_code, int insn_index, unsigned int attrs_cached)
 {
@@ -4291,7 +4291,7 @@ write_insn_cases (FILE *outf, struct insn_ent *ie, int indent)
 /* Write out the computation for one attribute value. */

 static void
-write_attr_case (FILE *outf, struct attr_desc *attr, struct attr_value *av,
+write_attr_case (FILE *outf, class attr_desc *attr, struct attr_value *av,
 int write_case_lines, const char *prefix, const char *suffix,
 int indent, rtx known_true)
 {
@@ -4355,7 +4355,7 @@ write_attr_case (FILE *outf, struct attr_desc *attr, struct attr_value *av,
 /* Utilities to write in various forms. */

 static void
-write_attr_valueq (FILE *outf, struct attr_desc *attr, const char *s)
+write_attr_valueq (FILE *outf, class attr_desc *attr, const char *s)
 {
 if (attr->is_numeric)
 {
@@ -4375,7 +4375,7 @@ write_attr_valueq (FILE *outf, struct attr_desc *attr, const char *s)
 }

 static void
-write_attr_value (FILE *outf, struct attr_desc *attr, rtx value)
+write_attr_value (FILE *outf, class attr_desc *attr, rtx value)
 {
 int op;

@@ -4395,7 +4395,7 @@ write_attr_value (FILE *outf, struct attr_desc *attr, rtx value)

 case ATTR:
 {
-struct attr_desc *attr2 = find_attr (&XSTR (value, 0), 0);
+class attr_desc *attr2 = find_attr (&XSTR (value, 0), 0);
 if (attr->enum_name)
 fprintf (outf, "(enum %s)", attr->enum_name);
 else if (!attr->is_numeric)
@@ -4503,11 +4503,11 @@ write_dummy_eligible_delay (FILE *outf, const char *kind)
 static void
 write_eligible_delay (FILE *outf, const char *kind)
 {
-struct delay_desc *delay;
+class delay_desc *delay;
 int max_slots;
 char str[50];
 const char *pstr;
-struct attr_desc *attr;
+class attr_desc *attr;
 struct attr_value *av, *common_av;
 int i;

@@ -4639,14 +4639,14 @@ next_comma_elt (const char **pstr)
 return attr_string (start, *pstr - start);
 }

-/* Return a `struct attr_desc' pointer for a given named attribute. If CREATE
+/* Return a `class attr_desc' pointer for a given named attribute. If CREATE
 is nonzero, build a new attribute, if one does not exist. *NAME_P is
 replaced by a pointer to a canonical copy of the string. */

-static struct attr_desc *
+static class attr_desc *
 find_attr (const char **name_p, int create)
 {
-struct attr_desc *attr;
+class attr_desc *attr;
 int index;
 const char *name = *name_p;

@@ -4671,7 +4671,7 @@ find_attr (const char **name_p, int create)
 if (! create)
 return NULL;

-attr = oballoc (struct attr_desc);
+attr = oballoc (class attr_desc);
 attr->name = DEF_ATTR_STRING (name);
 attr->enum_name = 0;
 attr->first_value = attr->default_val = NULL;
@@ -4689,7 +4689,7 @@ find_attr (const char **name_p, int create)
 static void
 make_internal_attr (const char *name, rtx value, int special)
 {
-struct attr_desc *attr;
+class attr_desc *attr;

 attr = find_attr (&name, 1);
 gcc_assert (!attr->default_val);
@@ -4704,7 +4704,7 @@ make_internal_attr (const char *name, rtx value, int special)
 /* Find the most used value of an attribute. */

 static struct attr_value *
-find_most_used (struct attr_desc *attr)
+find_most_used (class attr_desc *attr)
 {
 struct attr_value *av;
 struct attr_value *most_used;
@@ -4759,7 +4759,7 @@ copy_rtx_unchanging (rtx orig)
 static void
 write_const_num_delay_slots (FILE *outf)
 {
-struct attr_desc *attr = find_attr (&num_delay_slots_str, 0);
+class attr_desc *attr = find_attr (&num_delay_slots_str, 0);
 struct attr_value *av;

 if (attr)
@@ -4815,7 +4815,7 @@ gen_insn_reserv (md_rtx_info *info)
 struct insn_reserv *decl = oballoc (struct insn_reserv);
 rtx def = info->def;

-struct attr_desc attr = { };
+class attr_desc attr = { };

 attr.name = DEF_ATTR_STRING (XSTR (def, 0));
 attr.loc = info->loc;
@@ -4932,10 +4932,10 @@ check_tune_attr (const char *name, rtx exp)

 /* Try to find a const attribute (usually cpu or tune) that is used
 in all define_insn_reservation conditions. */
-static struct attr_desc *
+static class attr_desc *
 find_tune_attr (rtx exp)
 {
-struct attr_desc *attr;
+class attr_desc *attr;

 switch (GET_CODE (exp))
 {
@@ -4979,7 +4979,7 @@ make_automaton_attrs (void)
 int i;
 struct insn_reserv *decl;
 rtx code_exp, lats_exp, byps_exp;
-struct attr_desc *tune_attr;
+class attr_desc *tune_attr;

 if (n_insn_reservs == 0)
 return;
@@ -5245,8 +5245,8 @@ handle_arg (const char *arg)
 int
 main (int argc, const char **argv)
 {
-struct attr_desc *attr;
-struct insn_def *id;
+class attr_desc *attr;
+class insn_def *id;
 int i;

 progname = "genattrtab";
@@ -80,7 +80,7 @@ public:
 };

 /* Forward declarations. */
-static void walk_rtx (md_rtx_info *, rtx, struct accum_extract *);
+static void walk_rtx (md_rtx_info *, rtx, class accum_extract *);

 #define UPPER_OFFSET ('A' - ('z' - 'a' + 1))

@@ -89,7 +89,7 @@ static void walk_rtx (md_rtx_info *, rtx, struct accum_extract *);
 in ACC. */
 static void
 push_pathstr_operand (int operand, bool is_vector,
-struct accum_extract *acc)
+class accum_extract *acc)
 {
 if (is_vector && 'a' + operand > 'z')
 acc->pathstr.safe_push (operand + UPPER_OFFSET);
@@ -106,7 +106,7 @@ gen_insn (md_rtx_info *info)
 unsigned int op_count, dup_count, j;
 struct extraction *p;
 struct code_ptr *link;
-struct accum_extract acc;
+class accum_extract acc;

 /* Walk the insn's pattern, remembering at all times the path
 down to the walking point. */
@@ -224,7 +224,7 @@ VEC_char_to_string (vec<char> v)
 }

 static void
-walk_rtx (md_rtx_info *info, rtx x, struct accum_extract *acc)
+walk_rtx (md_rtx_info *info, rtx x, class accum_extract *acc)
 {
 RTX_CODE code;
 int i, len;
@ -50,7 +50,7 @@ unsigned verbose;
|
||||
|
||||
/* libccp helpers. */
|
||||
|
||||
static struct line_maps *line_table;
|
||||
static class line_maps *line_table;
|
||||
|
||||
/* The rich_location class within libcpp requires a way to expand
|
||||
location_t instances, and relies on the client code
|
||||
@ -416,7 +416,7 @@ public:
|
||||
unsigned int fn;
|
||||
};
|
||||
|
||||
struct simplify;
|
||||
class simplify;
|
||||
|
||||
/* Identifier that maps to a user-defined predicate. */
|
||||
|
||||
@ -665,8 +665,8 @@ typedef hash_map<nofree_string_hash, unsigned> cid_map_t;
|
||||
|
||||
/* The AST produced by parsing of the pattern definitions. */
|
||||
|
||||
struct dt_operand;
|
||||
struct capture_info;
|
||||
class dt_operand;
|
||||
class capture_info;
|
||||
|
||||
/* The base class for operands. */
|
||||
|
||||
@ -880,7 +880,7 @@ public:
|
||||
produced when the pattern applies in the leafs.
|
||||
For a (match ...) the leafs are either empty if it is a simple predicate
|
||||
or the single expression specifying the matched operands. */
|
||||
struct operand *result;
|
||||
class operand *result;
|
||||
/* Collected 'for' expression operators that have to be replaced
|
||||
in the lowering phase. */
|
||||
vec<vec<user_id *> > for_vec;
|
||||
@ -933,7 +933,7 @@ print_operand (operand *o, FILE *f = stderr, bool flattened = false)
|
||||
}
|
||||
|
||||
DEBUG_FUNCTION void
|
||||
print_matches (struct simplify *s, FILE *f = stderr)
|
||||
print_matches (class simplify *s, FILE *f = stderr)
|
||||
{
|
||||
fprintf (f, "for expression: ");
|
||||
print_operand (s->match, f);
|
||||
@ -1583,7 +1583,7 @@ lower (vec<simplify *>& simplifiers, bool gimple)
|
||||
matching code. It represents the 'match' expression of all
|
||||
simplifies and has those as its leafs. */
|
||||
|
||||
struct dt_simplify;
|
||||
class dt_simplify;
|
||||
|
||||
/* A hash-map collecting semantically equivalent leafs in the decision
|
||||
tree for splitting out to separate functions. */
|
||||
@ -1719,7 +1719,7 @@ class decision_tree
|
||||
public:
|
||||
dt_node *root;
|
||||
|
||||
void insert (struct simplify *, unsigned);
|
||||
void insert (class simplify *, unsigned);
|
||||
void gen (FILE *f, bool gimple);
|
||||
void print (FILE *f = stderr);
|
||||
|
||||
@ -2025,7 +2025,7 @@ at_assert_elm:
|
||||
/* Insert S into the decision tree. */
|
||||
|
||||
void
|
||||
decision_tree::insert (struct simplify *s, unsigned pattern_no)
|
||||
decision_tree::insert (class simplify *s, unsigned pattern_no)
|
||||
{
|
||||
current_id = s->id;
|
||||
dt_operand **indexes = XCNEWVEC (dt_operand *, s->capture_max + 1);
|
||||
@ -4190,7 +4190,7 @@ parser::parse_operation ()
|
||||
/* Parse a capture.
|
||||
capture = '@'<number> */
|
||||
|
||||
struct operand *
|
||||
class operand *
|
||||
parser::parse_capture (operand *op, bool require_existing)
|
||||
{
|
||||
location_t src_loc = eat_token (CPP_ATSIGN)->src_loc;
|
||||
@ -4227,7 +4227,7 @@ parser::parse_capture (operand *op, bool require_existing)
|
||||
/* Parse an expression
|
||||
expr = '(' <operation>[capture][flag][type] <operand>... ')' */
|
||||
|
||||
struct operand *
|
||||
class operand *
|
||||
parser::parse_expr ()
|
||||
{
|
||||
const cpp_token *token = peek ();
|
||||
@ -4395,11 +4395,11 @@ parser::parse_c_expr (cpp_ttype start)
|
||||
a standalone capture.
|
||||
op = predicate | expr | c_expr | capture */
|
||||
|
||||
struct operand *
|
||||
class operand *
|
||||
parser::parse_op ()
|
||||
{
|
||||
const cpp_token *token = peek ();
|
||||
struct operand *op = NULL;
|
||||
class operand *op = NULL;
|
||||
if (token->type == CPP_OPEN_PAREN)
|
||||
{
|
||||
eat_token (CPP_OPEN_PAREN);
|
||||
@ -4618,7 +4618,7 @@ parser::parse_simplify (simplify::simplify_kind kind,
|
||||
|
||||
const cpp_token *loc = peek ();
|
||||
parsing_match_operand = true;
|
||||
struct operand *match = parse_op ();
|
||||
class operand *match = parse_op ();
|
||||
finish_match_operand (match);
|
||||
parsing_match_operand = false;
|
||||
if (match->type == operand::OP_CAPTURE && !matcher)
|
||||
@ -5090,7 +5090,7 @@ main (int argc, char **argv)
|
||||
}
|
||||
}
|
||||
|
||||
line_table = XCNEW (struct line_maps);
|
||||
line_table = XCNEW (class line_maps);
|
||||
linemap_init (line_table, 0);
|
||||
line_table->reallocator = xrealloc;
|
||||
line_table->round_alloc_size = round_alloc_size;
|
||||
|
@ -146,7 +146,7 @@ static struct operand_data **odata_end = &null_operand.next;
|
||||
class data
|
||||
{
|
||||
public:
|
||||
struct data *next;
|
||||
class data *next;
|
||||
const char *name;
|
||||
const char *template_code;
|
||||
file_location loc;
|
||||
@ -161,29 +161,29 @@ public:
|
||||
};
|
||||
|
||||
/* This variable points to the first link in the insn chain. */
|
||||
static struct data *idata;
|
||||
static class data *idata;
|
||||
|
||||
/* This variable points to the end of the insn chain. This is where
|
||||
everything relevant from the machien description is appended to. */
|
||||
static struct data **idata_end;
|
||||
static class data **idata_end;
|
||||
|
||||
|
||||
static void output_prologue (void);
|
||||
static void output_operand_data (void);
|
||||
static void output_insn_data (void);
|
||||
static void output_get_insn_name (void);
|
||||
static void scan_operands (struct data *, rtx, int, int);
|
||||
static void scan_operands (class data *, rtx, int, int);
|
||||
static int compare_operands (struct operand_data *,
|
||||
struct operand_data *);
|
||||
static void place_operands (struct data *);
|
||||
static void process_template (struct data *, const char *);
|
||||
static void validate_insn_alternatives (struct data *);
|
||||
static void validate_insn_operands (struct data *);
|
||||
static void place_operands (class data *);
|
||||
static void process_template (class data *, const char *);
|
||||
static void validate_insn_alternatives (class data *);
|
||||
static void validate_insn_operands (class data *);
|
||||
|
||||
class constraint_data
|
||||
{
|
||||
public:
|
||||
struct constraint_data *next_this_letter;
|
||||
class constraint_data *next_this_letter;
|
||||
file_location loc;
|
||||
unsigned int namelen;
|
||||
char name[1];
|
||||
@ -193,7 +193,7 @@ public:
|
||||
are handled outside the define*_constraint mechanism. */
|
||||
static const char indep_constraints[] = ",=+%*?!^$#&g";
|
||||
|
||||
static struct constraint_data *
|
||||
static class constraint_data *
|
||||
constraints_by_letter_table[1 << CHAR_BIT];
|
||||
|
||||
static int mdep_constraint_len (const char *, file_location, int);
|
||||
@ -277,12 +277,12 @@ output_operand_data (void)
|
||||
static void
|
||||
output_insn_data (void)
|
||||
{
|
||||
struct data *d;
|
||||
class data *d;
|
||||
int name_offset = 0;
|
||||
int next_name_offset;
|
||||
const char * last_name = 0;
|
||||
const char * next_name = 0;
|
||||
struct data *n;
|
||||
class data *n;
|
||||
|
||||
for (n = idata, next_name_offset = 1; n; n = n->next, next_name_offset++)
|
||||
if (n->name)
|
||||
@ -423,7 +423,7 @@ output_get_insn_name (void)
|
||||
THIS_STRICT_LOW is nonzero if the containing rtx was a STRICT_LOW_PART. */
|
||||
|
||||
static void
|
||||
scan_operands (struct data *d, rtx part, int this_address_p,
|
||||
scan_operands (class data *d, rtx part, int this_address_p,
|
||||
int this_strict_low)
|
||||
{
|
||||
int i, j;
|
||||
@ -565,7 +565,7 @@ compare_operands (struct operand_data *d0, struct operand_data *d1)
|
||||
find a subsequence that is the same, or allocate a new one at the end. */
|
||||
|
||||
static void
|
||||
place_operands (struct data *d)
|
||||
place_operands (class data *d)
|
||||
{
|
||||
struct operand_data *od, *od2;
|
||||
int i;
|
||||
@ -619,7 +619,7 @@ place_operands (struct data *d)
|
||||
templates, or C code to generate the assembler code template. */
|
||||
|
||||
static void
|
||||
process_template (struct data *d, const char *template_code)
|
||||
process_template (class data *d, const char *template_code)
|
||||
{
|
||||
const char *cp;
|
||||
int i;
|
||||
@ -742,7 +742,7 @@ process_template (struct data *d, const char *template_code)
|
||||
/* Check insn D for consistency in number of constraint alternatives. */
|
||||
|
||||
static void
|
||||
validate_insn_alternatives (struct data *d)
|
||||
validate_insn_alternatives (class data *d)
|
||||
{
|
||||
int n = 0, start;
|
||||
|
||||
@ -825,7 +825,7 @@ validate_insn_alternatives (struct data *d)
|
||||
/* Verify that there are no gaps in operand numbers for INSNs. */
|
||||
|
||||
static void
|
||||
validate_insn_operands (struct data *d)
|
||||
validate_insn_operands (class data *d)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -835,7 +835,7 @@ validate_insn_operands (struct data *d)
|
||||
}
|
||||
|
||||
static void
|
||||
validate_optab_operands (struct data *d)
|
||||
validate_optab_operands (class data *d)
|
||||
{
|
||||
if (!d->name || d->name[0] == '\0' || d->name[0] == '*')
|
||||
return;
|
||||
@ -980,7 +980,7 @@ gen_expand (md_rtx_info *info)
|
||||
static void
|
||||
init_insn_for_nothing (void)
|
||||
{
|
||||
idata = XCNEW (struct data);
|
||||
idata = XCNEW (class data);
|
||||
new (idata) data ();
|
||||
idata->name = "*placeholder_for_nothing";
|
||||
idata->loc = file_location ("<internal>", 0, 0);
|
||||
@ -1088,7 +1088,7 @@ note_constraint (md_rtx_info *info)
|
||||
{
|
||||
rtx exp = info->def;
|
||||
const char *name = XSTR (exp, 0);
|
||||
struct constraint_data **iter, **slot, *new_cdata;
|
||||
class constraint_data **iter, **slot, *new_cdata;
|
||||
|
||||
if (strcmp (name, "TARGET_MEM_CONSTRAINT") == 0)
|
||||
name = general_mem;
|
||||
@ -1138,8 +1138,8 @@ note_constraint (md_rtx_info *info)
|
||||
return;
|
||||
}
|
||||
}
|
||||
new_cdata = XNEWVAR (struct constraint_data,
|
||||
sizeof (struct constraint_data) + namelen);
|
||||
new_cdata = XNEWVAR (class constraint_data,
|
||||
sizeof (class constraint_data) + namelen);
|
||||
new (new_cdata) constraint_data ();
|
||||
strcpy (CONST_CAST (char *, new_cdata->name), name);
|
||||
new_cdata->namelen = namelen;
|
||||
@ -1155,7 +1155,7 @@ note_constraint (md_rtx_info *info)
|
||||
static int
|
||||
mdep_constraint_len (const char *s, file_location loc, int opno)
|
||||
{
|
||||
struct constraint_data *p;
|
||||
class constraint_data *p;
|
||||
|
||||
p = constraints_by_letter_table[(unsigned int)s[0]];
|
||||
|
||||
|
@ -669,8 +669,8 @@ write_one_predicate_function (struct pred_data *p)
|
||||
class constraint_data
|
||||
{
|
||||
public:
|
||||
struct constraint_data *next_this_letter;
|
||||
struct constraint_data *next_textual;
|
||||
class constraint_data *next_this_letter;
|
||||
class constraint_data *next_textual;
|
||||
const char *name;
|
||||
const char *c_name; /* same as .name unless mangling is necessary */
|
||||
file_location loc; /* location of definition */
|
||||
@ -690,13 +690,13 @@ public:
|
||||
|
||||
/* Overview of all constraints beginning with a given letter. */
|
||||
|
||||
static struct constraint_data *
|
||||
static class constraint_data *
|
||||
constraints_by_letter_table[1<<CHAR_BIT];
|
||||
|
||||
/* For looking up all the constraints in the order that they appeared
|
||||
in the machine description. */
|
||||
static struct constraint_data *first_constraint;
|
||||
static struct constraint_data **last_constraint_ptr = &first_constraint;
|
||||
static class constraint_data *first_constraint;
|
||||
static class constraint_data **last_constraint_ptr = &first_constraint;
|
||||
|
||||
#define FOR_ALL_CONSTRAINTS(iter_) \
|
||||
for (iter_ = first_constraint; iter_; iter_ = iter_->next_textual)
|
||||
@ -775,7 +775,7 @@ add_constraint (const char *name, const char *regclass,
|
||||
rtx exp, bool is_memory, bool is_special_memory,
|
||||
bool is_address, file_location loc)
|
||||
{
|
||||
struct constraint_data *c, **iter, **slot;
|
||||
class constraint_data *c, **iter, **slot;
|
||||
const char *p;
|
||||
bool need_mangled_name = false;
|
||||
bool is_const_int;
|
||||
@ -909,7 +909,7 @@ add_constraint (const char *name, const char *regclass,
|
||||
}
|
||||
|
||||
|
||||
c = XOBNEW (rtl_obstack, struct constraint_data);
|
||||
c = XOBNEW (rtl_obstack, class constraint_data);
|
||||
c->name = name;
|
||||
c->c_name = need_mangled_name ? mangle (name) : name;
|
||||
c->loc = loc;
|
||||
@ -980,7 +980,7 @@ process_define_register_constraint (md_rtx_info *info)
|
||||
static void
|
||||
choose_enum_order (void)
|
||||
{
|
||||
struct constraint_data *c;
|
||||
class constraint_data *c;
|
||||
|
||||
enum_order = XNEWVEC (const constraint_data *, num_constraints);
|
||||
unsigned int next = 0;
|
||||
@ -1077,7 +1077,7 @@ write_lookup_constraint_1 (void)
|
||||
|
||||
for (i = 0; i < ARRAY_SIZE (constraints_by_letter_table); i++)
|
||||
{
|
||||
struct constraint_data *c = constraints_by_letter_table[i];
|
||||
class constraint_data *c = constraints_by_letter_table[i];
|
||||
if (!c)
|
||||
continue;
|
||||
|
||||
@ -1117,7 +1117,7 @@ write_lookup_constraint_array (void)
|
||||
{
|
||||
if (i != 0)
|
||||
printf (",\n ");
|
||||
struct constraint_data *c = constraints_by_letter_table[i];
|
||||
class constraint_data *c = constraints_by_letter_table[i];
|
||||
if (!c)
|
||||
printf ("CONSTRAINT__UNKNOWN");
|
||||
else if (c->namelen == 1)
|
||||
@ -1143,7 +1143,7 @@ write_insn_constraint_len (void)
|
||||
|
||||
for (i = 0; i < ARRAY_SIZE (constraints_by_letter_table); i++)
|
||||
{
|
||||
struct constraint_data *c = constraints_by_letter_table[i];
|
||||
class constraint_data *c = constraints_by_letter_table[i];
|
||||
|
||||
if (!c
|
||||
|| c->namelen == 1)
|
||||
@ -1152,7 +1152,7 @@ write_insn_constraint_len (void)
|
||||
/* Constraints with multiple characters should have the same
|
||||
length. */
|
||||
{
|
||||
struct constraint_data *c2 = c->next_this_letter;
|
||||
class constraint_data *c2 = c->next_this_letter;
|
||||
size_t len = c->namelen;
|
||||
while (c2)
|
||||
{
|
||||
@ -1178,7 +1178,7 @@ write_insn_constraint_len (void)
|
||||
static void
|
||||
write_reg_class_for_constraint_1 (void)
|
||||
{
|
||||
struct constraint_data *c;
|
||||
class constraint_data *c;
|
||||
|
||||
puts ("enum reg_class\n"
|
||||
"reg_class_for_constraint_1 (enum constraint_num c)\n"
|
||||
@ -1201,7 +1201,7 @@ write_reg_class_for_constraint_1 (void)
|
||||
static void
|
||||
write_tm_constrs_h (void)
|
||||
{
|
||||
struct constraint_data *c;
|
||||
class constraint_data *c;
|
||||
|
||||
printf ("\
|
||||
/* Generated automatically by the program '%s'\n\
|
||||
@ -1288,7 +1288,7 @@ write_constraint_satisfied_p_array (void)
|
||||
static void
|
||||
write_insn_const_int_ok_for_constraint (void)
|
||||
{
|
||||
struct constraint_data *c;
|
||||
class constraint_data *c;
|
||||
|
||||
puts ("bool\n"
|
||||
"insn_const_int_ok_for_constraint (HOST_WIDE_INT ival, "
|
||||
|
@ -950,7 +950,7 @@ list_head <T>::singleton () const
|
||||
return first == last ? first : 0;
|
||||
}
|
||||
|
||||
struct state;
|
||||
class state;
|
||||
|
||||
/* Describes a possible successful return from a routine. */
|
||||
struct acceptance_type
|
||||
@ -1499,7 +1499,7 @@ operator != (const int_set &a, const int_set &b)
|
||||
return !operator == (a, b);
|
||||
}
|
||||
|
||||
struct decision;
|
||||
class decision;
|
||||
|
||||
/* Represents a transition between states, dependent on the result of
|
||||
a test T. */
|
||||
@ -2244,7 +2244,7 @@ optimize_subroutine_group (const char *type, state *root)
|
||||
st.longest_backtrack, st.longest_backtrack_code);
|
||||
}
|
||||
|
||||
struct merge_pattern_info;
|
||||
class merge_pattern_info;
|
||||
|
||||
/* Represents a transition from one pattern to another. */
|
||||
class merge_pattern_transition
|
||||
|
gcc/gensupport.c (110 lines changed)
@@ -70,55 +70,55 @@ class queue_elem
 public:
 rtx data;
 file_location loc;
-struct queue_elem *next;
+class queue_elem *next;
 /* In a DEFINE_INSN that came from a DEFINE_INSN_AND_SPLIT or
 DEFINE_INSN_AND_REWRITE, SPLIT points to the generated DEFINE_SPLIT. */
-struct queue_elem *split;
+class queue_elem *split;
 };

 #define MNEMONIC_ATTR_NAME "mnemonic"
 #define MNEMONIC_HTAB_SIZE 1024

-static struct queue_elem *define_attr_queue;
-static struct queue_elem **define_attr_tail = &define_attr_queue;
-static struct queue_elem *define_pred_queue;
-static struct queue_elem **define_pred_tail = &define_pred_queue;
-static struct queue_elem *define_insn_queue;
-static struct queue_elem **define_insn_tail = &define_insn_queue;
-static struct queue_elem *define_cond_exec_queue;
-static struct queue_elem **define_cond_exec_tail = &define_cond_exec_queue;
-static struct queue_elem *define_subst_queue;
-static struct queue_elem **define_subst_tail = &define_subst_queue;
-static struct queue_elem *other_queue;
-static struct queue_elem **other_tail = &other_queue;
-static struct queue_elem *define_subst_attr_queue;
-static struct queue_elem **define_subst_attr_tail = &define_subst_attr_queue;
+static class queue_elem *define_attr_queue;
+static class queue_elem **define_attr_tail = &define_attr_queue;
+static class queue_elem *define_pred_queue;
+static class queue_elem **define_pred_tail = &define_pred_queue;
+static class queue_elem *define_insn_queue;
+static class queue_elem **define_insn_tail = &define_insn_queue;
+static class queue_elem *define_cond_exec_queue;
+static class queue_elem **define_cond_exec_tail = &define_cond_exec_queue;
+static class queue_elem *define_subst_queue;
+static class queue_elem **define_subst_tail = &define_subst_queue;
+static class queue_elem *other_queue;
+static class queue_elem **other_tail = &other_queue;
+static class queue_elem *define_subst_attr_queue;
+static class queue_elem **define_subst_attr_tail = &define_subst_attr_queue;

 /* Mapping from DEFINE_* rtxes to their location in the source file. */
 static hash_map <rtx, file_location> *rtx_locs;

 static void remove_constraints (rtx);

-static int is_predicable (struct queue_elem *);
+static int is_predicable (class queue_elem *);
 static void identify_predicable_attribute (void);
 static int n_alternatives (const char *);
 static void collect_insn_data (rtx, int *, int *);
-static const char *alter_test_for_insn (struct queue_elem *,
-struct queue_elem *);
+static const char *alter_test_for_insn (class queue_elem *,
+class queue_elem *);
 static char *shift_output_template (char *, const char *, int);
-static const char *alter_output_for_insn (struct queue_elem *,
-struct queue_elem *,
+static const char *alter_output_for_insn (class queue_elem *,
+class queue_elem *,
 int, int);
-static void process_one_cond_exec (struct queue_elem *);
+static void process_one_cond_exec (class queue_elem *);
 static void process_define_cond_exec (void);
 static void init_predicate_table (void);
 static void record_insn_name (int, const char *);

-static bool has_subst_attribute (struct queue_elem *, struct queue_elem *);
+static bool has_subst_attribute (class queue_elem *, class queue_elem *);
 static const char * alter_output_for_subst_insn (rtx, int);
-static void alter_attrs_for_subst_insn (struct queue_elem *, int);
-static void process_substs_on_one_elem (struct queue_elem *,
-struct queue_elem *);
+static void alter_attrs_for_subst_insn (class queue_elem *, int);
+static void process_substs_on_one_elem (class queue_elem *,
+class queue_elem *);
 static rtx subst_dup (rtx, int, int);
 static void process_define_subst (void);
@ -400,11 +400,11 @@ process_define_predicate (rtx desc, file_location loc)
|
||||
/* Queue PATTERN on LIST_TAIL. Return the address of the new queue
|
||||
element. */
|
||||
|
||||
static struct queue_elem *
|
||||
queue_pattern (rtx pattern, struct queue_elem ***list_tail,
|
||||
static class queue_elem *
|
||||
queue_pattern (rtx pattern, class queue_elem ***list_tail,
|
||||
file_location loc)
|
||||
{
|
||||
struct queue_elem *e = XNEW (struct queue_elem);
|
||||
class queue_elem *e = XNEW (class queue_elem);
|
||||
e->data = pattern;
|
||||
e->loc = loc;
|
||||
e->next = NULL;
|
||||
@ -416,9 +416,9 @@ queue_pattern (rtx pattern, struct queue_elem ***list_tail,
|
||||
|
||||
/* Remove element ELEM from QUEUE. */
|
||||
static void
|
||||
remove_from_queue (struct queue_elem *elem, struct queue_elem **queue)
|
||||
remove_from_queue (class queue_elem *elem, class queue_elem **queue)
|
||||
{
|
||||
struct queue_elem *prev, *e;
|
||||
class queue_elem *prev, *e;
|
||||
prev = NULL;
|
||||
for (e = *queue; e ; e = e->next)
|
||||
{
|
||||
@ -440,7 +440,7 @@ remove_from_queue (struct queue_elem *elem, struct queue_elem **queue)
|
||||
static void
|
||||
add_define_attr (const char *name)
|
||||
{
|
||||
struct queue_elem *e = XNEW (struct queue_elem);
|
||||
class queue_elem *e = XNEW (class queue_elem);
|
||||
rtx t1 = rtx_alloc (DEFINE_ATTR);
|
||||
XSTR (t1, 0) = name;
|
||||
XSTR (t1, 1) = "no,yes";
|
||||
@ -591,8 +591,8 @@ process_rtx (rtx desc, file_location loc)
|
||||
rtx split;
|
||||
rtvec attr;
|
||||
int i;
|
||||
struct queue_elem *insn_elem;
|
||||
struct queue_elem *split_elem;
|
||||
class queue_elem *insn_elem;
|
||||
class queue_elem *split_elem;
|
||||
int split_code = (GET_CODE (desc) == DEFINE_INSN_AND_REWRITE ? 5 : 6);
|
||||
|
||||
/* Create a split with values from the insn_and_split. */
|
||||
@ -646,7 +646,7 @@ process_rtx (rtx desc, file_location loc)
|
||||
a DEFINE_INSN. */
|
||||
|
||||
static int
|
||||
is_predicable (struct queue_elem *elem)
|
||||
is_predicable (class queue_elem *elem)
|
||||
{
|
||||
rtvec vec = XVEC (elem->data, 4);
|
||||
const char *value;
|
||||
@ -716,8 +716,8 @@ is_predicable (struct queue_elem *elem)
|
||||
|
||||
/* Find attribute SUBST in ELEM and assign NEW_VALUE to it. */
|
||||
static void
|
||||
change_subst_attribute (struct queue_elem *elem,
|
||||
struct queue_elem *subst_elem,
|
||||
change_subst_attribute (class queue_elem *elem,
|
||||
class queue_elem *subst_elem,
|
||||
const char *new_value)
|
||||
{
|
||||
rtvec attrs_vec = XVEC (elem->data, 4);
|
||||
@ -746,7 +746,7 @@ change_subst_attribute (struct queue_elem *elem,
|
||||
words, we suppose the default value of the attribute to be 'no' since it is
|
||||
always generated automatically in read-rtl.c. */
|
||||
static bool
|
||||
has_subst_attribute (struct queue_elem *elem, struct queue_elem *subst_elem)
|
||||
has_subst_attribute (class queue_elem *elem, class queue_elem *subst_elem)
|
||||
{
|
||||
rtvec attrs_vec = XVEC (elem->data, 4);
|
||||
const char *value, *subst_name = XSTR (subst_elem->data, 0);
|
||||
@ -979,7 +979,7 @@ subst_pattern_match (rtx x, rtx pt, file_location loc)
|
||||
static void
|
||||
identify_predicable_attribute (void)
|
||||
{
|
||||
struct queue_elem *elem;
|
||||
class queue_elem *elem;
|
||||
char *p_true, *p_false;
|
||||
const char *value;
|
||||
|
||||
@ -1327,8 +1327,8 @@ alter_constraints (rtx pattern, int n_dup, constraints_handler_t alter)
|
||||
}
|
||||
|
||||
static const char *
|
||||
alter_test_for_insn (struct queue_elem *ce_elem,
|
||||
struct queue_elem *insn_elem)
|
||||
alter_test_for_insn (class queue_elem *ce_elem,
|
||||
class queue_elem *insn_elem)
|
||||
{
|
||||
return rtx_reader_ptr->join_c_conditions (XSTR (ce_elem->data, 1),
|
||||
XSTR (insn_elem->data, 2));
|
||||
@ -1439,7 +1439,7 @@ alter_attrs_for_insn (rtx insn)
|
||||
|
||||
if (!global_changes_made)
|
||||
{
|
||||
struct queue_elem *elem;
|
||||
class queue_elem *elem;
|
||||
|
||||
global_changes_made = true;
|
||||
add_define_attr ("ce_enabled");
|
||||
@ -1480,7 +1480,7 @@ alter_attrs_for_insn (rtx insn)
|
||||
ELEM is a queue element, containing our rtl-template,
|
||||
N_DUP - multiplication factor. */
|
||||
static void
|
||||
alter_attrs_for_subst_insn (struct queue_elem * elem, int n_dup)
|
||||
alter_attrs_for_subst_insn (class queue_elem * elem, int n_dup)
|
||||
{
|
||||
rtvec vec = XVEC (elem->data, 4);
|
||||
int num_elem;
|
||||
@ -1543,8 +1543,8 @@ shift_output_template (char *dest, const char *src, int disp)
|
||||
}
|
||||
|
||||
static const char *
|
||||
alter_output_for_insn (struct queue_elem *ce_elem,
|
||||
struct queue_elem *insn_elem,
|
||||
alter_output_for_insn (class queue_elem *ce_elem,
|
||||
class queue_elem *insn_elem,
|
||||
int alt, int max_op)
|
||||
{
|
||||
const char *ce_out, *insn_out;
|
||||
@ -1732,9 +1732,9 @@ alter_output_for_subst_insn (rtx insn, int alt)
|
||||
/* Replicate insns as appropriate for the given DEFINE_COND_EXEC. */
|
||||
|
||||
static void
|
||||
process_one_cond_exec (struct queue_elem *ce_elem)
|
||||
process_one_cond_exec (class queue_elem *ce_elem)
|
||||
{
|
||||
struct queue_elem *insn_elem;
|
||||
class queue_elem *insn_elem;
|
||||
for (insn_elem = define_insn_queue; insn_elem ; insn_elem = insn_elem->next)
|
||||
{
|
||||
int alternatives, max_operand;
|
||||
@ -1838,10 +1838,10 @@ process_one_cond_exec (struct queue_elem *ce_elem)
|
||||
was applied, ELEM would be deleted. */
|
||||
|
||||
static void
|
||||
process_substs_on_one_elem (struct queue_elem *elem,
|
||||
struct queue_elem *queue)
|
||||
process_substs_on_one_elem (class queue_elem *elem,
|
||||
class queue_elem *queue)
|
||||
{
|
||||
struct queue_elem *subst_elem;
|
||||
class queue_elem *subst_elem;
|
||||
int i, j, patterns_match;
|
||||
|
||||
for (subst_elem = define_subst_queue;
|
||||
@ -2248,7 +2248,7 @@ subst_dup (rtx pattern, int n_alt, int n_subst_alt)
|
||||
static void
|
||||
process_define_cond_exec (void)
|
||||
{
|
||||
struct queue_elem *elem;
|
||||
class queue_elem *elem;
|
||||
|
||||
identify_predicable_attribute ();
|
||||
if (have_error)
|
||||
@ -2264,7 +2264,7 @@ process_define_cond_exec (void)
|
||||
static void
|
||||
process_define_subst (void)
|
||||
{
|
||||
struct queue_elem *elem, *elem_attr;
|
||||
class queue_elem *elem, *elem_attr;
|
||||
|
||||
/* Check if each define_subst has corresponding define_subst_attr. */
|
||||
for (elem = define_subst_queue; elem ; elem = elem->next)
|
||||
@ -2475,7 +2475,7 @@ mnemonic_htab_callback (void **slot, void *info ATTRIBUTE_UNUSED)
|
||||
static void
|
||||
gen_mnemonic_attr (void)
|
||||
{
|
||||
struct queue_elem *elem;
|
||||
class queue_elem *elem;
|
||||
rtx mnemonic_attr = NULL;
|
||||
htab_t mnemonic_htab;
|
||||
const char *str, *p;
|
||||
@ -2552,7 +2552,7 @@ gen_mnemonic_attr (void)
|
||||
static void
|
||||
check_define_attr_duplicates ()
|
||||
{
|
||||
struct queue_elem *elem;
|
||||
class queue_elem *elem;
|
||||
htab_t attr_htab;
|
||||
char * attr_name;
|
||||
void **slot;
|
||||
@ -2648,7 +2648,7 @@ read_md_rtx (md_rtx_info *info)
|
||||
to use elided pattern numbers for anything. */
|
||||
do
|
||||
{
|
||||
struct queue_elem **queue, *elem;
|
||||
class queue_elem **queue, *elem;
|
||||
|
||||
/* Read all patterns from a given queue before moving on to the next. */
|
||||
if (define_attr_queue != NULL)
|
||||
|
@@ -200,7 +200,7 @@ static const size_t extra_order_size_table[] = {
 sizeof (struct function),
 sizeof (struct basic_block_def),
 sizeof (struct cgraph_node),
-sizeof (struct loop),
+sizeof (class loop),
 };

 /* The total number of orders. */
@ -159,7 +159,7 @@ dump_reduction (reduction_p re)
|
||||
|
||||
/* Dump LOOP's induction IV. */
|
||||
static void
|
||||
dump_induction (struct loop *loop, induction_p iv)
|
||||
dump_induction (class loop *loop, induction_p iv)
|
||||
{
|
||||
fprintf (dump_file, " Induction: ");
|
||||
print_generic_expr (dump_file, iv->var, TDF_SLIM);
|
||||
@ -175,7 +175,7 @@ dump_induction (struct loop *loop, induction_p iv)
|
||||
class loop_cand
|
||||
{
|
||||
public:
|
||||
loop_cand (struct loop *, struct loop *);
|
||||
loop_cand (class loop *, class loop *);
|
||||
~loop_cand ();
|
||||
|
||||
reduction_p find_reduction_by_stmt (gimple *);
|
||||
@ -189,10 +189,10 @@ public:
|
||||
void undo_simple_reduction (reduction_p, bitmap);
|
||||
|
||||
/* The loop itself. */
|
||||
struct loop *m_loop;
|
||||
class loop *m_loop;
|
||||
/* The outer loop for interchange. It equals to loop if this loop cand
|
||||
itself represents the outer loop. */
|
||||
struct loop *m_outer;
|
||||
class loop *m_outer;
|
||||
/* Vector of induction variables in loop. */
|
||||
vec<induction_p> m_inductions;
|
||||
/* Vector of reduction variables in loop. */
|
||||
@ -211,7 +211,7 @@ public:
|
||||
|
||||
/* Constructor. */
|
||||
|
||||
loop_cand::loop_cand (struct loop *loop, struct loop *outer)
|
||||
loop_cand::loop_cand (class loop *loop, class loop *outer)
|
||||
: m_loop (loop), m_outer (outer), m_exit (single_exit (loop)),
|
||||
m_bbs (get_loop_body (loop)), m_num_stmts (0), m_const_init_reduc (0)
|
||||
{
|
||||
@ -241,7 +241,7 @@ loop_cand::~loop_cand ()
|
||||
/* Return single use stmt of VAR in LOOP, otherwise return NULL. */
|
||||
|
||||
static gimple *
|
||||
single_use_in_loop (tree var, struct loop *loop)
|
||||
single_use_in_loop (tree var, class loop *loop)
|
||||
{
|
||||
gimple *stmt, *res = NULL;
|
||||
use_operand_p use_p;
|
||||
@ -951,7 +951,7 @@ free_data_refs_with_aux (vec<data_reference_p> datarefs)
|
||||
class tree_loop_interchange
|
||||
{
|
||||
public:
|
||||
tree_loop_interchange (vec<struct loop *> loop_nest)
|
||||
tree_loop_interchange (vec<class loop *> loop_nest)
|
||||
: m_loop_nest (loop_nest), m_niters_iv_var (NULL_TREE),
|
||||
m_dce_seeds (BITMAP_ALLOC (NULL)) { }
|
||||
~tree_loop_interchange () { BITMAP_FREE (m_dce_seeds); }
|
||||
@ -962,10 +962,10 @@ private:
|
||||
bool valid_data_dependences (unsigned, unsigned, vec<ddr_p>);
|
||||
void interchange_loops (loop_cand &, loop_cand &);
|
||||
void map_inductions_to_loop (loop_cand &, loop_cand &);
|
||||
void move_code_to_inner_loop (struct loop *, struct loop *, basic_block *);
|
||||
void move_code_to_inner_loop (class loop *, class loop *, basic_block *);
|
||||
|
||||
/* The whole loop nest in which interchange is ongoing. */
|
||||
vec<struct loop *> m_loop_nest;
|
||||
vec<class loop *> m_loop_nest;
|
||||
/* We create new IV which is only used in loop's exit condition check.
|
||||
In case of 3-level loop nest interchange, when we interchange the
|
||||
innermost two loops, new IV created in the middle level loop does
|
||||
@ -1079,7 +1079,7 @@ tree_loop_interchange::interchange_loops (loop_cand &iloop, loop_cand &oloop)
|
||||
}
|
||||
|
||||
/* Prepare niters for both loops. */
|
||||
struct loop *loop_nest = m_loop_nest[0];
|
||||
class loop *loop_nest = m_loop_nest[0];
|
||||
edge instantiate_below = loop_preheader_edge (loop_nest);
|
||||
gsi = gsi_last_bb (loop_preheader_edge (loop_nest)->src);
|
||||
i_niters = number_of_latch_executions (iloop.m_loop);
|
||||
@ -1214,8 +1214,8 @@ tree_loop_interchange::map_inductions_to_loop (loop_cand &src, loop_cand &tgt)
|
||||
/* Move stmts of outer loop to inner loop. */
|
||||
|
||||
void
|
||||
tree_loop_interchange::move_code_to_inner_loop (struct loop *outer,
|
||||
struct loop *inner,
|
||||
tree_loop_interchange::move_code_to_inner_loop (class loop *outer,
|
||||
class loop *inner,
|
||||
basic_block *outer_bbs)
|
||||
{
|
||||
basic_block oloop_exit_bb = single_exit (outer)->src;
|
||||
@ -1276,7 +1276,7 @@ tree_loop_interchange::move_code_to_inner_loop (struct loop *outer,
|
||||
arr[i][j - 1][k] = 0; */
|
||||
|
||||
static void
|
||||
compute_access_stride (struct loop *loop_nest, struct loop *loop,
|
||||
compute_access_stride (class loop *loop_nest, class loop *loop,
|
||||
data_reference_p dr)
|
||||
{
|
||||
vec<tree> *strides = new vec<tree> ();
|
||||
@ -1320,10 +1320,10 @@ compute_access_stride (struct loop *loop_nest, struct loop *loop,
|
||||
if (! chrec_contains_undetermined (scev))
|
||||
{
|
||||
tree sl = scev;
|
||||
struct loop *expected = loop;
|
||||
class loop *expected = loop;
|
||||
while (TREE_CODE (sl) == POLYNOMIAL_CHREC)
|
||||
{
|
||||
struct loop *sl_loop = get_chrec_loop (sl);
|
||||
class loop *sl_loop = get_chrec_loop (sl);
|
||||
while (sl_loop != expected)
|
||||
{
|
||||
strides->safe_push (size_int (0));
|
||||
@ -1351,8 +1351,8 @@ compute_access_stride (struct loop *loop_nest, struct loop *loop,
|
||||
all data references. If access strides cannot be computed at least
|
||||
for two levels of loop for any data reference, it returns NULL. */
|
||||
|
||||
static struct loop *
|
||||
compute_access_strides (struct loop *loop_nest, struct loop *loop,
|
||||
static class loop *
|
||||
compute_access_strides (class loop *loop_nest, class loop *loop,
|
||||
vec<data_reference_p> datarefs)
|
||||
{
|
||||
unsigned i, j, num_loops = (unsigned) -1;
|
||||
@ -1390,8 +1390,8 @@ compute_access_strides (struct loop *loop_nest, struct loop *loop,
|
||||
of loops that isn't in current LOOP_NEST. */
|
||||
|
||||
static void
|
||||
prune_access_strides_not_in_loop (struct loop *loop_nest,
|
||||
struct loop *innermost,
|
||||
prune_access_strides_not_in_loop (class loop *loop_nest,
|
||||
class loop *innermost,
|
||||
vec<data_reference_p> datarefs)
|
||||
{
|
||||
data_reference_p dr;
|
||||
@ -1712,7 +1712,7 @@ public:
|
||||
nest with LOOP. */
|
||||
|
||||
static bool
|
||||
proper_loop_form_for_interchange (struct loop *loop, struct loop **min_outer)
|
||||
proper_loop_form_for_interchange (class loop *loop, class loop **min_outer)
|
||||
{
|
||||
edge e0, e1, exit;
|
||||
|
||||
@ -1811,14 +1811,14 @@ proper_loop_form_for_interchange (struct loop *loop, struct loop **min_outer)
|
||||
should be interchanged by looking into all DATAREFS. */
|
||||
|
||||
static bool
|
||||
should_interchange_loop_nest (struct loop *loop_nest, struct loop *innermost,
|
||||
should_interchange_loop_nest (class loop *loop_nest, class loop *innermost,
|
||||
vec<data_reference_p> datarefs)
|
||||
{
|
||||
unsigned idx = loop_depth (innermost) - loop_depth (loop_nest);
|
||||
gcc_assert (idx > 0);
|
||||
|
||||
/* Check if any two adjacent loops should be interchanged. */
|
||||
for (struct loop *loop = innermost;
|
||||
for (class loop *loop = innermost;
|
||||
loop != loop_nest; loop = loop_outer (loop), idx--)
|
||||
if (should_interchange_loops (idx, idx - 1, datarefs, 0, 0,
|
||||
loop == innermost, false))
|
||||
@ -1838,7 +1838,7 @@ tree_loop_interchange_compute_ddrs (vec<loop_p> loop_nest,
|
||||
vec<ddr_p> *ddrs)
|
||||
{
|
||||
struct data_reference *a, *b;
|
||||
struct loop *innermost = loop_nest.last ();
|
||||
class loop *innermost = loop_nest.last ();
|
||||
|
||||
for (unsigned i = 0; datarefs.iterate (i, &a); ++i)
|
||||
{
|
||||
@ -1880,7 +1880,7 @@ tree_loop_interchange_compute_ddrs (vec<loop_p> loop_nest,
|
||||
/* Prune DATAREFS by removing any data reference not inside of LOOP. */
|
||||
|
||||
static inline void
|
||||
prune_datarefs_not_in_loop (struct loop *loop, vec<data_reference_p> datarefs)
|
||||
prune_datarefs_not_in_loop (class loop *loop, vec<data_reference_p> datarefs)
|
||||
{
|
||||
unsigned i, j;
|
||||
struct data_reference *dr;
|
||||
@ -1907,10 +1907,10 @@ prune_datarefs_not_in_loop (struct loop *loop, vec<data_reference_p> datarefs)
|
||||
inner loop of that basic block's father loop. On success, return the
|
||||
outer loop of the result loop nest. */
|
||||
|
||||
static struct loop *
|
||||
prepare_data_references (struct loop *loop, vec<data_reference_p> *datarefs)
|
||||
static class loop *
|
||||
prepare_data_references (class loop *loop, vec<data_reference_p> *datarefs)
|
||||
{
|
||||
struct loop *loop_nest = loop;
|
||||
class loop *loop_nest = loop;
|
||||
vec<data_reference_p> *bb_refs;
|
||||
basic_block bb, *bbs = get_loop_body_in_dom_order (loop);
|
||||
|
||||
@ -1974,11 +1974,11 @@ prepare_data_references (struct loop *loop, vec<data_reference_p> *datarefs)
|
||||
in interchange. */
|
||||
|
||||
static bool
|
||||
prepare_perfect_loop_nest (struct loop *loop, vec<loop_p> *loop_nest,
|
||||
prepare_perfect_loop_nest (class loop *loop, vec<loop_p> *loop_nest,
|
||||
vec<data_reference_p> *datarefs, vec<ddr_p> *ddrs)
|
||||
{
|
||||
struct loop *start_loop = NULL, *innermost = loop;
|
||||
struct loop *outermost = loops_for_fn (cfun)->tree_root;
|
||||
class loop *start_loop = NULL, *innermost = loop;
|
||||
class loop *outermost = loops_for_fn (cfun)->tree_root;
|
||||
|
||||
/* Find loop nest from the innermost loop. The outermost is the innermost
|
||||
outer*/
|
||||
@ -2064,7 +2064,7 @@ pass_linterchange::execute (function *fun)
|
||||
return 0;
|
||||
|
||||
bool changed_p = false;
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
|
||||
{
|
||||
vec<loop_p> loop_nest = vNULL;
|
||||
|
@ -103,11 +103,11 @@ along with GCC; see the file COPYING3. If not see
|
||||
to the OLD loop or the outer loop of OLD now is inside LOOP. */
|
||||
|
||||
static void
|
||||
merge_loop_tree (struct loop *loop, struct loop *old)
|
||||
merge_loop_tree (class loop *loop, class loop *old)
|
||||
{
|
||||
basic_block *bbs;
|
||||
int i, n;
|
||||
struct loop *subloop;
|
||||
class loop *subloop;
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
@ -186,11 +186,11 @@ bb_prevents_fusion_p (basic_block bb)
|
||||
If so return true, otherwise return false. */
|
||||
|
||||
static bool
|
||||
unroll_jam_possible_p (struct loop *outer, struct loop *loop)
|
||||
unroll_jam_possible_p (class loop *outer, class loop *loop)
|
||||
{
|
||||
basic_block *bbs;
|
||||
int i, n;
|
||||
struct tree_niter_desc niter;
|
||||
class tree_niter_desc niter;
|
||||
|
||||
/* When fusing the loops we skip the latch block
|
||||
of the first one, so it mustn't have any effects to
|
||||
@ -301,9 +301,9 @@ unroll_jam_possible_p (struct loop *outer, struct loop *loop)
|
||||
be in appropriate form. */
|
||||
|
||||
static void
|
||||
fuse_loops (struct loop *loop)
|
||||
fuse_loops (class loop *loop)
|
||||
{
|
||||
struct loop *next = loop->next;
|
||||
class loop *next = loop->next;
|
||||
|
||||
while (next)
|
||||
{
|
||||
@ -353,7 +353,7 @@ fuse_loops (struct loop *loop)
|
||||
|
||||
merge_loop_tree (loop, next);
|
||||
gcc_assert (!next->num_nodes);
|
||||
struct loop *ln = next->next;
|
||||
class loop *ln = next->next;
|
||||
delete_loop (next);
|
||||
next = ln;
|
||||
}
|
||||
@ -422,7 +422,7 @@ adjust_unroll_factor (struct data_dependence_relation *ddr,
|
||||
static unsigned int
|
||||
tree_loop_unroll_and_jam (void)
|
||||
{
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
bool changed = false;
|
||||
|
||||
gcc_assert (scev_initialized_p ());
|
||||
@ -430,7 +430,7 @@ tree_loop_unroll_and_jam (void)
|
||||
/* Go through all innermost loops. */
|
||||
FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
|
||||
{
|
||||
struct loop *outer = loop_outer (loop);
|
||||
class loop *outer = loop_outer (loop);
|
||||
|
||||
if (loop_depth (loop) < 2
|
||||
|| optimize_loop_nest_for_size_p (outer))
|
||||
@ -442,7 +442,7 @@ tree_loop_unroll_and_jam (void)
|
||||
vec<data_reference_p> datarefs;
|
||||
vec<ddr_p> dependences;
|
||||
unsigned unroll_factor, profit_unroll, removed;
|
||||
struct tree_niter_desc desc;
|
||||
class tree_niter_desc desc;
|
||||
bool unroll = false;
|
||||
|
||||
auto_vec<loop_p, 3> loop_nest;
|
||||
|
@ -190,7 +190,7 @@ public:
|
||||
|
||||
/* The loop containing STMT (cached for convenience). If multiple
|
||||
statements share the same address, they all belong to this loop. */
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
|
||||
/* A decomposition of the calculation into a sum of terms plus an
|
||||
optional base. When BASE is provided, it is never an SSA name.
|
||||
@ -229,7 +229,7 @@ public:
|
||||
|
||||
/* The outermost loop that can handle all the version checks
|
||||
described below. */
|
||||
struct loop *outermost;
|
||||
class loop *outermost;
|
||||
|
||||
/* The first entry in the list of blocks that belong to this loop
|
||||
(and not to subloops). m_next_block_in_loop provides the chain
|
||||
@ -242,7 +242,7 @@ public:
|
||||
|
||||
/* If versioning succeeds, this points the version of the loop that
|
||||
assumes the version conditions holds. */
|
||||
struct loop *optimized_loop;
|
||||
class loop *optimized_loop;
|
||||
};
|
||||
|
||||
/* The main pass structure. */
|
||||
@ -285,9 +285,9 @@ private:
|
||||
loop_info &m_li;
|
||||
};
|
||||
|
||||
loop_info &get_loop_info (struct loop *loop) { return m_loops[loop->num]; }
|
||||
loop_info &get_loop_info (class loop *loop) { return m_loops[loop->num]; }
|
||||
|
||||
unsigned int max_insns_for_loop (struct loop *);
|
||||
unsigned int max_insns_for_loop (class loop *);
|
||||
bool expensive_stmt_p (gimple *);
|
||||
|
||||
void version_for_unity (gimple *, tree);
|
||||
@ -298,7 +298,7 @@ private:
|
||||
inner_likelihood get_inner_likelihood (tree, unsigned HOST_WIDE_INT);
|
||||
void dump_inner_likelihood (address_info &, address_term_info &);
|
||||
void analyze_stride (address_info &, address_term_info &,
|
||||
tree, struct loop *);
|
||||
tree, class loop *);
|
||||
bool find_per_loop_multiplication (address_info &, address_term_info &);
|
||||
bool analyze_term_using_scevs (address_info &, address_term_info &);
|
||||
void analyze_arbitrary_term (address_info &, address_term_info &);
|
||||
@ -309,15 +309,15 @@ private:
|
||||
bool analyze_block (basic_block);
|
||||
bool analyze_blocks ();
|
||||
|
||||
void prune_loop_conditions (struct loop *, vr_values *);
|
||||
void prune_loop_conditions (class loop *, vr_values *);
|
||||
bool prune_conditions ();
|
||||
|
||||
void merge_loop_info (struct loop *, struct loop *);
|
||||
void add_loop_to_queue (struct loop *);
|
||||
bool decide_whether_loop_is_versionable (struct loop *);
|
||||
void merge_loop_info (class loop *, class loop *);
|
||||
void add_loop_to_queue (class loop *);
|
||||
bool decide_whether_loop_is_versionable (class loop *);
|
||||
bool make_versioning_decisions ();
|
||||
|
||||
bool version_loop (struct loop *);
|
||||
bool version_loop (class loop *);
|
||||
void implement_versioning_decisions ();
|
||||
|
||||
/* The function we're optimizing. */
|
||||
@ -348,7 +348,7 @@ private:
|
||||
auto_vec<basic_block> m_next_block_in_loop;
|
||||
|
||||
/* The list of loops that we've decided to version. */
|
||||
auto_vec<struct loop *> m_loops_to_version;
|
||||
auto_vec<class loop *> m_loops_to_version;
|
||||
|
||||
/* A table of addresses in the current loop, keyed off their values
|
||||
but not their offsets. */
|
||||
@ -602,7 +602,7 @@ loop_versioning::~loop_versioning ()
|
||||
interchange or outer-loop vectorization). */
|
||||
|
||||
unsigned int
|
||||
loop_versioning::max_insns_for_loop (struct loop *loop)
|
||||
loop_versioning::max_insns_for_loop (class loop *loop)
|
||||
{
|
||||
return (loop->inner
|
||||
? PARAM_VALUE (PARAM_LOOP_VERSIONING_MAX_OUTER_INSNS)
|
||||
@ -633,7 +633,7 @@ loop_versioning::expensive_stmt_p (gimple *stmt)
|
||||
void
|
||||
loop_versioning::version_for_unity (gimple *stmt, tree name)
|
||||
{
|
||||
struct loop *loop = loop_containing_stmt (stmt);
|
||||
class loop *loop = loop_containing_stmt (stmt);
|
||||
loop_info &li = get_loop_info (loop);
|
||||
|
||||
if (bitmap_set_bit (&li.unity_names, SSA_NAME_VERSION (name)))
|
||||
@ -641,7 +641,7 @@ loop_versioning::version_for_unity (gimple *stmt, tree name)
|
||||
/* This is the first time we've wanted to version LOOP for NAME.
|
||||
Keep track of the outermost loop that can handle all versioning
|
||||
checks in LI. */
|
||||
struct loop *outermost
|
||||
class loop *outermost
|
||||
= outermost_invariant_loop_for_expr (loop, name);
|
||||
if (loop_depth (li.outermost) < loop_depth (outermost))
|
||||
li.outermost = outermost;
|
||||
@ -834,7 +834,7 @@ loop_versioning::dump_inner_likelihood (address_info &address,
|
||||
void
|
||||
loop_versioning::analyze_stride (address_info &address,
|
||||
address_term_info &term,
|
||||
tree stride, struct loop *op_loop)
|
||||
tree stride, class loop *op_loop)
|
||||
{
|
||||
term.stride = stride;
|
||||
|
||||
@ -895,7 +895,7 @@ loop_versioning::find_per_loop_multiplication (address_info &address,
|
||||
if (!mult || gimple_assign_rhs_code (mult) != MULT_EXPR)
|
||||
return false;
|
||||
|
||||
struct loop *mult_loop = loop_containing_stmt (mult);
|
||||
class loop *mult_loop = loop_containing_stmt (mult);
|
||||
if (!loop_outer (mult_loop))
|
||||
return false;
|
||||
|
||||
@ -937,7 +937,7 @@ loop_versioning::analyze_term_using_scevs (address_info &address,
|
||||
if (!setter)
|
||||
return false;
|
||||
|
||||
struct loop *wrt_loop = loop_containing_stmt (setter);
|
||||
class loop *wrt_loop = loop_containing_stmt (setter);
|
||||
if (!loop_outer (wrt_loop))
|
||||
return false;
|
||||
|
||||
@ -1199,7 +1199,7 @@ loop_versioning::record_address_fragment (gimple *stmt,
|
||||
|
||||
/* Quick exit if no part of the address is calculated in STMT's loop,
|
||||
since such addresses have no versioning opportunities. */
|
||||
struct loop *loop = loop_containing_stmt (stmt);
|
||||
class loop *loop = loop_containing_stmt (stmt);
|
||||
if (expr_invariant_in_loop_p (loop, expr))
|
||||
return;
|
||||
|
||||
@ -1375,7 +1375,7 @@ loop_versioning::analyze_expr (gimple *stmt, tree expr)
|
||||
bool
|
||||
loop_versioning::analyze_block (basic_block bb)
|
||||
{
|
||||
struct loop *loop = bb->loop_father;
|
||||
class loop *loop = bb->loop_father;
|
||||
loop_info &li = get_loop_info (loop);
|
||||
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
|
||||
gsi_next (&gsi))
|
||||
@ -1424,7 +1424,7 @@ loop_versioning::analyze_blocks ()
|
||||
versioning at that level could be useful in some cases. */
|
||||
get_loop_info (get_loop (m_fn, 0)).rejected_p = true;
|
||||
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
|
||||
{
|
||||
loop_info &linfo = get_loop_info (loop);
|
||||
@ -1435,7 +1435,7 @@ loop_versioning::analyze_blocks ()
|
||||
|
||||
/* See whether an inner loop prevents versioning of this loop. */
|
||||
if (!linfo.rejected_p)
|
||||
for (struct loop *inner = loop->inner; inner; inner = inner->next)
|
||||
for (class loop *inner = loop->inner; inner; inner = inner->next)
|
||||
if (get_loop_info (inner).rejected_p)
|
||||
{
|
||||
linfo.rejected_p = true;
|
||||
@@ -1479,7 +1479,7 @@ loop_versioning::analyze_blocks ()
LOOP. */

void
-loop_versioning::prune_loop_conditions (struct loop *loop, vr_values *vrs)
+loop_versioning::prune_loop_conditions (class loop *loop, vr_values *vrs)
{
loop_info &li = get_loop_info (loop);

@@ -1525,7 +1525,7 @@ loop_versioning::prune_conditions ()
OUTER. */

void
-loop_versioning::merge_loop_info (struct loop *outer, struct loop *inner)
+loop_versioning::merge_loop_info (class loop *outer, class loop *inner)
{
loop_info &inner_li = get_loop_info (inner);
loop_info &outer_li = get_loop_info (outer);
@@ -1549,7 +1549,7 @@ loop_versioning::merge_loop_info (struct loop *outer, struct loop *inner)
/* Add LOOP to the queue of loops to version. */

void
-loop_versioning::add_loop_to_queue (struct loop *loop)
+loop_versioning::add_loop_to_queue (class loop *loop)
{
loop_info &li = get_loop_info (loop);

@@ -1571,7 +1571,7 @@ loop_versioning::add_loop_to_queue (struct loop *loop)
We have already made this decision for all inner loops of LOOP. */

bool
-loop_versioning::decide_whether_loop_is_versionable (struct loop *loop)
+loop_versioning::decide_whether_loop_is_versionable (class loop *loop)
{
loop_info &li = get_loop_info (loop);

@@ -1579,7 +1579,7 @@ loop_versioning::decide_whether_loop_is_versionable (struct loop *loop)
return false;

/* Examine the decisions made for inner loops. */
-for (struct loop *inner = loop->inner; inner; inner = inner->next)
+for (class loop *inner = loop->inner; inner; inner = inner->next)
{
loop_info &inner_li = get_loop_info (inner);
if (inner_li.rejected_p)
@@ -1631,7 +1631,7 @@ loop_versioning::decide_whether_loop_is_versionable (struct loop *loop)
}

/* Hoist all version checks from subloops to this loop. */
-for (struct loop *subloop = loop->inner; subloop; subloop = subloop->next)
+for (class loop *subloop = loop->inner; subloop; subloop = subloop->next)
merge_loop_info (loop, subloop);

return true;
@@ -1646,7 +1646,7 @@ loop_versioning::make_versioning_decisions ()
AUTO_DUMP_SCOPE ("make_versioning_decisions",
dump_user_location_t::from_function_decl (m_fn->decl));

-struct loop *loop;
+class loop *loop;
FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
{
loop_info &linfo = get_loop_info (loop);
@@ -1663,7 +1663,7 @@ loop_versioning::make_versioning_decisions ()
/* We can't version this loop, so individually version any
subloops that would benefit and haven't been versioned yet. */
linfo.rejected_p = true;
-for (struct loop *subloop = loop->inner; subloop;
+for (class loop *subloop = loop->inner; subloop;
subloop = subloop->next)
if (get_loop_info (subloop).worth_versioning_p ())
add_loop_to_queue (subloop);
@@ -1677,7 +1677,7 @@ loop_versioning::make_versioning_decisions ()
cached in the associated loop_info. Return true on success. */

bool
-loop_versioning::version_loop (struct loop *loop)
+loop_versioning::version_loop (class loop *loop)
{
loop_info &li = get_loop_info (loop);

@@ -1739,7 +1739,7 @@ loop_versioning::implement_versioning_decisions ()
user-facing at this point. */

bool any_succeeded_p = false;
-struct loop *loop;
+class loop *loop;
unsigned int i;
FOR_EACH_VEC_ELT (m_loops_to_version, i, loop)
if (version_loop (loop))

@@ -262,7 +262,7 @@ evrp_range_analyzer::record_ranges_from_phis (basic_block bb)
use PHI arg ranges which may be still UNDEFINED but have
to use VARYING for them. But we can still resort to
SCEV for loop header PHIs. */
-struct loop *l;
+class loop *l;
if (scev_initialized_p ()
&& interesting
&& (l = loop_containing_stmt (phi))

@@ -2159,7 +2159,7 @@ public:
virtual unsigned int execute (function *);

private:
-hash_map<tree_operand_hash, struct imm_store_chain_info *> m_stores;
+hash_map<tree_operand_hash, class imm_store_chain_info *> m_stores;

/* Form a doubly-linked stack of the elements of m_stores, so that
we can iterate over them in a predictable way. Using this order
@@ -3097,7 +3097,7 @@ split_store::split_store (unsigned HOST_WIDE_INT bp,
if there is exactly one original store in the range. */

static store_immediate_info *
-find_constituent_stores (struct merged_store_group *group,
+find_constituent_stores (class merged_store_group *group,
vec<store_immediate_info *> *stores,
unsigned int *first,
unsigned HOST_WIDE_INT bitpos,
@@ -3240,7 +3240,7 @@ count_multiple_uses (store_immediate_info *info)
static unsigned int
split_group (merged_store_group *group, bool allow_unaligned_store,
bool allow_unaligned_load, bool bzero_first,
-vec<struct split_store *> *split_stores,
+vec<split_store *> *split_stores,
unsigned *total_orig,
unsigned *total_new)
{
@@ -3277,7 +3277,7 @@ split_group (merged_store_group *group, bool allow_unaligned_store,
if (align_bitpos)
align = least_bit_hwi (align_bitpos);
bytepos = group->start / BITS_PER_UNIT;
-struct split_store *store
+split_store *store
= new split_store (bytepos, group->width, align);
unsigned int first = 0;
find_constituent_stores (group, &store->orig_stores,
@@ -3335,7 +3335,7 @@ split_group (merged_store_group *group, bool allow_unaligned_store,
ret = 1;
if (split_stores)
{
-struct split_store *store
+split_store *store
= new split_store (bytepos, group->stores[0]->bitsize, align_base);
store->orig_stores.safe_push (group->stores[0]);
store->orig = true;
@@ -3462,7 +3462,7 @@ split_group (merged_store_group *group, bool allow_unaligned_store,

if (split_stores)
{
-struct split_store *store
+split_store *store
= new split_store (try_pos, try_size, align);
info = find_constituent_stores (group, &store->orig_stores,
&first, try_bitpos, try_size);
@@ -3483,7 +3483,7 @@ split_group (merged_store_group *group, bool allow_unaligned_store,
if (total_orig)
{
unsigned int i;
-struct split_store *store;
+split_store *store;
/* If we are reusing some original stores and any of the
original SSA_NAMEs had multiple uses, we need to subtract
those now before we add the new ones. */
@@ -3650,7 +3650,7 @@ imm_store_chain_info::output_merged_store (merged_store_group *group)
if (orig_num_stmts < 2)
return false;

-auto_vec<struct split_store *, 32> split_stores;
+auto_vec<class split_store *, 32> split_stores;
bool allow_unaligned_store
= !STRICT_ALIGNMENT && PARAM_VALUE (PARAM_STORE_MERGING_ALLOW_UNALIGNED);
bool allow_unaligned_load = allow_unaligned_store;
@@ -4610,7 +4610,7 @@ pass_store_merging::process_store (gimple *stmt)
if (!ins_stmt)
memset (&n, 0, sizeof (n));

-struct imm_store_chain_info **chain_info = NULL;
+class imm_store_chain_info **chain_info = NULL;
if (base_addr)
chain_info = m_stores.get (base_addr);

@@ -4646,7 +4646,7 @@ pass_store_merging::process_store (gimple *stmt)
/* Store aliases any existing chain? */
terminate_all_aliasing_chains (NULL, stmt);
/* Start a new chain. */
-struct imm_store_chain_info *new_chain
+class imm_store_chain_info *new_chain
= new imm_store_chain_info (m_stores_head, base_addr);
info = new store_immediate_info (const_bitsize, const_bitpos,
const_bitregion_start,

@@ -297,8 +297,8 @@ public:
tree cached_basis;
};

-typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
-typedef const struct slsr_cand_d *const_slsr_cand_t;
+typedef class slsr_cand_d slsr_cand, *slsr_cand_t;
+typedef const class slsr_cand_d *const_slsr_cand_t;

/* Pointers to candidates are chained together as part of a mapping
from base expressions to the candidates that use them. */
@@ -354,7 +354,7 @@ public:
basic_block init_bb;
};

-typedef struct incr_info_d incr_info, *incr_info_t;
+typedef class incr_info_d incr_info, *incr_info_t;

/* Candidates are maintained in a vector. If candidate X dominates
candidate Y, then X appears before Y in the vector; but the
@@ -807,7 +807,7 @@ slsr_process_phi (gphi *phi, bool speed)
unsigned i;
tree arg0_base = NULL_TREE, base_type;
slsr_cand_t c;
-struct loop *cand_loop = gimple_bb (phi)->loop_father;
+class loop *cand_loop = gimple_bb (phi)->loop_father;
unsigned savings = 0;

/* A CAND_PHI requires each of its arguments to have the same

@@ -185,7 +185,7 @@ adjusted_warn_limit (bool idx)
// MAX_SIZE is WARN_ALLOCA= adjusted for VLAs. It is the maximum size
// in bytes we allow for arg.

-static struct alloca_type_and_limit
+static class alloca_type_and_limit
alloca_call_type_by_arg (tree arg, tree arg_casted, edge e,
unsigned HOST_WIDE_INT max_size)
{
@@ -326,7 +326,7 @@ is_max (tree x, wide_int max)
// type to an unsigned type, set *INVALID_CASTED_TYPE to the
// problematic signed type.

-static struct alloca_type_and_limit
+static class alloca_type_and_limit
alloca_call_type (gimple *stmt, bool is_vla, tree *invalid_casted_type)
{
gcc_assert (gimple_alloca_call_p (stmt));
@@ -459,7 +459,7 @@ alloca_call_type (gimple *stmt, bool is_vla, tree *invalid_casted_type)
// If we couldn't find anything, try a few heuristics for things we
// can easily determine. Check these misc cases but only accept
// them if all predecessors have a known bound.
-struct alloca_type_and_limit ret = alloca_type_and_limit (ALLOCA_OK);
+class alloca_type_and_limit ret = alloca_type_and_limit (ALLOCA_OK);
FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
{
gcc_assert (!len_casted || TYPE_UNSIGNED (TREE_TYPE (len_casted)));
@@ -536,7 +536,7 @@ pass_walloca::execute (function *fun)
continue;

tree invalid_casted_type = NULL;
-struct alloca_type_and_limit t
+class alloca_type_and_limit t
= alloca_call_type (stmt, is_vla, &invalid_casted_type);

unsigned HOST_WIDE_INT adjusted_alloca_limit

@@ -36,7 +36,7 @@ along with GCC; see the file COPYING3. If not see
the file being read. IB is the input block to use for reading. */

static gphi *
-input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
+input_phi (class lto_input_block *ib, basic_block bb, class data_in *data_in,
struct function *fn)
{
unsigned HOST_WIDE_INT ix;
@@ -83,7 +83,7 @@ input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
descriptors in DATA_IN. */

static gimple *
-input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
+input_gimple_stmt (class lto_input_block *ib, class data_in *data_in,
enum LTO_tags tag)
{
gimple *stmt;
@@ -249,8 +249,8 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
FN is the function being processed. */

void
-input_bb (struct lto_input_block *ib, enum LTO_tags tag,
-struct data_in *data_in, struct function *fn,
+input_bb (class lto_input_block *ib, enum LTO_tags tag,
+class data_in *data_in, struct function *fn,
int count_materialization_scale)
{
unsigned int index;

@@ -25,7 +25,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-streamer.h"

/* In gimple-streamer-in.c */
-void input_bb (struct lto_input_block *, enum LTO_tags, struct data_in *,
+void input_bb (class lto_input_block *, enum LTO_tags, class data_in *,
struct function *, int);

/* In gimple-streamer-out.c */

gcc/godump.c | 16

@@ -680,7 +680,7 @@ go_force_record_alignment (struct obstack *ob, const char *type_string,
calls from go_format_type() itself. */

static bool
-go_format_type (struct godump_container *container, tree type,
+go_format_type (class godump_container *container, tree type,
bool use_type_name, bool is_func_ok, unsigned int *p_art_i,
bool is_anon_record_or_union)
{
@@ -1092,7 +1092,7 @@ go_format_type (struct godump_container *container, tree type,
it. */

static void
-go_output_type (struct godump_container *container)
+go_output_type (class godump_container *container)
{
struct obstack *ob;

@@ -1105,7 +1105,7 @@ go_output_type (struct godump_container *container)
/* Output a function declaration. */

static void
-go_output_fndecl (struct godump_container *container, tree decl)
+go_output_fndecl (class godump_container *container, tree decl)
{
if (!go_format_type (container, TREE_TYPE (decl), false, true, NULL, false))
fprintf (go_dump_file, "// ");
@@ -1119,7 +1119,7 @@ go_output_fndecl (struct godump_container *container, tree decl)
/* Output a typedef or something like a struct definition. */

static void
-go_output_typedef (struct godump_container *container, tree decl)
+go_output_typedef (class godump_container *container, tree decl)
{
/* If we have an enum type, output the enum constants
separately. */
@@ -1246,7 +1246,7 @@ go_output_typedef (struct godump_container *container, tree decl)
/* Output a variable. */

static void
-go_output_var (struct godump_container *container, tree decl)
+go_output_var (class godump_container *container, tree decl)
{
bool is_valid;
tree type_name;
@@ -1335,7 +1335,7 @@ static const char * const keywords[] = {
};

static void
-keyword_hash_init (struct godump_container *container)
+keyword_hash_init (class godump_container *container)
{
size_t i;
size_t count = sizeof (keywords) / sizeof (keywords[0]);
@@ -1355,7 +1355,7 @@ keyword_hash_init (struct godump_container *container)
bool
find_dummy_types (const char *const &ptr, godump_container *adata)
{
-struct godump_container *data = (struct godump_container *) adata;
+class godump_container *data = (class godump_container *) adata;
const char *type = (const char *) ptr;
void **slot;
void **islot;
@@ -1372,7 +1372,7 @@ find_dummy_types (const char *const &ptr, godump_container *adata)
static void
go_finish (const char *filename)
{
-struct godump_container container;
+class godump_container container;
unsigned int ix;
tree decl;

@@ -197,7 +197,7 @@ draw_cfg_nodes_no_loops (pretty_printer *pp, struct function *fun)

static void
draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
-struct loop *loop)
+class loop *loop)
{
basic_block *body;
unsigned int i;
@@ -217,7 +217,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
fillcolors[(loop_depth (loop) - 1) % 3],
loop->num);

-for (struct loop *inner = loop->inner; inner; inner = inner->next)
+for (class loop *inner = loop->inner; inner; inner = inner->next)
draw_cfg_nodes_for_loop (pp, funcdef_no, inner);

if (loop->header == NULL)

@@ -613,8 +613,8 @@ hard_reg_set_iter_next (hard_reg_set_iterator *iter, unsigned *regno)

extern char global_regs[FIRST_PSEUDO_REGISTER];

-struct simplifiable_subreg;
-struct subreg_shape;
+class simplifiable_subreg;
+class subreg_shape;

struct simplifiable_subregs_hasher : nofree_ptr_hash <simplifiable_subreg>
{

@@ -150,9 +150,8 @@ struct hsa_brig_data_chunk

/* Structure representing a BRIG section, holding and writing its data. */

-class hsa_brig_section
+struct hsa_brig_section
{
-public:
/* Section name that will be output to the BRIG. */
const char *section_name;
/* Size in bytes of all data stored in the section. */
@@ -579,7 +578,7 @@ static void emit_immediate_operand (hsa_op_immed *imm);
Return the offset of the directive. */

static unsigned
-emit_directive_variable (struct hsa_symbol *symbol)
+emit_directive_variable (class hsa_symbol *symbol)
{
struct BrigDirectiveVariable dirvar;
unsigned name_offset;

@@ -1068,7 +1068,7 @@ private:
static inline hsa_bb *
hsa_bb_for_bb (basic_block bb)
{
-return (struct hsa_bb *) bb->aux;
+return (class hsa_bb *) bb->aux;
}

/* Class for hashing local hsa_symbols. */
@@ -1150,14 +1150,14 @@ public:
hash_map <tree, hsa_symbol *> m_string_constants_map;

/* Vector of pointers to spill symbols. */
-vec <struct hsa_symbol *> m_spill_symbols;
+vec <class hsa_symbol *> m_spill_symbols;

/* Vector of pointers to global variables and transformed string constants
that are used by the function. */
-vec <struct hsa_symbol *> m_global_symbols;
+vec <class hsa_symbol *> m_global_symbols;

/* Private function artificial variables. */
-vec <struct hsa_symbol *> m_private_variables;
+vec <class hsa_symbol *> m_private_variables;

/* Vector of called function declarations. */
vec <tree> m_called_functions;
@@ -1318,7 +1318,7 @@ hsa_internal_fn_hasher::equal (const value_type a, const compare_type b)
}

/* in hsa-common.c */
-extern struct hsa_function_representation *hsa_cfun;
+extern class hsa_function_representation *hsa_cfun;
extern hash_map <tree, vec <const char *> *> *hsa_decl_kernel_dependencies;
extern hsa_summary_t *hsa_summaries;
extern hsa_symbol *hsa_num_threads;

@@ -1229,7 +1229,7 @@ dump_hsa_cfun (FILE *f)

FOR_ALL_BB_FN (bb, cfun)
{
-hsa_bb *hbb = (struct hsa_bb *) bb->aux;
+hsa_bb *hbb = (class hsa_bb *) bb->aux;
dump_hsa_bb (f, hbb);
}
}

@@ -6070,7 +6070,7 @@ gen_function_def_parameters ()
for (parm = DECL_ARGUMENTS (cfun->decl); parm;
parm = DECL_CHAIN (parm))
{
-struct hsa_symbol **slot;
+class hsa_symbol **slot;

hsa_symbol *arg
= new hsa_symbol (BRIG_TYPE_NONE, hsa_cfun->m_kern_p
@@ -6128,7 +6128,7 @@ gen_function_def_parameters ()

if (!VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
{
-struct hsa_symbol **slot;
+class hsa_symbol **slot;

hsa_cfun->m_output_arg = new hsa_symbol (BRIG_TYPE_NONE, BRIG_SEGMENT_ARG,
BRIG_LINKAGE_FUNCTION);

@@ -256,7 +256,7 @@ dump_hsa_cfun_regalloc (FILE *f)

FOR_ALL_BB_FN (bb, cfun)
{
-hsa_bb *hbb = (struct hsa_bb *) bb->aux;
+hsa_bb *hbb = (class hsa_bb *) bb->aux;
bitmap_print (dump_file, hbb->m_livein, "m_livein ", "\n");
dump_hsa_bb (f, hbb);
bitmap_print (dump_file, hbb->m_liveout, "m_liveout ", "\n");

@@ -124,14 +124,14 @@ public:

location_t input_location = UNKNOWN_LOCATION;

-struct line_maps *line_table;
+class line_maps *line_table;

/* A stashed copy of "line_table" for use by selftest::line_table_test.
This needs to be a global so that it can be a GC root, and thus
prevent the stashed copy from being garbage-collected if the GC runs
during a line_table_test. */

-struct line_maps *saved_line_table;
+class line_maps *saved_line_table;

static fcache *fcache_tab;
static const size_t fcache_tab_size = 16;
@@ -980,7 +980,7 @@ dump_line_table_statistics (void)
/* Get location one beyond the final location in ordinary map IDX. */

static location_t
-get_end_location (struct line_maps *set, unsigned int idx)
+get_end_location (class line_maps *set, unsigned int idx)
{
if (idx == LINEMAPS_ORDINARY_USED (set) - 1)
return set->highest_location;
@@ -2051,7 +2051,7 @@ test_lexer (const line_table_case &case_)

/* Forward decls. */

-struct lexer_test;
+class lexer_test;
class lexer_test_options;

/* A class for specifying options of a lexer_test.

@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see

#include "line-map.h"

-extern GTY(()) struct line_maps *line_table;
-extern GTY(()) struct line_maps *saved_line_table;
+extern GTY(()) class line_maps *line_table;
+extern GTY(()) class line_maps *saved_line_table;

/* A value which will never be used to represent a real location. */
#define UNKNOWN_LOCATION ((location_t) 0)

@@ -149,7 +149,7 @@ get_multi_vector_move (tree array_type, convert_optab optab)
static void
expand_load_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
-struct expand_operand ops[2];
+class expand_operand ops[2];
tree type, lhs, rhs;
rtx target, mem;

@@ -173,7 +173,7 @@ expand_load_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
static void
expand_store_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
-struct expand_operand ops[2];
+class expand_operand ops[2];
tree type, lhs, rhs;
rtx target, reg;

@@ -227,7 +227,7 @@ expand_GOMP_SIMT_ENTER_ALLOC (internal_fn, gcall *stmt)
target = gen_reg_rtx (Pmode);
rtx size = expand_normal (gimple_call_arg (stmt, 0));
rtx align = expand_normal (gimple_call_arg (stmt, 1));
-struct expand_operand ops[3];
+class expand_operand ops[3];
create_output_operand (&ops[0], target, Pmode);
create_input_operand (&ops[1], size, Pmode);
create_input_operand (&ops[2], align, Pmode);
@@ -242,7 +242,7 @@ expand_GOMP_SIMT_EXIT (internal_fn, gcall *stmt)
{
gcc_checking_assert (!gimple_call_lhs (stmt));
rtx arg = expand_normal (gimple_call_arg (stmt, 0));
-struct expand_operand ops[1];
+class expand_operand ops[1];
create_input_operand (&ops[0], arg, Pmode);
gcc_assert (targetm.have_omp_simt_exit ());
expand_insn (targetm.code_for_omp_simt_exit, 1, ops);
@@ -285,7 +285,7 @@ expand_GOMP_SIMT_LAST_LANE (internal_fn, gcall *stmt)
rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
rtx cond = expand_normal (gimple_call_arg (stmt, 0));
machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
-struct expand_operand ops[2];
+class expand_operand ops[2];
create_output_operand (&ops[0], target, mode);
create_input_operand (&ops[1], cond, mode);
gcc_assert (targetm.have_omp_simt_last_lane ());
@@ -304,7 +304,7 @@ expand_GOMP_SIMT_ORDERED_PRED (internal_fn, gcall *stmt)
rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
rtx ctr = expand_normal (gimple_call_arg (stmt, 0));
machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
-struct expand_operand ops[2];
+class expand_operand ops[2];
create_output_operand (&ops[0], target, mode);
create_input_operand (&ops[1], ctr, mode);
gcc_assert (targetm.have_omp_simt_ordered ());
@@ -324,7 +324,7 @@ expand_GOMP_SIMT_VOTE_ANY (internal_fn, gcall *stmt)
rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
rtx cond = expand_normal (gimple_call_arg (stmt, 0));
machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
-struct expand_operand ops[2];
+class expand_operand ops[2];
create_output_operand (&ops[0], target, mode);
create_input_operand (&ops[1], cond, mode);
gcc_assert (targetm.have_omp_simt_vote_any ());
@@ -345,7 +345,7 @@ expand_GOMP_SIMT_XCHG_BFLY (internal_fn, gcall *stmt)
rtx src = expand_normal (gimple_call_arg (stmt, 0));
rtx idx = expand_normal (gimple_call_arg (stmt, 1));
machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
-struct expand_operand ops[3];
+class expand_operand ops[3];
create_output_operand (&ops[0], target, mode);
create_input_operand (&ops[1], src, mode);
create_input_operand (&ops[2], idx, SImode);
@@ -366,7 +366,7 @@ expand_GOMP_SIMT_XCHG_IDX (internal_fn, gcall *stmt)
rtx src = expand_normal (gimple_call_arg (stmt, 0));
rtx idx = expand_normal (gimple_call_arg (stmt, 1));
machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
-struct expand_operand ops[3];
+class expand_operand ops[3];
create_output_operand (&ops[0], target, mode);
create_input_operand (&ops[1], src, mode);
create_input_operand (&ops[2], idx, SImode);
@@ -774,7 +774,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
: usubv4_optab, mode);
if (icode != CODE_FOR_nothing)
{
-struct expand_operand ops[4];
+class expand_operand ops[4];
rtx_insn *last = get_last_insn ();

res = gen_reg_rtx (mode);
@@ -995,7 +995,7 @@ expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
: subv4_optab, mode);
if (icode != CODE_FOR_nothing)
{
-struct expand_operand ops[4];
+class expand_operand ops[4];
rtx_insn *last = get_last_insn ();

res = gen_reg_rtx (mode);
@@ -1146,7 +1146,7 @@ expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
enum insn_code icode = optab_handler (negv3_optab, mode);
if (icode != CODE_FOR_nothing)
{
-struct expand_operand ops[3];
+class expand_operand ops[3];
rtx_insn *last = get_last_insn ();

res = gen_reg_rtx (mode);
@@ -1539,7 +1539,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
}
if (icode != CODE_FOR_nothing)
{
-struct expand_operand ops[4];
+class expand_operand ops[4];
rtx_insn *last = get_last_insn ();

res = gen_reg_rtx (mode);
@@ -2475,7 +2475,7 @@ expand_call_mem_ref (tree type, gcall *stmt, int index)
static void
expand_mask_load_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
-struct expand_operand ops[3];
+class expand_operand ops[3];
tree type, lhs, rhs, maskt;
rtx mem, target, mask;
insn_code icode;
@@ -2510,7 +2510,7 @@ expand_mask_load_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
static void
expand_mask_store_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
{
-struct expand_operand ops[3];
+class expand_operand ops[3];
tree type, lhs, rhs, maskt;
rtx mem, reg, mask;
insn_code icode;
@@ -2771,7 +2771,7 @@ expand_scatter_store_optab_fn (internal_fn, gcall *stmt, direct_optab optab)
HOST_WIDE_INT scale_int = tree_to_shwi (scale);
rtx rhs_rtx = expand_normal (rhs);

-struct expand_operand ops[6];
+class expand_operand ops[6];
int i = 0;
create_address_operand (&ops[i++], base_rtx);
create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));
@@ -2805,7 +2805,7 @@ expand_gather_load_optab_fn (internal_fn, gcall *stmt, direct_optab optab)
HOST_WIDE_INT scale_int = tree_to_shwi (scale);

int i = 0;
-struct expand_operand ops[6];
+class expand_operand ops[6];
create_output_operand (&ops[i++], lhs_rtx, TYPE_MODE (TREE_TYPE (lhs)));
create_address_operand (&ops[i++], base_rtx);
create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));

gcc/ipa-cp.c | 172

@@ -381,8 +381,8 @@ static hash_map<const char *, unsigned> *clone_num_suffixes;

/* Return the param lattices structure corresponding to the Ith formal
parameter of the function described by INFO. */
-static inline struct ipcp_param_lattices *
-ipa_get_parm_lattices (struct ipa_node_params *info, int i)
+static inline class ipcp_param_lattices *
+ipa_get_parm_lattices (class ipa_node_params *info, int i)
{
gcc_assert (i >= 0 && i < ipa_get_param_count (info));
gcc_checking_assert (!info->ipcp_orig_node);
@@ -393,18 +393,18 @@ ipa_get_parm_lattices (struct ipa_node_params *info, int i)
/* Return the lattice corresponding to the scalar value of the Ith formal
parameter of the function described by INFO. */
static inline ipcp_lattice<tree> *
-ipa_get_scalar_lat (struct ipa_node_params *info, int i)
+ipa_get_scalar_lat (class ipa_node_params *info, int i)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
return &plats->itself;
}

/* Return the lattice corresponding to the scalar value of the Ith formal
parameter of the function described by INFO. */
static inline ipcp_lattice<ipa_polymorphic_call_context> *
-ipa_get_poly_ctx_lat (struct ipa_node_params *info, int i)
+ipa_get_poly_ctx_lat (class ipa_node_params *info, int i)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
return &plats->ctxlat;
}

@@ -539,7 +539,7 @@ print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
fprintf (f, "\nLattices:\n");
FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
{
-struct ipa_node_params *info;
+class ipa_node_params *info;

info = IPA_NODE_REF (node);
/* Skip constprop clones since we don't make lattices for them. */
@@ -550,7 +550,7 @@ print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)
for (i = 0; i < count; i++)
{
struct ipcp_agg_lattice *aglat;
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
fprintf (f, " param [%d]: ", i);
plats->itself.print (f, dump_sources, dump_benefits);
fprintf (f, " ctxs: ");
@@ -585,7 +585,7 @@ print_all_lattices (FILE * f, bool dump_sources, bool dump_benefits)

static void
determine_versionability (struct cgraph_node *node,
-struct ipa_node_params *info)
+class ipa_node_params *info)
{
const char *reason = NULL;

@@ -823,7 +823,7 @@ ignore_edge_p (cgraph_edge *e)
/* Allocate the arrays in TOPO and topologically sort the nodes into order. */

static void
-build_toporder_info (struct ipa_topo_info *topo)
+build_toporder_info (class ipa_topo_info *topo)
{
topo->order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
topo->stack = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
@@ -837,7 +837,7 @@ build_toporder_info (struct ipa_topo_info *topo)
TOPO. */

static void
-free_toporder_info (struct ipa_topo_info *topo)
+free_toporder_info (class ipa_topo_info *topo)
{
ipa_free_postorder_info ();
free (topo->order);
@@ -847,9 +847,9 @@ free_toporder_info (struct ipa_topo_info *topo)
/* Add NODE to the stack in TOPO, unless it is already there. */

static inline void
-push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
+push_node_to_stack (class ipa_topo_info *topo, struct cgraph_node *node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
if (info->node_enqueued)
return;
info->node_enqueued = 1;
@@ -860,7 +860,7 @@ push_node_to_stack (struct ipa_topo_info *topo, struct cgraph_node *node)
is empty. */

static struct cgraph_node *
-pop_node_from_stack (struct ipa_topo_info *topo)
+pop_node_from_stack (class ipa_topo_info *topo)
{
if (topo->stack_top)
{
@@ -902,7 +902,7 @@ ipcp_lattice<valtype>::set_contains_variable ()
not previously set as such. */

static inline bool
-set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
+set_agg_lats_to_bottom (class ipcp_param_lattices *plats)
{
bool ret = !plats->aggs_bottom;
plats->aggs_bottom = true;
@@ -913,7 +913,7 @@ set_agg_lats_to_bottom (struct ipcp_param_lattices *plats)
return true if they were not previously marked as such. */

static inline bool
-set_agg_lats_contain_variable (struct ipcp_param_lattices *plats)
+set_agg_lats_contain_variable (class ipcp_param_lattices *plats)
{
bool ret = !plats->aggs_contain_variable;
plats->aggs_contain_variable = true;
@@ -1123,7 +1123,7 @@ ipcp_bits_lattice::meet_with (ipcp_bits_lattice& other, unsigned precision,
return true is any of them has not been marked as such so far. */

static inline bool
-set_all_contains_variable (struct ipcp_param_lattices *plats)
+set_all_contains_variable (class ipcp_param_lattices *plats)
{
bool ret;
ret = plats->itself.set_contains_variable ();
@@ -1173,7 +1173,7 @@ set_single_call_flag (cgraph_node *node, void *)
static void
initialize_node_lattices (struct cgraph_node *node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
struct cgraph_edge *ie;
bool disable = false, variable = false;
int i;
@@ -1203,7 +1203,7 @@ initialize_node_lattices (struct cgraph_node *node)

for (i = 0; i < ipa_get_param_count (info); i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
plats->m_value_range.init ();
}

@@ -1211,7 +1211,7 @@ initialize_node_lattices (struct cgraph_node *node)
{
for (i = 0; i < ipa_get_param_count (info); i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
if (disable)
{
plats->itself.set_to_bottom ();
@@ -1304,7 +1304,7 @@ ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
passed. */

tree
-ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc,
+ipa_value_from_jfunc (class ipa_node_params *info, struct ipa_jump_func *jfunc,
tree parm_type)
{
if (jfunc->type == IPA_JF_CONST)
@@ -1422,7 +1422,7 @@ ipcp_verify_propagated_values (void)

FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);

for (i = 0; i < count; i++)
@@ -1674,7 +1674,7 @@ propagate_scalar_across_jump_function (struct cgraph_edge *cs,
else if (jfunc->type == IPA_JF_PASS_THROUGH
|| jfunc->type == IPA_JF_ANCESTOR)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
ipcp_lattice<tree> *src_lat;
int src_idx;
bool ret;
@@ -1736,7 +1736,7 @@ propagate_context_across_jump_function (cgraph_edge *cs,
if (jfunc->type == IPA_JF_PASS_THROUGH
|| jfunc->type == IPA_JF_ANCESTOR)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx;
ipcp_lattice<ipa_polymorphic_call_context> *src_lat;

@@ -1812,7 +1812,7 @@ propagate_bits_across_jump_function (cgraph_edge *cs, int idx,

enum availability availability;
cgraph_node *callee = cs->callee->function_symbol (&availability);
-struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
+class ipa_node_params *callee_info = IPA_NODE_REF (callee);
tree parm_type = ipa_get_type (callee_info, idx);

/* For K&R C programs, ipa_get_type() could return NULL_TREE. Avoid the
@@ -1835,7 +1835,7 @@ propagate_bits_across_jump_function (cgraph_edge *cs, int idx,
if (jfunc->type == IPA_JF_PASS_THROUGH
|| jfunc->type == IPA_JF_ANCESTOR)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
tree operand = NULL_TREE;
enum tree_code code;
unsigned src_idx;
@@ -1855,7 +1855,7 @@ propagate_bits_across_jump_function (cgraph_edge *cs, int idx,
operand = build_int_cstu (size_type_node, offset);
}

-struct ipcp_param_lattices *src_lats
+class ipcp_param_lattices *src_lats
= ipa_get_parm_lattices (caller_info, src_idx);

/* Try to propagate bits if src_lattice is bottom, but jfunc is known.
@@ -1909,7 +1909,7 @@ ipa_vr_operation_and_type_effects (value_range_base *dst_vr,

static bool
propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,
-struct ipcp_param_lattices *dest_plats,
+class ipcp_param_lattices *dest_plats,
tree param_type)
{
ipcp_vr_lattice *dest_lat = &dest_plats->m_value_range;
@@ -1928,10 +1928,10 @@ propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,

if (TREE_CODE_CLASS (operation) == tcc_unary)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
tree operand_type = ipa_get_type (caller_info, src_idx);
-struct ipcp_param_lattices *src_lats
+class ipcp_param_lattices *src_lats
= ipa_get_parm_lattices (caller_info, src_idx);

if (src_lats->m_value_range.bottom_p ())
@@ -1974,7 +1974,7 @@ propagate_vr_across_jump_function (cgraph_edge *cs, ipa_jump_func *jfunc,
aggs_by_ref to NEW_AGGS_BY_REF. */

static bool
-set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
+set_check_aggs_by_ref (class ipcp_param_lattices *dest_plats,
bool new_aggs_by_ref)
{
if (dest_plats->aggs)
@@ -2001,7 +2001,7 @@ set_check_aggs_by_ref (struct ipcp_param_lattices *dest_plats,
true. */

static bool
-merge_agg_lats_step (struct ipcp_param_lattices *dest_plats,
+merge_agg_lats_step (class ipcp_param_lattices *dest_plats,
HOST_WIDE_INT offset, HOST_WIDE_INT val_size,
struct ipcp_agg_lattice ***aglat,
bool pre_existing, bool *change)
@@ -2079,8 +2079,8 @@ set_chain_of_aglats_contains_variable (struct ipcp_agg_lattice *aglat)

static bool
merge_aggregate_lattices (struct cgraph_edge *cs,
-struct ipcp_param_lattices *dest_plats,
-struct ipcp_param_lattices *src_plats,
+class ipcp_param_lattices *dest_plats,
+class ipcp_param_lattices *src_plats,
int src_idx, HOST_WIDE_INT offset_delta)
{
bool pre_existing = dest_plats->aggs != NULL;
@@ -2134,7 +2134,7 @@ merge_aggregate_lattices (struct cgraph_edge *cs,
rules about propagating values passed by reference. */

static bool
-agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
+agg_pass_through_permissible_p (class ipcp_param_lattices *src_plats,
struct ipa_jump_func *jfunc)
{
return src_plats->aggs
@@ -2148,7 +2148,7 @@ agg_pass_through_permissible_p (struct ipcp_param_lattices *src_plats,
static bool
propagate_aggs_across_jump_function (struct cgraph_edge *cs,
struct ipa_jump_func *jfunc,
-struct ipcp_param_lattices *dest_plats)
+class ipcp_param_lattices *dest_plats)
{
bool ret = false;

@@ -2158,9 +2158,9 @@ propagate_aggs_across_jump_function (struct cgraph_edge *cs,
if (jfunc->type == IPA_JF_PASS_THROUGH
&& ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);
-struct ipcp_param_lattices *src_plats;
+class ipcp_param_lattices *src_plats;

src_plats = ipa_get_parm_lattices (caller_info, src_idx);
if (agg_pass_through_permissible_p (src_plats, jfunc))
@@ -2177,9 +2177,9 @@ propagate_aggs_across_jump_function (struct cgraph_edge *cs,
else if (jfunc->type == IPA_JF_ANCESTOR
&& ipa_get_jf_ancestor_agg_preserved (jfunc))
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
-struct ipcp_param_lattices *src_plats;
+class ipcp_param_lattices *src_plats;

src_plats = ipa_get_parm_lattices (caller_info, src_idx);
if (src_plats->aggs && src_plats->aggs_by_ref)
@@ -2250,10 +2250,10 @@ call_passes_through_thunk_p (cgraph_edge *cs)
static bool
propagate_constants_across_call (struct cgraph_edge *cs)
{
-struct ipa_node_params *callee_info;
+class ipa_node_params *callee_info;
enum availability availability;
cgraph_node *callee;
-struct ipa_edge_args *args;
+class ipa_edge_args *args;
bool ret = false;
int i, args_count, parms_count;

@@ -2284,7 +2284,7 @@ propagate_constants_across_call (struct cgraph_edge *cs)
for (; (i < args_count) && (i < parms_count); i++)
{
struct ipa_jump_func *jump_func = ipa_get_ith_jump_func (args, i);
-struct ipcp_param_lattices *dest_plats;
+class ipcp_param_lattices *dest_plats;
tree param_type = ipa_get_type (callee_info, i);

dest_plats = ipa_get_parm_lattices (callee_info, i);
@@ -2563,7 +2563,7 @@ devirtualization_time_bonus (struct cgraph_node *node,
for (ie = node->indirect_calls; ie; ie = ie->next_callee)
{
struct cgraph_node *callee;
-struct ipa_fn_summary *isummary;
+class ipa_fn_summary *isummary;
enum availability avail;
tree target;
bool speculative;
@@ -2645,7 +2645,7 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,

gcc_assert (size_cost > 0);

-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
if (max_count > profile_count::zero ())
{
int factor = RDIV (count_sum.probability_in
@@ -2692,7 +2692,7 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
vector. Return NULL if there are none. */

static vec<ipa_agg_jf_item, va_gc> *
-context_independent_aggregate_values (struct ipcp_param_lattices *plats)
+context_independent_aggregate_values (class ipcp_param_lattices *plats)
{
vec<ipa_agg_jf_item, va_gc> *res = NULL;

@@ -2721,7 +2721,7 @@ context_independent_aggregate_values (struct ipcp_param_lattices *plats)
it. */

static bool
-gather_context_independent_values (struct ipa_node_params *info,
+gather_context_independent_values (class ipa_node_params *info,
vec<tree> *known_csts,
vec<ipa_polymorphic_call_context>
*known_contexts,
@@ -2746,7 +2746,7 @@ gather_context_independent_values (struct ipa_node_params *info,

for (i = 0; i < count; i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
ipcp_lattice<tree> *lat = &plats->itself;

if (lat->is_single_const ())
@@ -2863,7 +2863,7 @@ perform_estimation_of_a_value (cgraph_node *node, vec<tree> known_csts,
static void
estimate_local_effects (struct cgraph_node *node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
vec<tree> known_csts;
vec<ipa_polymorphic_call_context> known_contexts;
@@ -2943,7 +2943,7 @@ estimate_local_effects (struct cgraph_node *node)

for (i = 0; i < count; i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
ipcp_lattice<tree> *lat = &plats->itself;
ipcp_value<tree> *val;

@@ -2977,7 +2977,7 @@ estimate_local_effects (struct cgraph_node *node)

for (i = 0; i < count; i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);

if (!plats->virt_call)
continue;
@@ -3012,7 +3012,7 @@ estimate_local_effects (struct cgraph_node *node)

for (i = 0; i < count; i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
struct ipa_agg_jump_function *ajf;
struct ipcp_agg_lattice *aglat;

@@ -3129,12 +3129,12 @@ value_topo_info<valtype>::add_val (ipcp_value<valtype> *cur_val)
static void
add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);

for (i = 0; i < count; i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
ipcp_lattice<tree> *lat = &plats->itself;
struct ipcp_agg_lattice *aglat;

@@ -3169,7 +3169,7 @@ add_all_node_vals_to_toposort (cgraph_node *node, ipa_topo_info *topo)
connected components. */

static void
-propagate_constants_topo (struct ipa_topo_info *topo)
+propagate_constants_topo (class ipa_topo_info *topo)
{
int i;

@@ -3272,7 +3272,7 @@ value_topo_info<valtype>::propagate_effects ()
summaries interprocedurally. */

static void
-ipcp_propagate_stage (struct ipa_topo_info *topo)
+ipcp_propagate_stage (class ipa_topo_info *topo)
{
struct cgraph_node *node;

@@ -3283,12 +3283,12 @@ ipcp_propagate_stage (struct ipa_topo_info *topo)

FOR_EACH_DEFINED_FUNCTION (node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);

determine_versionability (node, info);
if (node->has_gimple_body_p ())
{
-info->lattices = XCNEWVEC (struct ipcp_param_lattices,
+info->lattices = XCNEWVEC (class ipcp_param_lattices,
ipa_get_param_count (info));
initialize_node_lattices (node);
}
@@ -3352,7 +3352,7 @@ ipcp_discover_new_direct_edges (struct cgraph_node *node,

if (cs && !agg_contents && !polymorphic)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int c = ipa_get_controlled_uses (info, param_index);
if (c != IPA_UNDESCRIBED_USE)
{
@@ -3461,7 +3461,7 @@ same_node_or_its_all_contexts_clone_p (cgraph_node *node, cgraph_node *dest)
if (node == dest)
return true;

-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
return info->is_all_contexts_clone && info->ipcp_orig_node == dest;
}

@@ -3472,7 +3472,7 @@ static bool
cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
cgraph_node *dest, ipcp_value<tree> *dest_val)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
enum availability availability;
cgraph_node *real_dest = cs->callee->function_symbol (&availability);

@@ -3503,7 +3503,7 @@ cgraph_edge_brings_value_p (cgraph_edge *cs, ipcp_value_source<tree> *src,
return true;

struct ipcp_agg_lattice *aglat;
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
src->index);
if (src->offset == -1)
return (plats->itself.is_single_const ()
@@ -3532,7 +3532,7 @@ cgraph_edge_brings_value_p (cgraph_edge *cs,
cgraph_node *dest,
ipcp_value<ipa_polymorphic_call_context> *)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
cgraph_node *real_dest = cs->callee->function_symbol ();

if (!same_node_or_its_all_contexts_clone_p (real_dest, dest)
@@ -3546,7 +3546,7 @@ cgraph_edge_brings_value_p (cgraph_edge *cs,
&& values_equal_for_ipcp_p (src->val->value,
caller_info->known_contexts[src->index]);

-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (caller_info,
src->index);
return plats->ctxlat.is_single_const ()
&& values_equal_for_ipcp_p (src->val->value,
@@ -3639,7 +3639,7 @@ gather_edges_for_value (ipcp_value<valtype> *val, cgraph_node *dest,
Return it or NULL if for some reason it cannot be created. */

static struct ipa_replace_map *
-get_replacement_map (struct ipa_node_params *info, tree value, int parm_num)
+get_replacement_map (class ipa_node_params *info, tree value, int parm_num)
{
struct ipa_replace_map *replace_map;

@@ -3808,7 +3808,7 @@ create_specialized_node (struct cgraph_node *node,
struct ipa_agg_replacement_value *aggvals,
vec<cgraph_edge *> callers)
{
-struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
+class ipa_node_params *new_info, *info = IPA_NODE_REF (node);
vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
struct ipa_agg_replacement_value *av;
struct cgraph_node *new_node;
@@ -3942,7 +3942,7 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
vec<tree> known_csts,
vec<cgraph_edge *> callers)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);

for (i = 0; i < count; i++)
@@ -4075,7 +4075,7 @@ find_more_contexts_for_caller_subset (cgraph_node *node,
offsets (minus OFFSET) of lattices that contain only a single value. */

static vec<ipa_agg_jf_item>
-copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
+copy_plats_to_inter (class ipcp_param_lattices *plats, HOST_WIDE_INT offset)
{
vec<ipa_agg_jf_item> res = vNULL;

@@ -4097,7 +4097,7 @@ copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
subtracting OFFSET). */

static void
-intersect_with_plats (struct ipcp_param_lattices *plats,
+intersect_with_plats (class ipcp_param_lattices *plats,
vec<ipa_agg_jf_item> *inter,
HOST_WIDE_INT offset)
{
@@ -4217,13 +4217,13 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
if (jfunc->type == IPA_JF_PASS_THROUGH
&& ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx = ipa_get_jf_pass_through_formal_id (jfunc);

if (caller_info->ipcp_orig_node)
{
struct cgraph_node *orig_node = caller_info->ipcp_orig_node;
-struct ipcp_param_lattices *orig_plats;
+class ipcp_param_lattices *orig_plats;
orig_plats = ipa_get_parm_lattices (IPA_NODE_REF (orig_node),
src_idx);
if (agg_pass_through_permissible_p (orig_plats, jfunc))
@@ -4242,7 +4242,7 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
}
else
{
-struct ipcp_param_lattices *src_plats;
+class ipcp_param_lattices *src_plats;
src_plats = ipa_get_parm_lattices (caller_info, src_idx);
if (agg_pass_through_permissible_p (src_plats, jfunc))
{
@@ -4264,9 +4264,9 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
else if (jfunc->type == IPA_JF_ANCESTOR
&& ipa_get_jf_ancestor_agg_preserved (jfunc))
{
-struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
+class ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
int src_idx = ipa_get_jf_ancestor_formal_id (jfunc);
-struct ipcp_param_lattices *src_plats;
+class ipcp_param_lattices *src_plats;
HOST_WIDE_INT delta = ipa_get_jf_ancestor_offset (jfunc);

if (caller_info->ipcp_orig_node)
@@ -4341,7 +4341,7 @@ static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
vec<cgraph_edge *> callers)
{
-struct ipa_node_params *dest_info = IPA_NODE_REF (node);
+class ipa_node_params *dest_info = IPA_NODE_REF (node);
struct ipa_agg_replacement_value *res;
struct ipa_agg_replacement_value **tail = &res;
struct cgraph_edge *cs;
@@ -4359,7 +4359,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
struct cgraph_edge *cs;
vec<ipa_agg_jf_item> inter = vNULL;
struct ipa_agg_jf_item *item;
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (dest_info, i);
int j;

/* Among other things, the following check should deal with all by_ref
@@ -4412,10 +4412,10 @@ static bool
cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
struct cgraph_node *node)
{
-struct ipa_node_params *dest_info = IPA_NODE_REF (node);
+class ipa_node_params *dest_info = IPA_NODE_REF (node);
int count = ipa_get_param_count (dest_info);
-struct ipa_node_params *caller_info;
-struct ipa_edge_args *args;
+class ipa_node_params *caller_info;
+class ipa_edge_args *args;
int i;

caller_info = IPA_NODE_REF (cs->caller);
@@ -4446,7 +4446,7 @@ static bool
cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
struct cgraph_node *node)
{
-struct ipa_node_params *orig_node_info;
+class ipa_node_params *orig_node_info;
struct ipa_agg_replacement_value *aggval;
int i, ec, count;

@@ -4466,7 +4466,7 @@ cgraph_edge_brings_all_agg_vals_for_node (struct cgraph_edge *cs,
for (i = 0; i < count; i++)
{
static vec<ipa_agg_jf_item> values = vec<ipa_agg_jf_item>();
-struct ipcp_param_lattices *plats;
+class ipcp_param_lattices *plats;
bool interesting = false;
for (struct ipa_agg_replacement_value *av = aggval; av; av = av->next)
if (aggval->index == i)
@@ -4722,7 +4722,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
static bool
decide_whether_version_node (struct cgraph_node *node)
{
-struct ipa_node_params *info = IPA_NODE_REF (node);
+class ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
vec<tree> known_csts;
vec<ipa_polymorphic_call_context> known_contexts;
@@ -4742,7 +4742,7 @@ decide_whether_version_node (struct cgraph_node *node)

for (i = 0; i < count;i++)
{
-struct ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
+class ipcp_param_lattices *plats = ipa_get_parm_lattices (info, i);
ipcp_lattice<tree> *lat = &plats->itself;
ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;

@@ -4832,7 +4832,7 @@ spread_undeadness (struct cgraph_node *node)
if (ipa_edge_within_scc (cs))
{
struct cgraph_node *callee;
-struct ipa_node_params *info;
+class ipa_node_params *info;

callee = cs->callee->function_symbol (NULL);
info = IPA_NODE_REF (callee);
@@ -4895,7 +4895,7 @@ identify_dead_nodes (struct cgraph_node *node)
TOPO and make specialized clones if deemed beneficial. */

static void
-ipcp_decision_stage (struct ipa_topo_info *topo)
+ipcp_decision_stage (class ipa_topo_info *topo)
{
int i;

@@ -5069,7 +5069,7 @@ ipcp_store_vr_results (void)
static unsigned int
ipcp_driver (void)
{
-struct ipa_topo_info topo;
+class ipa_topo_info topo;

if (edge_clone_summaries == NULL)
edge_clone_summaries = new edge_clone_summary_t (symtab);

@@ -2974,7 +2974,7 @@ final_warning_record::grow_type_warnings (unsigned newlen)
}
}

-struct final_warning_record *final_warning_records;
+class final_warning_record *final_warning_records;

/* Return vector containing possible targets of polymorphic call of type
OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.

@@ -212,7 +212,7 @@ ipa_fn_summary::account_size_time (int size, sreal time,
}
if (!found)
{
-struct size_time_entry new_entry;
+class size_time_entry new_entry;
new_entry.size = size;
new_entry.time = time;
new_entry.exec_predicate = exec_pred;
@@ -241,7 +241,7 @@ redirect_to_unreachable (struct cgraph_edge *e)
e->make_direct (target);
else
e->redirect_callee (target);
-struct ipa_call_summary *es = ipa_call_summaries->get (e);
+class ipa_call_summary *es = ipa_call_summaries->get (e);
e->inline_failed = CIF_UNREACHABLE;
e->count = profile_count::zero ();
es->call_stmt_size = 0;
@@ -266,7 +266,7 @@ edge_set_predicate (struct cgraph_edge *e, predicate *predicate)
&& (!e->speculative || e->callee))
e = redirect_to_unreachable (e);

-struct ipa_call_summary *es = ipa_call_summaries->get (e);
+class ipa_call_summary *es = ipa_call_summaries->get (e);
if (predicate && *predicate != true)
{
if (!es->predicate)
@@ -328,7 +328,7 @@ evaluate_conditions_for_known_args (struct cgraph_node *node,
{
clause_t clause = inline_p ? 0 : 1 << predicate::not_inlined_condition;
clause_t nonspec_clause = 1 << predicate::not_inlined_condition;
-struct ipa_fn_summary *info = ipa_fn_summaries->get (node);
+class ipa_fn_summary *info = ipa_fn_summaries->get (node);
int i;
struct condition *c;

@@ -428,7 +428,7 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
vec<ipa_agg_jump_function_p> *known_aggs_ptr)
{
struct cgraph_node *callee = e->callee->ultimate_alias_target ();
-struct ipa_fn_summary *info = ipa_fn_summaries->get (callee);
+class ipa_fn_summary *info = ipa_fn_summaries->get (callee);
vec<tree> known_vals = vNULL;
vec<ipa_agg_jump_function_p> known_aggs = vNULL;

@@ -443,9 +443,9 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
&& !e->call_stmt_cannot_inline_p
&& ((clause_ptr && info->conds) || known_vals_ptr || known_contexts_ptr))
{
-struct ipa_node_params *caller_parms_info, *callee_pi;
-struct ipa_edge_args *args = IPA_EDGE_REF (e);
-struct ipa_call_summary *es = ipa_call_summaries->get (e);
+class ipa_node_params *caller_parms_info, *callee_pi;
+class ipa_edge_args *args = IPA_EDGE_REF (e);
+class ipa_call_summary *es = ipa_call_summaries->get (e);
int i, count = ipa_get_cs_argument_count (args);

if (e->caller->global.inlined_to)
@@ -604,7 +604,7 @@ ipa_fn_summary_t::duplicate (cgraph_node *src,
{
vec<size_time_entry, va_gc> *entry = info->size_time_table;
/* Use SRC parm info since it may not be copied yet. */
-struct ipa_node_params *parms_info = IPA_NODE_REF (src);
+class ipa_node_params *parms_info = IPA_NODE_REF (src);
vec<tree> known_vals = vNULL;
int count = ipa_get_param_count (parms_info);
int i, j;
@@ -668,7 +668,7 @@ ipa_fn_summary_t::duplicate (cgraph_node *src,
for (edge = dst->callees; edge; edge = next)
{
predicate new_predicate;
-struct ipa_call_summary *es = ipa_call_summaries->get_create (edge);
+class ipa_call_summary *es = ipa_call_summaries->get_create (edge);
next = edge->next_callee;

if (!edge->inline_failed)
@@ -687,7 +687,7 @@ ipa_fn_summary_t::duplicate (cgraph_node *src,
for (edge = dst->indirect_calls; edge; edge = next)
{
predicate new_predicate;
-struct ipa_call_summary *es = ipa_call_summaries->get_create (edge);
+class ipa_call_summary *es = ipa_call_summaries->get_create (edge);
next = edge->next_callee;

gcc_checking_assert (edge->inline_failed);
@@ -744,8 +744,8 @@ ipa_fn_summary_t::duplicate (cgraph_node *src,
void
ipa_call_summary_t::duplicate (struct cgraph_edge *src,
struct cgraph_edge *dst,
-struct ipa_call_summary *srcinfo,
-struct ipa_call_summary *info)
class ipa_call_summary *srcinfo,
|
||||
class ipa_call_summary *info)
|
||||
{
|
||||
new (info) ipa_call_summary (*srcinfo);
|
||||
info->predicate = NULL;
|
||||
@ -765,12 +765,12 @@ ipa_call_summary_t::duplicate (struct cgraph_edge *src,
|
||||
|
||||
static void
|
||||
dump_ipa_call_summary (FILE *f, int indent, struct cgraph_node *node,
|
||||
struct ipa_fn_summary *info)
|
||||
class ipa_fn_summary *info)
|
||||
{
|
||||
struct cgraph_edge *edge;
|
||||
for (edge = node->callees; edge; edge = edge->next_callee)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
|
||||
int i;
|
||||
|
||||
@ -821,7 +821,7 @@ dump_ipa_call_summary (FILE *f, int indent, struct cgraph_node *node,
|
||||
}
|
||||
for (edge = node->indirect_calls; edge; edge = edge->next_callee)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
fprintf (f, "%*sindirect call loop depth:%2i freq:%4.2f size:%2i"
|
||||
" time: %2i",
|
||||
indent, "",
|
||||
@ -844,7 +844,7 @@ ipa_dump_fn_summary (FILE *f, struct cgraph_node *node)
|
||||
{
|
||||
if (node->definition)
|
||||
{
|
||||
struct ipa_fn_summary *s = ipa_fn_summaries->get (node);
|
||||
class ipa_fn_summary *s = ipa_fn_summaries->get (node);
|
||||
if (s != NULL)
|
||||
{
|
||||
size_time_entry *e;
|
||||
@ -1183,7 +1183,7 @@ eliminated_by_inlining_prob (ipa_func_body_info *fbi, gimple *stmt)
|
||||
|
||||
static void
|
||||
set_cond_stmt_execution_predicate (struct ipa_func_body_info *fbi,
|
||||
struct ipa_fn_summary *summary,
|
||||
class ipa_fn_summary *summary,
|
||||
basic_block bb)
|
||||
{
|
||||
gimple *last;
|
||||
@ -1268,7 +1268,7 @@ set_cond_stmt_execution_predicate (struct ipa_func_body_info *fbi,
|
||||
|
||||
static void
|
||||
set_switch_stmt_execution_predicate (struct ipa_func_body_info *fbi,
|
||||
struct ipa_fn_summary *summary,
|
||||
class ipa_fn_summary *summary,
|
||||
basic_block bb)
|
||||
{
|
||||
gimple *lastg;
|
||||
@ -1322,8 +1322,8 @@ set_switch_stmt_execution_predicate (struct ipa_func_body_info *fbi,
|
||||
unshare_expr_without_location (max));
|
||||
p = p1 & p2;
|
||||
}
|
||||
*(struct predicate *) e->aux
|
||||
= p.or_with (summary->conds, *(struct predicate *) e->aux);
|
||||
*(class predicate *) e->aux
|
||||
= p.or_with (summary->conds, *(class predicate *) e->aux);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1334,7 +1334,7 @@ set_switch_stmt_execution_predicate (struct ipa_func_body_info *fbi,
|
||||
static void
|
||||
compute_bb_predicates (struct ipa_func_body_info *fbi,
|
||||
struct cgraph_node *node,
|
||||
struct ipa_fn_summary *summary)
|
||||
class ipa_fn_summary *summary)
|
||||
{
|
||||
struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
|
||||
bool done = false;
|
||||
@ -1368,7 +1368,7 @@ compute_bb_predicates (struct ipa_func_body_info *fbi,
|
||||
predicate this_bb_predicate
|
||||
= *(predicate *) e->src->aux;
|
||||
if (e->aux)
|
||||
this_bb_predicate &= (*(struct predicate *) e->aux);
|
||||
this_bb_predicate &= (*(class predicate *) e->aux);
|
||||
p = p.or_with (summary->conds, this_bb_predicate);
|
||||
if (p == true)
|
||||
break;
|
||||
@ -1407,7 +1407,7 @@ compute_bb_predicates (struct ipa_func_body_info *fbi,
|
||||
|
||||
static predicate
|
||||
will_be_nonconstant_expr_predicate (ipa_func_body_info *fbi,
|
||||
struct ipa_fn_summary *summary,
|
||||
class ipa_fn_summary *summary,
|
||||
tree expr,
|
||||
vec<predicate> nonconstant_names)
|
||||
{
|
||||
@ -1478,7 +1478,7 @@ will_be_nonconstant_expr_predicate (ipa_func_body_info *fbi,
|
||||
|
||||
static predicate
|
||||
will_be_nonconstant_predicate (struct ipa_func_body_info *fbi,
|
||||
struct ipa_fn_summary *summary,
|
||||
class ipa_fn_summary *summary,
|
||||
gimple *stmt,
|
||||
vec<predicate> nonconstant_names)
|
||||
{
|
||||
@ -1586,7 +1586,7 @@ struct record_modified_bb_info
|
||||
static basic_block
|
||||
get_minimal_bb (basic_block init_bb, basic_block use_bb)
|
||||
{
|
||||
struct loop *l = find_common_loop (init_bb->loop_father, use_bb->loop_father);
|
||||
class loop *l = find_common_loop (init_bb->loop_father, use_bb->loop_father);
|
||||
if (l && l->header->count < init_bb->count)
|
||||
return l->header;
|
||||
return init_bb;
|
||||
@ -1797,7 +1797,7 @@ phi_result_unknown_predicate (ipa_func_body_info *fbi,
|
||||
NONCONSTANT_NAMES, if possible. */
|
||||
|
||||
static void
|
||||
predicate_for_phi_result (struct ipa_fn_summary *summary, gphi *phi,
|
||||
predicate_for_phi_result (class ipa_fn_summary *summary, gphi *phi,
|
||||
predicate *p,
|
||||
vec<predicate> nonconstant_names)
|
||||
{
|
||||
@ -1995,7 +1995,7 @@ analyze_function_body (struct cgraph_node *node, bool early)
|
||||
basic_block bb;
|
||||
struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
|
||||
sreal freq;
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
predicate bb_predicate;
|
||||
struct ipa_func_body_info fbi;
|
||||
vec<predicate> nonconstant_names = vNULL;
|
||||
@ -2236,7 +2236,7 @@ analyze_function_body (struct cgraph_node *node, bool early)
|
||||
if (prob == 2 && dump_file && (dump_flags & TDF_DETAILS))
|
||||
fprintf (dump_file, "\t\tWill be eliminated by inlining\n");
|
||||
|
||||
struct predicate p = bb_predicate & will_be_nonconstant;
|
||||
class predicate p = bb_predicate & will_be_nonconstant;
|
||||
|
||||
/* We can ignore statement when we proved it is never going
|
||||
to happen, but we cannot do that for call statements
|
||||
@ -2285,7 +2285,7 @@ analyze_function_body (struct cgraph_node *node, bool early)
|
||||
|
||||
if (nonconstant_names.exists () && !early)
|
||||
{
|
||||
struct loop *loop;
|
||||
class loop *loop;
|
||||
predicate loop_iterations = true;
|
||||
predicate loop_stride = true;
|
||||
|
||||
@ -2297,7 +2297,7 @@ analyze_function_body (struct cgraph_node *node, bool early)
|
||||
vec<edge> exits;
|
||||
edge ex;
|
||||
unsigned int j;
|
||||
struct tree_niter_desc niter_desc;
|
||||
class tree_niter_desc niter_desc;
|
||||
bb_predicate = *(predicate *) loop->header->aux;
|
||||
|
||||
exits = get_loop_exit_edges (loop);
|
||||
@ -2413,7 +2413,7 @@ compute_fn_summary (struct cgraph_node *node, bool early)
|
||||
{
|
||||
HOST_WIDE_INT self_stack_size;
|
||||
struct cgraph_edge *e;
|
||||
struct ipa_fn_summary *info;
|
||||
class ipa_fn_summary *info;
|
||||
|
||||
gcc_assert (!node->global.inlined_to);
|
||||
|
||||
@ -2539,7 +2539,7 @@ estimate_edge_devirt_benefit (struct cgraph_edge *ie,
|
||||
{
|
||||
tree target;
|
||||
struct cgraph_node *callee;
|
||||
struct ipa_fn_summary *isummary;
|
||||
class ipa_fn_summary *isummary;
|
||||
enum availability avail;
|
||||
bool speculative;
|
||||
|
||||
@ -2587,7 +2587,7 @@ estimate_edge_size_and_time (struct cgraph_edge *e, int *size, int *min_size,
|
||||
vec<ipa_agg_jump_function_p> known_aggs,
|
||||
ipa_hints *hints)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
int call_size = es->call_stmt_size;
|
||||
int call_time = es->call_stmt_time;
|
||||
int cur_size;
|
||||
@ -2624,7 +2624,7 @@ estimate_calls_size_and_time (struct cgraph_node *node, int *size,
|
||||
struct cgraph_edge *e;
|
||||
for (e = node->callees; e; e = e->next_callee)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get_create (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get_create (e);
|
||||
|
||||
/* Do not care about zero sized builtins. */
|
||||
if (e->inline_failed && !es->call_stmt_size)
|
||||
@ -2655,7 +2655,7 @@ estimate_calls_size_and_time (struct cgraph_node *node, int *size,
|
||||
}
|
||||
for (e = node->indirect_calls; e; e = e->next_callee)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get_create (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get_create (e);
|
||||
if (!es->predicate
|
||||
|| es->predicate->evaluate (possible_truths))
|
||||
estimate_edge_size_and_time (e, size,
|
||||
@ -2690,7 +2690,7 @@ estimate_node_size_and_time (struct cgraph_node *node,
|
||||
vec<inline_param_summary>
|
||||
inline_param_summary)
|
||||
{
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
size_time_entry *e;
|
||||
int size = 0;
|
||||
sreal time = 0;
|
||||
@ -2881,9 +2881,9 @@ remap_edge_change_prob (struct cgraph_edge *inlined_edge,
|
||||
if (ipa_node_params_sum)
|
||||
{
|
||||
int i;
|
||||
struct ipa_edge_args *args = IPA_EDGE_REF (edge);
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
struct ipa_call_summary *inlined_es
|
||||
class ipa_edge_args *args = IPA_EDGE_REF (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *inlined_es
|
||||
= ipa_call_summaries->get (inlined_edge);
|
||||
|
||||
if (es->param.length () == 0)
|
||||
@ -2924,8 +2924,8 @@ remap_edge_change_prob (struct cgraph_edge *inlined_edge,
|
||||
static void
|
||||
remap_edge_summaries (struct cgraph_edge *inlined_edge,
|
||||
struct cgraph_node *node,
|
||||
struct ipa_fn_summary *info,
|
||||
struct ipa_fn_summary *callee_info,
|
||||
class ipa_fn_summary *info,
|
||||
class ipa_fn_summary *callee_info,
|
||||
vec<int> operand_map,
|
||||
vec<int> offset_map,
|
||||
clause_t possible_truths,
|
||||
@ -2934,7 +2934,7 @@ remap_edge_summaries (struct cgraph_edge *inlined_edge,
|
||||
struct cgraph_edge *e, *next;
|
||||
for (e = node->callees; e; e = next)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
predicate p;
|
||||
next = e->next_callee;
|
||||
|
||||
@ -2960,7 +2960,7 @@ remap_edge_summaries (struct cgraph_edge *inlined_edge,
|
||||
}
|
||||
for (e = node->indirect_calls; e; e = next)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
predicate p;
|
||||
next = e->next_callee;
|
||||
|
||||
@ -2980,8 +2980,8 @@ remap_edge_summaries (struct cgraph_edge *inlined_edge,
|
||||
/* Same as remap_predicate, but set result into hint *HINT. */
|
||||
|
||||
static void
|
||||
remap_hint_predicate (struct ipa_fn_summary *info,
|
||||
struct ipa_fn_summary *callee_info,
|
||||
remap_hint_predicate (class ipa_fn_summary *info,
|
||||
class ipa_fn_summary *callee_info,
|
||||
predicate **hint,
|
||||
vec<int> operand_map,
|
||||
vec<int> offset_map,
|
||||
@ -3013,7 +3013,7 @@ ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge)
|
||||
ipa_fn_summary *callee_info = ipa_fn_summaries->get (edge->callee);
|
||||
struct cgraph_node *to = (edge->caller->global.inlined_to
|
||||
? edge->caller->global.inlined_to : edge->caller);
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get (to);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get (to);
|
||||
clause_t clause = 0; /* not_inline is known to be false. */
|
||||
size_time_entry *e;
|
||||
vec<int> operand_map = vNULL;
|
||||
@ -3021,7 +3021,7 @@ ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge)
|
||||
int i;
|
||||
predicate toplev_predicate;
|
||||
predicate true_p = true;
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
|
||||
if (es->predicate)
|
||||
toplev_predicate = *es->predicate;
|
||||
@ -3034,7 +3034,7 @@ ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge)
|
||||
evaluate_properties_for_edge (edge, true, &clause, NULL, NULL, NULL, NULL);
|
||||
if (ipa_node_params_sum && callee_info->conds)
|
||||
{
|
||||
struct ipa_edge_args *args = IPA_EDGE_REF (edge);
|
||||
class ipa_edge_args *args = IPA_EDGE_REF (edge);
|
||||
int count = ipa_get_cs_argument_count (args);
|
||||
int i;
|
||||
|
||||
@ -3127,7 +3127,7 @@ ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge)
|
||||
void
|
||||
ipa_update_overall_fn_summary (struct cgraph_node *node)
|
||||
{
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get_create (node);
|
||||
size_time_entry *e;
|
||||
int i;
|
||||
|
||||
@ -3223,10 +3223,10 @@ ipa_fn_summary_generate (void)
|
||||
/* Write inline summary for edge E to OB. */
|
||||
|
||||
static void
|
||||
read_ipa_call_summary (struct lto_input_block *ib, struct cgraph_edge *e,
|
||||
read_ipa_call_summary (class lto_input_block *ib, struct cgraph_edge *e,
|
||||
bool prevails)
|
||||
{
|
||||
struct ipa_call_summary *es = prevails
|
||||
class ipa_call_summary *es = prevails
|
||||
? ipa_call_summaries->get_create (e) : NULL;
|
||||
predicate p;
|
||||
int length, i;
|
||||
@ -3277,7 +3277,7 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
|
||||
const int cfg_offset = sizeof (struct lto_function_header);
|
||||
const int main_offset = cfg_offset + header->cfg_size;
|
||||
const int string_offset = main_offset + header->main_size;
|
||||
struct data_in *data_in;
|
||||
class data_in *data_in;
|
||||
unsigned int i, count2, j;
|
||||
unsigned int f_count;
|
||||
|
||||
@ -3292,7 +3292,7 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
|
||||
{
|
||||
unsigned int index;
|
||||
struct cgraph_node *node;
|
||||
struct ipa_fn_summary *info;
|
||||
class ipa_fn_summary *info;
|
||||
lto_symtab_encoder_t encoder;
|
||||
struct bitpack_d bp;
|
||||
struct cgraph_edge *e;
|
||||
@ -3349,7 +3349,7 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
|
||||
gcc_assert (!info || !info->size_time_table);
|
||||
for (j = 0; j < count2; j++)
|
||||
{
|
||||
struct size_time_entry e;
|
||||
class size_time_entry e;
|
||||
|
||||
e.size = streamer_read_uhwi (&ib);
|
||||
e.time = sreal::stream_in (&ib);
|
||||
@ -3423,7 +3423,7 @@ ipa_fn_summary_read (void)
|
||||
static void
|
||||
write_ipa_call_summary (struct output_block *ob, struct cgraph_edge *e)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (e);
|
||||
int i;
|
||||
|
||||
streamer_write_uhwi (ob, es->call_stmt_size);
|
||||
@ -3471,7 +3471,7 @@ ipa_fn_summary_write (void)
|
||||
cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
|
||||
if (cnode && cnode->definition && !cnode->alias)
|
||||
{
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get (cnode);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get (cnode);
|
||||
struct bitpack_d bp;
|
||||
struct cgraph_edge *edge;
|
||||
int i;
|
||||
|
@ -184,7 +184,7 @@ public:
|
||||
|
||||
static ipa_fn_summary_t *create_ggc (symbol_table *symtab)
|
||||
{
|
||||
struct ipa_fn_summary_t *summary = new (ggc_alloc <ipa_fn_summary_t> ())
|
||||
class ipa_fn_summary_t *summary = new (ggc_alloc <ipa_fn_summary_t> ())
|
||||
ipa_fn_summary_t (symtab);
|
||||
summary->disable_insertion_hook ();
|
||||
return summary;
|
||||
|
@ -221,7 +221,7 @@ ipa_hsa_read_section (struct lto_file_decl_data *file_data, const char *data,
|
||||
const int cfg_offset = sizeof (struct lto_function_header);
|
||||
const int main_offset = cfg_offset + header->cfg_size;
|
||||
const int string_offset = main_offset + header->main_size;
|
||||
struct data_in *data_in;
|
||||
class data_in *data_in;
|
||||
unsigned int i;
|
||||
unsigned int count;
|
||||
|
||||
|
@ -614,8 +614,8 @@ func_checker::compare_loops (basic_block bb1, basic_block bb2)
|
||||
if ((bb1->loop_father == NULL) != (bb2->loop_father == NULL))
|
||||
return return_false ();
|
||||
|
||||
struct loop *l1 = bb1->loop_father;
|
||||
struct loop *l2 = bb2->loop_father;
|
||||
class loop *l1 = bb1->loop_father;
|
||||
class loop *l2 = bb2->loop_father;
|
||||
if (l1 == NULL)
|
||||
return true;
|
||||
|
||||
|
@ -482,7 +482,7 @@ sem_function::param_used_p (unsigned int i)
|
||||
if (ipa_node_params_sum == NULL)
|
||||
return true;
|
||||
|
||||
struct ipa_node_params *parms_info = IPA_NODE_REF (get_node ());
|
||||
class ipa_node_params *parms_info = IPA_NODE_REF (get_node ());
|
||||
|
||||
if (vec_safe_length (parms_info->descriptors) <= i)
|
||||
return true;
|
||||
|
@ -128,7 +128,7 @@ do_estimate_edge_time (struct cgraph_edge *edge)
|
||||
vec<tree> known_vals;
|
||||
vec<ipa_polymorphic_call_context> known_contexts;
|
||||
vec<ipa_agg_jump_function_p> known_aggs;
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
int min_size;
|
||||
|
||||
callee = edge->callee->ultimate_alias_target ();
|
||||
@ -264,7 +264,7 @@ int
|
||||
estimate_size_after_inlining (struct cgraph_node *node,
|
||||
struct cgraph_edge *edge)
|
||||
{
|
||||
struct ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
class ipa_call_summary *es = ipa_call_summaries->get (edge);
|
||||
ipa_fn_summary *s = ipa_fn_summaries->get (node);
|
||||
if (!es->predicate || *es->predicate != false)
|
||||
{
|
||||
@ -321,7 +321,7 @@ int
|
||||
estimate_growth (struct cgraph_node *node)
|
||||
{
|
||||
struct growth_data d = { node, false, false, 0 };
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get (node);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get (node);
|
||||
|
||||
node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true);
|
||||
|
||||
|
@ -1037,7 +1037,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
|
||||
int growth;
|
||||
sreal edge_time, unspec_edge_time;
|
||||
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
|
||||
struct ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
|
||||
class ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
|
||||
ipa_hints hints;
|
||||
cgraph_node *caller = (edge->caller->global.inlined_to
|
||||
? edge->caller->global.inlined_to
|
||||
@ -1799,7 +1799,7 @@ inline_small_functions (void)
|
||||
&& (node->has_gimple_body_p () || node->thunk.thunk_p)
|
||||
&& opt_for_fn (node->decl, optimize))
|
||||
{
|
||||
struct ipa_fn_summary *info = ipa_fn_summaries->get (node);
|
||||
class ipa_fn_summary *info = ipa_fn_summaries->get (node);
|
||||
struct ipa_dfs_info *dfs = (struct ipa_dfs_info *) node->aux;
|
||||
|
||||
/* Do not account external functions, they will be optimized out
|
||||
|
@ -688,8 +688,8 @@ ipa_polymorphic_call_context::stream_out (struct output_block *ob) const
|
||||
/* Stream in the context from IB and DATA_IN. */
|
||||
|
||||
void
|
||||
ipa_polymorphic_call_context::stream_in (struct lto_input_block *ib,
|
||||
struct data_in *data_in)
|
||||
ipa_polymorphic_call_context::stream_in (class lto_input_block *ib,
|
||||
class data_in *data_in)
|
||||
{
|
||||
struct bitpack_d bp = streamer_read_bitpack (ib);
|
||||
|
||||
|
@ -398,8 +398,8 @@ predicate::remap_after_duplication (clause_t possible_truths)
|
||||
for other purposes). */
|
||||
|
||||
predicate
|
||||
predicate::remap_after_inlining (struct ipa_fn_summary *info,
|
||||
struct ipa_fn_summary *callee_info,
|
||||
predicate::remap_after_inlining (class ipa_fn_summary *info,
|
||||
class ipa_fn_summary *callee_info,
|
||||
vec<int> operand_map,
|
||||
vec<int> offset_map,
|
||||
clause_t possible_truths,
|
||||
@ -483,7 +483,7 @@ predicate::remap_after_inlining (struct ipa_fn_summary *info,
|
||||
/* Read predicate from IB. */
|
||||
|
||||
void
|
||||
predicate::stream_in (struct lto_input_block *ib)
|
||||
predicate::stream_in (class lto_input_block *ib)
|
||||
{
|
||||
clause_t clause;
|
||||
int k = 0;
|
||||
@ -522,7 +522,7 @@ predicate::stream_out (struct output_block *ob)
|
||||
It can be NULL, which means this not a load from an aggregate. */
|
||||
|
||||
predicate
|
||||
add_condition (struct ipa_fn_summary *summary, int operand_num,
|
||||
add_condition (class ipa_fn_summary *summary, int operand_num,
|
||||
HOST_WIDE_INT size, struct agg_position_info *aggpos,
|
||||
enum tree_code code, tree val)
|
||||
{
|
||||
|
@ -205,11 +205,11 @@ public:
|
||||
predicate remap_after_duplication (clause_t);
|
||||
|
||||
/* Return predicate equal to THIS after inlining. */
|
||||
predicate remap_after_inlining (struct ipa_fn_summary *,
|
||||
struct ipa_fn_summary *,
|
||||
predicate remap_after_inlining (class ipa_fn_summary *,
|
||||
class ipa_fn_summary *,
|
||||
vec<int>, vec<int>, clause_t, const predicate &);
|
||||
|
||||
void stream_in (struct lto_input_block *);
|
||||
void stream_in (class lto_input_block *);
|
||||
void stream_out (struct output_block *);
|
||||
|
||||
private:
|
||||
@ -227,6 +227,6 @@ private:
|
||||
};
|
||||
|
||||
void dump_condition (FILE *f, conditions conditions, int cond);
|
||||
predicate add_condition (struct ipa_fn_summary *summary, int operand_num,
|
||||
predicate add_condition (class ipa_fn_summary *summary, int operand_num,
|
||||
HOST_WIDE_INT size, struct agg_position_info *aggpos,
|
||||
enum tree_code code, tree val);
|
||||
|
@ -258,7 +258,7 @@ ipa_profile_read_summary (void)
|
||||
{
|
||||
const char *data;
|
||||
size_t len;
|
||||
struct lto_input_block *ib
|
||||
class lto_input_block *ib
|
||||
= lto_create_simple_input_block (file_data,
|
||||
LTO_section_ipa_profile,
|
||||
&data, &len);
|
||||
|
Some files were not shown because too many files have changed in this diff.
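Every hunk shown above makes the same mechanical change: where a type that is defined with the class class-key was being referenced through an elaborated-type-specifier spelled with struct (for example struct ipa_fn_summary or struct ipa_call_summary), the reference is respelled with class so the key agrees with the definition; the names, initializers and behavior are untouched. A minimal, self-contained sketch of the pattern follows. The type summary and the two helper functions are hypothetical stand-ins for illustration only, not types or functions from this diff.

/* Hypothetical example: 'summary' plays the role of a type such as
   ipa_fn_summary that is defined with the 'class' key.  */

class summary
{
public:
  int size;
  summary () : size (0) {}
};

/* Before the change: the reference uses 'struct' although the definition
   uses 'class'.  Both keys name the same type, so this is valid C++, but
   compilers that check class-key consistency diagnose the mismatch.  */
static struct summary *
get_summary_before (summary &s)
{
  return &s;
}

/* After the change: the class-key at the point of use matches the
   definition, so no mismatch is reported.  */
static class summary *
get_summary_after (summary &s)
{
  return &s;
}

int
main ()
{
  summary s;
  return get_summary_before (s)->size + get_summary_after (s)->size;
}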