#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compilationPolicy.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compilerEvent.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "jfr/jfrEvents.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/events.hpp"

InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       int max_inline_level) :
  C(c),
  _caller_jvms(nullptr),
  _method(callee),
  _late_inline(false),
  _caller_tree((InlineTree*) caller_tree),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _max_inline_level(max_inline_level),
  _subtrees(c->comp_arena(), 2, 0, nullptr),
  _msg(nullptr)
{
#ifndef PRODUCT
  _count_inlines = 0;
#endif
  _forced_inline = false;
  if (caller_jvms != nullptr) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
    assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  }
  assert((caller_tree == nullptr ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts: every enclosing caller accumulates the
  // inlined bytecode size of this new callee.
  InlineTree* caller = (InlineTree*)caller_tree;
  for (; caller != nullptr; caller = (InlineTree*)caller->caller_tree()) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

// Return true when escape analysis is on and the callee is a constructor,
// a super constructor called from an inlined constructor, a boxing method,
// or a method returning an Iterator (important for for-each loops).
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_object_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_object_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  ciType* retType = callee_method->signature()->return_type();
  ciKlass* iter = C->env()->Iterator_klass();
  if (retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) {
    return true;
  }
  return false;
}

// Force inlining of unboxing accessors when boxing elimination is enabled.
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}
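
// Positive filter: should the callee be inlined?  Overrides are checked
// from strongest to weakest: compile directives, the force-inline
// annotation, and ciReplay data all short-circuit the size- and
// frequency-based heuristics that follow.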
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, bool& should_delay, ciCallProfile& profile) {
  // Allows targeted inlining.
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    _forced_inline = true;
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    _forced_inline = true;
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    _forced_inline = true;
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge).
  if (callee_method->interpreter_throwout_count() > InlineThrowCount &&
      size < InlineThrowMaxSize) {
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count = caller_method->scale_count(profile.count());
  int invoke_count    = caller_method->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  double freq = (double)call_site_count / (double)invoke_count;

  // Bump the max size if the call is frequent.
  if ((freq >= InlineFrequencyRatio) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%lf):", freq);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot: reject medium-sized methods that already have compiled code.
    if (callee_method->has_compiled_code() &&
        callee_method->inline_instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}
bool InlineTree::should_not_inline(ciMethod* callee_method, ciMethod* caller_method,
                                   int caller_bci, bool& should_delay, ciCallProfile& profile) {
  const char* fail_msg = nullptr;

  // First check all inlining restrictions which are required for correctness.
  if (callee_method->is_abstract()) {
    fail_msg = "abstract method";
  } else if (!callee_method->holder()->is_initialized() &&
             // access allowed in the context of a static initializer
             C->needs_clinit_barrier(callee_method->holder(), caller_method)) {
    fail_msg = "method holder not initialized";
  } else if (callee_method->is_native()) {
    fail_msg = "native method";
  } else if (callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // Don't inline a method that changes Thread.currentThread() except
  // into another method that itself is allowed to change the current thread.
  if (callee_method->changes_current_thread()
      && !C->method()->changes_current_thread()) {
    fail_msg = "method changes current thread";
  }

  // One more inlining restriction.
  if (fail_msg == nullptr && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != nullptr) {
    set_msg(fail_msg);
    return true;
  }

  // Ignore heuristic controls on inlining.
  if (C->directive()->should_inline(callee_method)) {
    set_msg("force inline by CompileCommand");
    return false;
  }

  if (C->directive()->should_not_inline(callee_method)) {
    set_msg("disallowed by CompileCommand");
    return true;
  }

  int inline_depth = inline_level() + 1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth, should_delay)) {
    if (should_delay) {
      set_msg("force (incremental) inline by ciReplay");
    } else {
      set_msg("force inline by ciReplay");
    }
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (callee_method->force_inline()) {
    set_msg("force inline by annotation");
    return false;
  }

  // Now perform checks which are heuristic.

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (callee_method->has_compiled_code() &&
      callee_method->inline_instructions_size() > InlineSmallCode) {
    set_msg("already compiled into a big method");
    return true;
  }

  // Don't inline exception code unless the top method belongs to an
  // exception class.
  if (caller_tree() != nullptr &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree* top = this;
    while (top->caller_tree() != nullptr) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // Use frequency-based objections only for non-trivial methods.
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // Don't use counts with -Xcomp.
  if (UseInterpreter) {
    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape analysis: inline all executed constructors.
      return false;
    }

    if (MinInlineFrequencyRatio > 0) {
      int call_site_count = caller_method->scale_count(profile.count());
      int invoke_count    = caller_method->interpreter_invocation_count();
      assert(invoke_count != 0, "require invocation count greater than zero");
      double freq = (double)call_site_count / (double)invoke_count;
      double min_freq = MAX2(MinInlineFrequencyRatio, 1.0 / CompilationPolicy::min_invocations());

      if (freq < min_freq) {
        set_msg("low call site frequency");
        return true;
      }
    }
  }

  return false;
}
bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) {
  if (!UseInterpreter) {
    return false; // -Xcomp
  }
  if (profile.count() > 0) {
    return false; // reachable according to profile
  }
  if (!callee_method->was_executed_more_than(0)) {
    return true; // callee was never executed
  }
  if (caller_method->is_not_reached(caller_bci)) {
    return true; // call site not resolved
  }
  if (profile.count() == -1) {
    return false; // immature profile; optimistically treat as reached
  }
  assert(profile.count() == 0, "sanity");

  // Profile info is scarce.  Try to guess: a call site in the start block
  // should be reachable unless an exception is thrown earlier.
  ciMethodBlocks* caller_blocks = caller_method->get_method_blocks();
  bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0;
  if (is_start_block) {
    return false; // treat the call as reached, being part of the start block
  }
  return true; // give up and treat the call site as not reached
}
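
// Combined inlining decision: run the positive filter (should_inline),
// then the negative filter (should_not_inline), then size, depth,
// node-count, and recursion limits.  Sets should_delay when incremental
// (late) inlining is the better option.  Returns true if it is OK to inline.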
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               bool& should_delay) {

  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset

  // 'should_delay' can be overridden during replay compilation.
  if (!should_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }
  // 'should_delay' can be overridden during replay compilation.
  if (should_not_inline(callee_method, caller_method, caller_bci, should_delay, profile)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // Accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // Suppress a few checks for accessors and trivial methods.
  if (callee_method->code_size() > MaxTrivialSize) {
    // Don't inline into giant methods.
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if (!UseInterpreter &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape analysis stress testing when running -Xcomp:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompileCommand, ciReplay or annotation.
    } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) {
      // Don't inline unreached call sites.
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // Detect direct and indirect recursive inlining.
  {
    // Count the current method and the callee.
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // Count callers of the current method and the callee.
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : nullptr;
    for (JVMState* j = jvms->caller(); j != nullptr && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Compiled lambda forms are heavily reused, so true recursion is
          // detected by also comparing the receiver (argument 0).
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // OK, inline this method.
  return true;
}
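
// Basic sanity checks that must pass before any inlining policy is
// consulted: the callee must exist, its holder must be loaded (and
// initialized when a clinit barrier would otherwise be required), and
// under -Xcomp the call site's constant pool entries must resolve.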
bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  // Check if a callee_method was suggested.
  if (callee_method == nullptr) {
    return false;
  }
  ciInstanceKlass* callee_holder = callee_method->holder();
  // Check if the klass of callee_method is loaded.
  if (!callee_holder->is_loaded()) {
    return false;
  }
  if (!callee_holder->is_initialized() &&
      // access allowed in the context of a static initializer
      C->needs_clinit_barrier(callee_holder, caller_method)) {
    return false;
  }
  if (!UseInterpreter) {
    // Running -Xcomp: check that the constant pool's call site has been
    // visited; this is stricter than callee_holder->is_initialized().
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2();
      if (!caller_method->is_klass_loaded(index, call_bc, true)) {
        return false;
      }
      // Try to do constant pool resolution if running -Xcomp.
      if (!caller_method->check_call(index, call_bc == Bytecodes::_invokestatic)) {
        return false;
      }
    }
  }
  return true;
}

const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native())                    return "native method";
  if ( callee->is_abstract())                  return "abstract method";
  if (!callee->has_balanced_monitors())        return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
  if (!callee->can_be_parsed())                return "cannot be parsed";
  return nullptr; // method is parsable
}
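
// Report an inlining decision to every active sink: the LogCompilation
// XML log, unified logging / PrintInlining output, and a JFR
// CompilerInlining event.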
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                ciMethod* caller_method, bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != nullptr, "just checking");
  if (C->log() != nullptr) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  CompileTask::print_inlining_ul(callee_method, inline_level(),
                                 caller_bci, inlining_result_of(success), inline_msg);
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inlining_result_of(success), inline_msg);
    guarantee(callee_method != nullptr, "would crash in CompilerEvent::InlineEvent::post");
    if (Verbose) {
      const InlineTree* top = this;
      while (top->caller_tree() != nullptr) { top = top->caller_tree(); }
      //tty->print("  bcs: %d+%d  invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
  EventCompilerInlining event;
  if (event.should_commit()) {
    CompilerEvent::InlineEvent::post(event, C->compile_id(), caller_method->get_Method(), callee_method, success, inline_msg, caller_bci);
  }
}
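
// Main entry point for deciding whether a call site may be inlined.
// Returns the decision, logs it via print_inlining(), and reports via
// should_delay whether the inline should be performed late (incrementally).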
bool InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile,
                              bool& should_delay) {
#ifdef ASSERT
  assert(callee_method != nullptr, "caller checks for optimized virtual!");
  // Make sure the incoming jvms has the same information content as me.
  if (jvms->caller() == nullptr) {
    assert(_caller_jvms == nullptr, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int       caller_bci    = jvms->bci();
  ciMethod* caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != nullptr) {
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }

  // Check if the inlining policy says no.
  bool success = try_to_inline(callee_method, caller_method, caller_bci, jvms, profile,
                               should_delay); // out parameter
  if (success) {
    // Inline!
    if (msg() == nullptr) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, caller_method, true /* success */);
    InlineTree* callee_tree = build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (should_delay) {
      // Record the late inlining decision so it can be dumped for compiler replay.
      callee_tree->set_late_inline();
    }
    return true;
  } else {
    // Do not inline.
    if (msg() == nullptr) {
      set_msg("too cold to inline");
    }
    print_inlining(callee_method, caller_bci, caller_method, false /* !success */);
    return false;
  }
}
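
// Create (or reuse) the InlineTree node for a callee at the given call
// site.  Calls through method handle adapters and compiled lambda forms
// get one extra level of inlining budget so that those synthetic frames
// do not count against the user-visible depth limit.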
InlineTree* InlineTree::build_inline_tree_for_callee(ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != nullptr) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != nullptr) {
    if (caller_jvms->method()->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count actions in MH or indy adapter frames
    } else if (callee_method->is_method_handle_intrinsic() ||
               callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  // Allocate in the comp_arena so the InlineTree is still live when a
  // replay compilation file is dumped.
  InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}

// Find an already-built subtree for the given callee at the given bci.
InlineTree* InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return nullptr;
}

InlineTree* InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree.
  InlineTree* ilt = new InlineTree(C, nullptr, C->method(), nullptr, -1, MaxInlineLevel);

  return ilt;
}
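
// Given a jvms, which determines a call chain from the root method,
// find the corresponding InlineTree, building the node for the innermost
// frame on demand if it is missing.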
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == nullptr) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != nullptr, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count the number of nodes in this subtree, including this node.
int InlineTree::count() const {
  int result = 1;
  for (int i = 0; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}
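
// Dump this subtree in the inline-data form read back by ciReplay:
// for each node, "<inline_level> <caller_bci> <late_inline> <method name>",
// followed by its children in pre-order.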
void InlineTree::dump_replay_data(outputStream* out, int depth_adjust) {
  out->print(" %d %d %d ", inline_level() + depth_adjust, caller_bci(), _late_inline);
  method()->dump_name_as_ascii(out);
  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out, depth_adjust);
  }
}

#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif // PRODUCT