templateTable_arm.cpp (4505 lines, 140.2 KB)

/*
 * Copyright (c) 2008, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "gc/shared/barrierSetAssembler.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/tlab_globals.hpp"
#include "interpreter/interp_masm.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/universe.hpp"
#include "oops/cpCache.hpp"
#include "oops/klass.inline.hpp"
#include "oops/methodCounters.hpp"
#include "oops/methodData.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/resolvedIndyEntry.hpp"
#include "oops/resolvedFieldEntry.hpp"
#include "oops/resolvedMethodEntry.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/synchronizer.hpp"
#include "utilities/powerOfTwo.hpp"

#define __ _masm->

//----------------------------------------------------------------------------------------------------
// Address computation

// local variables
static inline Address iaddress(int n)            {
  return Address(Rlocals, Interpreter::local_offset_in_bytes(n));
}

static inline Address laddress(int n)            { return iaddress(n + 1); }
static inline Address haddress(int n)            { return iaddress(n + 0); }

static inline Address faddress(int n)            { return iaddress(n); }
static inline Address daddress(int n)            { return laddress(n); }
static inline Address aaddress(int n)            { return iaddress(n); }


void TemplateTable::get_local_base_addr(Register r, Register index) {
  __ sub(r, Rlocals, AsmOperand(index, lsl, Interpreter::logStackElementSize));
}

Address TemplateTable::load_iaddress(Register index, Register scratch) {
  return Address(Rlocals, index, lsl, Interpreter::logStackElementSize, basic_offset, sub_offset);
}
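
// Note: locals are addressed downward from Rlocals (both helpers above
// subtract the scaled index). Rough scalar sketch of the slot address
// (illustration only; 'rlocals' stands for the value held in Rlocals):
//
//   intptr_t* local_slot(intptr_t* rlocals, int index) {
//     return rlocals - index;   // i.e. Rlocals - index * wordSize
//   }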

Address TemplateTable::load_aaddress(Register index, Register scratch) {
  return load_iaddress(index, scratch);
}

Address TemplateTable::load_faddress(Register index, Register scratch) {
#ifdef __SOFTFP__
  return load_iaddress(index, scratch);
#else
  get_local_base_addr(scratch, index);
  return Address(scratch);
#endif // __SOFTFP__
}

Address TemplateTable::load_daddress(Register index, Register scratch) {
  get_local_base_addr(scratch, index);
  return Address(scratch, Interpreter::local_offset_in_bytes(1));
}

// At top of Java expression stack which may be different than SP.
// It isn't for category 1 objects.
static inline Address at_tos() {
  return Address(Rstack_top, Interpreter::expr_offset_in_bytes(0));
}

static inline Address at_tos_p1() {
  return Address(Rstack_top, Interpreter::expr_offset_in_bytes(1));
}

static inline Address at_tos_p2() {
  return Address(Rstack_top, Interpreter::expr_offset_in_bytes(2));
}


// Loads double/long local into R0_tos_lo/R1_tos_hi with two
// separate ldr instructions (supports nonadjacent values).
// Used for longs in all modes, and for doubles in SOFTFP mode.
void TemplateTable::load_category2_local(Register Rlocal_index, Register tmp) {
  const Register Rlocal_base = tmp;
  assert_different_registers(Rlocal_index, tmp);

  get_local_base_addr(Rlocal_base, Rlocal_index);
  __ ldr(R0_tos_lo, Address(Rlocal_base, Interpreter::local_offset_in_bytes(1)));
  __ ldr(R1_tos_hi, Address(Rlocal_base, Interpreter::local_offset_in_bytes(0)));
}


// Stores R0_tos_lo/R1_tos_hi to double/long local with two
// separate str instructions (supports nonadjacent values).
// Used for longs in all modes, and for doubles in SOFTFP mode
void TemplateTable::store_category2_local(Register Rlocal_index, Register tmp) {
  const Register Rlocal_base = tmp;
  assert_different_registers(Rlocal_index, tmp);

  get_local_base_addr(Rlocal_base, Rlocal_index);
  __ str(R0_tos_lo, Address(Rlocal_base, Interpreter::local_offset_in_bytes(1)));
  __ str(R1_tos_hi, Address(Rlocal_base, Interpreter::local_offset_in_bytes(0)));
}
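
// Resulting layout for a category-2 (long/double) local at index n, as
// implied by the two helpers above and by laddress()/haddress():
//
//   Rlocal_base + local_offset_in_bytes(0) : high word (haddress, R1_tos_hi)
//   Rlocal_base + local_offset_in_bytes(1) : low word  (laddress, R0_tos_lo)
//
// which is why laddress(n) is defined as iaddress(n + 1) while haddress(n)
// is iaddress(n + 0).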

// Returns address of Java array element using temp register as address base.
Address TemplateTable::get_array_elem_addr(BasicType elemType, Register array, Register index, Register temp) {
  int logElemSize = exact_log2(type2aelembytes(elemType));
  __ add_ptr_scaled_int32(temp, array, index, logElemSize);
  return Address(temp, arrayOopDesc::base_offset_in_bytes(elemType));
}

// Returns address of Java array element using temp register as offset from array base
Address TemplateTable::get_array_elem_addr_same_base(BasicType elemType, Register array, Register index, Register temp) {
  int logElemSize = exact_log2(type2aelembytes(elemType));
  if (logElemSize == 0) {
    __ add(temp, index, arrayOopDesc::base_offset_in_bytes(elemType));
  } else {
    __ mov(temp, arrayOopDesc::base_offset_in_bytes(elemType));
    __ add_ptr_scaled_int32(temp, temp, index, logElemSize);
  }
  return Address(array, temp);
}
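
// Both helpers produce the same effective address, only distributed
// differently across registers. Scalar sketch (illustration only):
//
//   addr = array + arrayOopDesc::base_offset_in_bytes(elemType)
//                + (index << exact_log2(type2aelembytes(elemType)));
//
// The _same_base variant keeps the array register itself as the base and
// accumulates the whole displacement in temp, presumably so code that still
// needs the array oop (e.g. GC barriers on array accesses) has it intact.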

//----------------------------------------------------------------------------------------------------
// Condition conversion
AsmCondition convNegCond(TemplateTable::Condition cc) {
  switch (cc) {
    case TemplateTable::equal        : return ne;
    case TemplateTable::not_equal    : return eq;
    case TemplateTable::less         : return ge;
    case TemplateTable::less_equal   : return gt;
    case TemplateTable::greater      : return le;
    case TemplateTable::greater_equal: return lt;
  }
  ShouldNotReachHere();
  return nv;
}
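
// Callers branch on the negated condition to skip the taken path: e.g. for
// TemplateTable::less the generated compare branches to the not-taken label
// on ge, so execution falls through exactly when the original condition
// (signed less-than) holds.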

//----------------------------------------------------------------------------------------------------
// Miscellaneous helper routines

// Store an oop (or null) at the address described by obj.
// Blows all volatile registers (R0-R3, Rtemp, LR).
// Also destroys new_val and obj.base().
static void do_oop_store(InterpreterMacroAssembler* _masm,
                         Address obj,
                         Register new_val,
                         Register tmp1,
                         Register tmp2,
                         Register tmp3,
                         bool is_null,
                         DecoratorSet decorators = 0) {

  assert_different_registers(obj.base(), new_val, tmp1, tmp2, tmp3, noreg);
  if (is_null) {
    __ store_heap_oop_null(obj, new_val, tmp1, tmp2, tmp3, decorators);
  } else {
    __ store_heap_oop(obj, new_val, tmp1, tmp2, tmp3, decorators);
  }
}

static void do_oop_load(InterpreterMacroAssembler* _masm,
                        Register dst,
                        Address obj,
                        DecoratorSet decorators = 0) {
  __ load_heap_oop(dst, obj, noreg, noreg, noreg, decorators);
}

Address TemplateTable::at_bcp(int offset) {
  assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
  return Address(Rbcp, offset);
}


// Blows volatile registers R0-R3, Rtemp, LR.
void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
                                   Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
                                   int byte_no) {
  assert_different_registers(bc_reg, temp_reg);
  if (!RewriteBytecodes)  return;
  Label L_patch_done;

  switch (bc) {
  case Bytecodes::_fast_aputfield:
  case Bytecodes::_fast_bputfield:
  case Bytecodes::_fast_zputfield:
  case Bytecodes::_fast_cputfield:
  case Bytecodes::_fast_dputfield:
  case Bytecodes::_fast_fputfield:
  case Bytecodes::_fast_iputfield:
  case Bytecodes::_fast_lputfield:
  case Bytecodes::_fast_sputfield:
    {
      // We skip bytecode quickening for putfield instructions when
      // the put_code written to the constant pool cache is zero.
      // This is required so that every execution of this instruction
      // calls out to InterpreterRuntime::resolve_get_put to do
      // additional, required work.
      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
      assert(load_bc_into_bc_reg, "we use bc_reg as temp");
      __ load_field_entry(temp_reg, bc_reg);
      if (byte_no == f1_byte) {
        __ add(temp_reg, temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset()));
      } else {
        __ add(temp_reg, temp_reg, in_bytes(ResolvedFieldEntry::put_code_offset()));
      }
      // Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
      __ ldrb(temp_reg, temp_reg);
      __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);

      __ mov(bc_reg, bc);
      __ cbz(temp_reg, L_patch_done);  // test if bytecode is zero
    }
    break;
  default:
    assert(byte_no == -1, "sanity");
    // the pair bytecodes have already done the load.
    if (load_bc_into_bc_reg) {
      __ mov(bc_reg, bc);
    }
  }

  if (__ can_post_breakpoint()) {
    Label L_fast_patch;
    // if a breakpoint is present we can't rewrite the stream directly
    __ ldrb(temp_reg, at_bcp(0));
    __ cmp(temp_reg, Bytecodes::_breakpoint);
    __ b(L_fast_patch, ne);
    if (bc_reg != R3) {
      __ mov(R3, bc_reg);
    }
    __ mov(R1, Rmethod);
    __ mov(R2, Rbcp);
    // Let breakpoint table handling rewrite to quicker bytecode
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), R1, R2, R3);
    __ b(L_patch_done);
    __ bind(L_fast_patch);
  }

#ifdef ASSERT
  Label L_okay;
  __ ldrb(temp_reg, at_bcp(0));
  __ cmp(temp_reg, (int)Bytecodes::java_code(bc));
  __ b(L_okay, eq);
  __ cmp(temp_reg, bc_reg);
  __ b(L_okay, eq);
  __ stop("patching the wrong bytecode");
  __ bind(L_okay);
#endif

  // patch bytecode
  __ strb(bc_reg, at_bcp(0));
  __ bind(L_patch_done);
}

//----------------------------------------------------------------------------------------------------
// Individual instructions

void TemplateTable::nop() {
  transition(vtos, vtos);
  // nothing to do
}

void TemplateTable::shouldnotreachhere() {
  transition(vtos, vtos);
  __ stop("shouldnotreachhere bytecode");
}



void TemplateTable::aconst_null() {
  transition(vtos, atos);
  __ mov(R0_tos, 0);
}


void TemplateTable::iconst(int value) {
  transition(vtos, itos);
  __ mov_slow(R0_tos, value);
}


void TemplateTable::lconst(int value) {
  transition(vtos, ltos);
  assert((value == 0) || (value == 1), "unexpected long constant");
  __ mov(R0_tos, value);
  __ mov(R1_tos_hi, 0);
}


void TemplateTable::fconst(int value) {
  transition(vtos, ftos);
  const int zero = 0;         // 0.0f
  const int one = 0x3f800000; // 1.0f
  const int two = 0x40000000; // 2.0f

  switch(value) {
  case 0:   __ mov(R0_tos, zero);   break;
  case 1:   __ mov(R0_tos, one);    break;
  case 2:   __ mov(R0_tos, two);    break;
  default:  ShouldNotReachHere();   break;
  }

#ifndef __SOFTFP__
  __ fmsr(S0_tos, R0_tos);
#endif // !__SOFTFP__
}
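
// The constants above are the raw IEEE-754 single-precision bit patterns:
//   0x3f800000 == 1.0f (sign 0, biased exponent 127, mantissa 0)
//   0x40000000 == 2.0f (sign 0, biased exponent 128, mantissa 0)
// so the value can be materialized in a core register first and, in the VFP
// case, simply moved into S0_tos with fmsr.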


void TemplateTable::dconst(int value) {
  transition(vtos, dtos);
  const int one_lo = 0;            // low part of 1.0
  const int one_hi = 0x3ff00000;   // high part of 1.0

  if (value == 0) {
#ifdef __SOFTFP__
    __ mov(R0_tos_lo, 0);
    __ mov(R1_tos_hi, 0);
#else
    __ mov(R0_tmp, 0);
    __ fmdrr(D0_tos, R0_tmp, R0_tmp);
#endif // __SOFTFP__
  } else if (value == 1) {
    __ mov(R0_tos_lo, one_lo);
    __ mov_slow(R1_tos_hi, one_hi);
#ifndef __SOFTFP__
    __ fmdrr(D0_tos, R0_tos_lo, R1_tos_hi);
#endif // !__SOFTFP__
  } else {
    ShouldNotReachHere();
  }
}


void TemplateTable::bipush() {
  transition(vtos, itos);
  __ ldrsb(R0_tos, at_bcp(1));
}


void TemplateTable::sipush() {
  transition(vtos, itos);
  __ ldrsb(R0_tmp, at_bcp(1));
  __ ldrb(R1_tmp, at_bcp(2));
  __ orr(R0_tos, R1_tmp, AsmOperand(R0_tmp, lsl, BitsPerByte));
}
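
// The two operand bytes form a big-endian signed 16-bit immediate: ldrsb
// sign-extends the high byte, ldrb zero-extends the low byte, and the orr
// with lsl #8 combines them. Scalar sketch (illustration only; 'bcp' stands
// for the bytecode pointer):
//   int value = (int)(int16_t)((bcp[1] << 8) | bcp[2]);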


void TemplateTable::ldc(LdcType type) {
  transition(vtos, vtos);
  Label fastCase, Condy, Done;

  const Register Rindex = R1_tmp;
  const Register Rcpool = R2_tmp;
  const Register Rtags  = R3_tmp;
  const Register RtagType = R3_tmp;

  if (is_ldc_wide(type)) {
    __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);
  } else {
    __ ldrb(Rindex, at_bcp(1));
  }
  __ get_cpool_and_tags(Rcpool, Rtags);

  const int base_offset = ConstantPool::header_size() * wordSize;
  const int tags_offset = Array<u1>::base_offset_in_bytes();

  // get const type
  __ add(Rtemp, Rtags, tags_offset);
  __ ldrb(RtagType, Address(Rtemp, Rindex));
  volatile_barrier(MacroAssembler::LoadLoad, Rtemp);

  // unresolved class - get the resolved class
  __ cmp(RtagType, JVM_CONSTANT_UnresolvedClass);

  // unresolved class in error (resolution failed) - call into runtime
  // so that the same error from first resolution attempt is thrown.
  __ cond_cmp(RtagType, JVM_CONSTANT_UnresolvedClassInError, ne);

  // resolved class - need to call vm to get java mirror of the class
  __ cond_cmp(RtagType, JVM_CONSTANT_Class, ne);

  __ b(fastCase, ne);

  // slow case - call runtime
  __ mov(R1, is_ldc_wide(type) ? 1 : 0);
  call_VM(R0_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), R1);
  __ push(atos);
  __ b(Done);

  // int, float, String
  __ bind(fastCase);

  __ cmp(RtagType, JVM_CONSTANT_Integer);
  __ cond_cmp(RtagType, JVM_CONSTANT_Float, ne);
  __ b(Condy, ne);

  // itos, ftos
  __ add(Rtemp, Rcpool, AsmOperand(Rindex, lsl, LogBytesPerWord));
  __ ldr_u32(R0_tos, Address(Rtemp, base_offset));

  // floats and ints are placed on stack in the same way, so
  // we can use push(itos) to transfer float value without VFP
  __ push(itos);
  __ b(Done);

  __ bind(Condy);
  condy_helper(Done);

  __ bind(Done);
}

// Fast path for caching oop constants.
void TemplateTable::fast_aldc(LdcType type) {
  transition(vtos, atos);
  int index_size = is_ldc_wide(type) ? sizeof(u2) : sizeof(u1);
  Label resolved;

  // We are resolved if the resolved reference cache entry contains a
  // non-null object (CallSite, etc.)
  assert_different_registers(R0_tos, R2_tmp);
  __ get_index_at_bcp(R2_tmp, 1, R0_tos, index_size);
  __ load_resolved_reference_at_index(R0_tos, R2_tmp);
  __ cbnz(R0_tos, resolved);

  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);

  // first time invocation - must resolve first
  __ mov(R1, (int)bytecode());
  __ call_VM(R0_tos, entry, R1);
  __ bind(resolved);

  { // Check for the null sentinel.
    // If we just called the VM, that already did the mapping for us,
    // but it's harmless to retry.
    Label notNull;
    Register result = R0;
    Register tmp = R1;
    Register rarg = R2;

    // Stash null_sentinel address to get its value later
    __ mov_slow(rarg, (uintptr_t)Universe::the_null_sentinel_addr());
    __ ldr(tmp, Address(rarg));
    __ resolve_oop_handle(tmp);
    __ cmp(result, tmp);
    __ b(notNull, ne);
    __ mov(result, 0);  // null object reference
    __ bind(notNull);
  }

  if (VerifyOops) {
    __ verify_oop(R0_tos);
  }
}

void TemplateTable::ldc2_w() {
  transition(vtos, vtos);
  const Register Rtags  = R2_tmp;
  const Register Rindex = R3_tmp;
  const Register Rcpool = R4_tmp;
  const Register Rbase  = R5_tmp;

  __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);

  __ get_cpool_and_tags(Rcpool, Rtags);
  const int base_offset = ConstantPool::header_size() * wordSize;
  const int tags_offset = Array<u1>::base_offset_in_bytes();

  __ add(Rbase, Rcpool, AsmOperand(Rindex, lsl, LogBytesPerWord));

  // get type from tags
  __ add(Rtemp, Rtags, tags_offset);
  __ ldrb(Rtemp, Address(Rtemp, Rindex));

  Label Done, NotLong, NotDouble;
  __ cmp(Rtemp, JVM_CONSTANT_Double);
  __ b(NotDouble, ne);
#ifdef __SOFTFP__
  __ ldr(R0_tos_lo, Address(Rbase, base_offset + 0 * wordSize));
  __ ldr(R1_tos_hi, Address(Rbase, base_offset + 1 * wordSize));
#else // !__SOFTFP__
  __ ldr_double(D0_tos, Address(Rbase, base_offset));
#endif // __SOFTFP__
  __ push(dtos);
  __ b(Done);
  __ bind(NotDouble);

  __ cmp(Rtemp, JVM_CONSTANT_Long);
  __ b(NotLong, ne);
  __ ldr(R0_tos_lo, Address(Rbase, base_offset + 0 * wordSize));
  __ ldr(R1_tos_hi, Address(Rbase, base_offset + 1 * wordSize));
  __ push(ltos);
  __ b(Done);
  __ bind(NotLong);

  condy_helper(Done);

  __ bind(Done);
}


void TemplateTable::condy_helper(Label& Done)
{
  Register obj   = R0_tmp;
  Register rtmp  = R1_tmp;
  Register flags = R2_tmp;
  Register off   = R3_tmp;

  __ mov(rtmp, (int) bytecode());
  __ call_VM(obj, CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc), rtmp);
  __ get_vm_result_2(flags, rtmp);

  // VMr = obj = base address to find primitive value to push
  // VMr2 = flags = (tos, off) using format of CPCE::_flags
  __ mov(off, flags);

  __ logical_shift_left( off, off, 32 - ConstantPoolCache::field_index_bits);
  __ logical_shift_right(off, off, 32 - ConstantPoolCache::field_index_bits);
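  // The left/right shift pair is just a mask keeping the low field_index_bits
  // bits of flags; equivalently (illustration only):
  //   off = flags & ((1u << ConstantPoolCache::field_index_bits) - 1);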

  const Address field(obj, off);

  __ logical_shift_right(flags, flags, ConstantPoolCache::tos_state_shift);

  switch (bytecode()) {
    case Bytecodes::_ldc:
    case Bytecodes::_ldc_w:
      {
        // tos in (itos, ftos, stos, btos, ctos, ztos)
        Label notIntFloat, notShort, notByte, notChar, notBool;
        __ cmp(flags, itos);
        __ cond_cmp(flags, ftos, ne);
        __ b(notIntFloat, ne);
        __ ldr(R0_tos, field);
        __ push(itos);
        __ b(Done);

        __ bind(notIntFloat);
        __ cmp(flags, stos);
        __ b(notShort, ne);
        __ ldrsh(R0_tos, field);
        __ push(stos);
        __ b(Done);

        __ bind(notShort);
        __ cmp(flags, btos);
        __ b(notByte, ne);
        __ ldrsb(R0_tos, field);
        __ push(btos);
        __ b(Done);

        __ bind(notByte);
        __ cmp(flags, ctos);
        __ b(notChar, ne);
        __ ldrh(R0_tos, field);
        __ push(ctos);
        __ b(Done);

        __ bind(notChar);
        __ cmp(flags, ztos);
        __ b(notBool, ne);
        __ ldrsb(R0_tos, field);
        __ push(ztos);
        __ b(Done);

        __ bind(notBool);
        break;
      }

    case Bytecodes::_ldc2_w:
      {
        Label notLongDouble;
        __ cmp(flags, ltos);
        __ cond_cmp(flags, dtos, ne);
        __ b(notLongDouble, ne);

        __ add(rtmp, obj, wordSize);
        __ ldr(R0_tos_lo, Address(obj, off));
        __ ldr(R1_tos_hi, Address(rtmp, off));
        __ push(ltos);
        __ b(Done);

        __ bind(notLongDouble);

        break;
      }

    default:
      ShouldNotReachHere();
    }

    __ stop("bad ldc/condy");
}


void TemplateTable::locals_index(Register reg, int offset) {
  __ ldrb(reg, at_bcp(offset));
}

void TemplateTable::iload() {
  iload_internal();
}

void TemplateTable::nofast_iload() {
  iload_internal(may_not_rewrite);
}

void TemplateTable::iload_internal(RewriteControl rc) {
  transition(vtos, itos);

  if ((rc == may_rewrite) && __ rewrite_frequent_pairs()) {
    Label rewrite, done;
    const Register next_bytecode = R1_tmp;
    const Register target_bytecode = R2_tmp;

    // get next byte
    __ ldrb(next_bytecode, at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
    // if _iload, wait to rewrite to iload2.  We only want to rewrite the
    // last two iloads in a pair.  Comparing against fast_iload means that
    // the next bytecode is neither an iload nor a caload, and therefore
    // an iload pair.
    __ cmp(next_bytecode, Bytecodes::_iload);
    __ b(done, eq);

    __ cmp(next_bytecode, Bytecodes::_fast_iload);
    __ mov(target_bytecode, Bytecodes::_fast_iload2);
    __ b(rewrite, eq);

    // if _caload, rewrite to fast_icaload
    __ cmp(next_bytecode, Bytecodes::_caload);
    __ mov(target_bytecode, Bytecodes::_fast_icaload);
    __ b(rewrite, eq);

    // rewrite so iload doesn't check again.
    __ mov(target_bytecode, Bytecodes::_fast_iload);

    // rewrite
    // R2: fast bytecode
    __ bind(rewrite);
    patch_bytecode(Bytecodes::_iload, target_bytecode, Rtemp, false);
    __ bind(done);
  }

  // Get the local value into tos
  const Register Rlocal_index = R1_tmp;
  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(R0_tos, local);
}


void TemplateTable::fast_iload2() {
  transition(vtos, itos);
  const Register Rlocal_index = R1_tmp;

  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(R0_tos, local);
  __ push(itos);

  locals_index(Rlocal_index, 3);
  local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(R0_tos, local);
}

void TemplateTable::fast_iload() {
  transition(vtos, itos);
  const Register Rlocal_index = R1_tmp;

  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(R0_tos, local);
}


void TemplateTable::lload() {
  transition(vtos, ltos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);
  load_category2_local(Rlocal_index, R3_tmp);
}


void TemplateTable::fload() {
  transition(vtos, ftos);
  const Register Rlocal_index = R2_tmp;

  // Get the local value into tos
  locals_index(Rlocal_index);
  Address local = load_faddress(Rlocal_index, Rtemp);
#ifdef __SOFTFP__
  __ ldr(R0_tos, local);
#else
  __ ldr_float(S0_tos, local);
#endif // __SOFTFP__
}


void TemplateTable::dload() {
  transition(vtos, dtos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);

#ifdef __SOFTFP__
  load_category2_local(Rlocal_index, R3_tmp);
#else
  __ ldr_double(D0_tos, load_daddress(Rlocal_index, Rtemp));
#endif // __SOFTFP__
}


void TemplateTable::aload() {
  transition(vtos, atos);
  const Register Rlocal_index = R1_tmp;

  locals_index(Rlocal_index);
  Address local = load_aaddress(Rlocal_index, Rtemp);
  __ ldr(R0_tos, local);
}


void TemplateTable::locals_index_wide(Register reg) {
  assert_different_registers(reg, Rtemp);
  __ ldrb(Rtemp, at_bcp(2));
  __ ldrb(reg, at_bcp(3));
  __ orr(reg, reg, AsmOperand(Rtemp, lsl, 8));
}
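
// Wide (16-bit) local index, assembled big-endian from two unsigned operand
// bytes. Scalar sketch (illustration only; 'bcp' stands for the bytecode
// pointer):
//   uint16_t index = (uint16_t)((bcp[2] << 8) | bcp[3]);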


void TemplateTable::wide_iload() {
  transition(vtos, itos);
  const Register Rlocal_index = R2_tmp;

  locals_index_wide(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(R0_tos, local);
}


void TemplateTable::wide_lload() {
  transition(vtos, ltos);
  const Register Rlocal_index = R2_tmp;
  const Register Rlocal_base = R3_tmp;

  locals_index_wide(Rlocal_index);
  load_category2_local(Rlocal_index, R3_tmp);
}


void TemplateTable::wide_fload() {
  transition(vtos, ftos);
  const Register Rlocal_index = R2_tmp;

  locals_index_wide(Rlocal_index);
  Address local = load_faddress(Rlocal_index, Rtemp);
#ifdef __SOFTFP__
  __ ldr(R0_tos, local);
#else
  __ ldr_float(S0_tos, local);
#endif // __SOFTFP__
}


void TemplateTable::wide_dload() {
  transition(vtos, dtos);
  const Register Rlocal_index = R2_tmp;

  locals_index_wide(Rlocal_index);
#ifdef __SOFTFP__
  load_category2_local(Rlocal_index, R3_tmp);
#else
  __ ldr_double(D0_tos, load_daddress(Rlocal_index, Rtemp));
#endif // __SOFTFP__
}


void TemplateTable::wide_aload() {
  transition(vtos, atos);
  const Register Rlocal_index = R2_tmp;

  locals_index_wide(Rlocal_index);
  Address local = load_aaddress(Rlocal_index, Rtemp);
  __ ldr(R0_tos, local);
}

void TemplateTable::index_check(Register array, Register index) {
  // Pop ptr into array
  __ pop_ptr(array);
  index_check_without_pop(array, index);
}

void TemplateTable::index_check_without_pop(Register array, Register index) {
  assert_different_registers(array, index, Rtemp);
  // check index
  __ ldr_s32(Rtemp, Address(array, arrayOopDesc::length_offset_in_bytes()));
  __ cmp_32(index, Rtemp);
  if (index != R4_ArrayIndexOutOfBounds_index) {
    // convention with generate_ArrayIndexOutOfBounds_handler()
    __ mov(R4_ArrayIndexOutOfBounds_index, index, hs);
  }
  __ mov(R1, array, hs);
  __ b(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry, hs);
}
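
// Note the single unsigned comparison: branching on hs (unsigned >=) after
// cmp index, length rejects both index >= length and negative indices at
// once, because a negative index reinterprets as a large unsigned value.
// Scalar sketch (illustration only; throw_aioobe is a hypothetical name):
//   if ((uint32_t)index >= (uint32_t)length) throw_aioobe(array, index);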


void TemplateTable::iaload() {
  transition(itos, itos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_INT, Rarray, Rindex, Rtemp);
  __ access_load_at(T_INT, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg);
}


void TemplateTable::laload() {
  transition(itos, ltos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);

  Address addr = get_array_elem_addr_same_base(T_LONG, Rarray, Rindex, Rtemp);
  __ access_load_at(T_LONG, IN_HEAP | IS_ARRAY, addr, noreg /* ltos */, noreg, noreg, noreg);
}


void TemplateTable::faload() {
  transition(itos, ftos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);

  Address addr = get_array_elem_addr_same_base(T_FLOAT, Rarray, Rindex, Rtemp);
  __ access_load_at(T_FLOAT, IN_HEAP | IS_ARRAY, addr, noreg /* ftos */, noreg, noreg, noreg);
}


void TemplateTable::daload() {
  transition(itos, dtos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);

  Address addr = get_array_elem_addr_same_base(T_DOUBLE, Rarray, Rindex, Rtemp);
  __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, addr, noreg /* dtos */, noreg, noreg, noreg);
}


void TemplateTable::aaload() {
  transition(itos, atos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);
  do_oop_load(_masm, R0_tos, get_array_elem_addr_same_base(T_OBJECT, Rarray, Rindex, Rtemp), IS_ARRAY);
}


void TemplateTable::baload() {
  transition(itos, itos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_BYTE, Rarray, Rindex, Rtemp);
  __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg);
}


void TemplateTable::caload() {
  transition(itos, itos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_CHAR, Rarray, Rindex, Rtemp);
  __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg);
}


// iload followed by caload frequent pair
void TemplateTable::fast_icaload() {
  transition(vtos, itos);
  const Register Rlocal_index = R1_tmp;
  const Register Rarray = R1_tmp;
  const Register Rindex = R4_tmp; // index_check prefers index on R4
  assert_different_registers(Rlocal_index, Rindex);
  assert_different_registers(Rarray, Rindex);

  // load index out of locals
  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(Rindex, local);

  // get array element
  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_CHAR, Rarray, Rindex, Rtemp);
  __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg);
}


void TemplateTable::saload() {
  transition(itos, itos);
  const Register Rarray = R1_tmp;
  const Register Rindex = R0_tos;

  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_SHORT, Rarray, Rindex, Rtemp);
  __ access_load_at(T_SHORT, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg);
}


void TemplateTable::iload(int n) {
  transition(vtos, itos);
  __ ldr_s32(R0_tos, iaddress(n));
}


void TemplateTable::lload(int n) {
  transition(vtos, ltos);
  __ ldr(R0_tos_lo, laddress(n));
  __ ldr(R1_tos_hi, haddress(n));
}


void TemplateTable::fload(int n) {
  transition(vtos, ftos);
#ifdef __SOFTFP__
  __ ldr(R0_tos, faddress(n));
#else
  __ ldr_float(S0_tos, faddress(n));
#endif // __SOFTFP__
}


void TemplateTable::dload(int n) {
  transition(vtos, dtos);
#ifdef __SOFTFP__
  __ ldr(R0_tos_lo, laddress(n));
  __ ldr(R1_tos_hi, haddress(n));
#else
  __ ldr_double(D0_tos, daddress(n));
#endif // __SOFTFP__
}


void TemplateTable::aload(int n) {
  transition(vtos, atos);
  __ ldr(R0_tos, aaddress(n));
}

void TemplateTable::aload_0() {
  aload_0_internal();
}

void TemplateTable::nofast_aload_0() {
  aload_0_internal(may_not_rewrite);
}

void TemplateTable::aload_0_internal(RewriteControl rc) {
  transition(vtos, atos);
  // According to bytecode histograms, the pairs:
  //
  // _aload_0, _fast_igetfield
  // _aload_0, _fast_agetfield
  // _aload_0, _fast_fgetfield
  //
  // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
  // bytecode checks if the next bytecode is either _fast_igetfield,
  // _fast_agetfield or _fast_fgetfield and then rewrites the
  // current bytecode into a pair bytecode; otherwise it rewrites the current
  // bytecode into _fast_aload_0 that doesn't do the pair check anymore.
  //
  // Note: If the next bytecode is _getfield, the rewrite must be delayed,
  //       otherwise we may miss an opportunity for a pair.
  //
  // Also rewrite frequent pairs
  //   aload_0, aload_1
  //   aload_0, iload_1
  // These pairs take only a small amount of extra code and are the most profitable to rewrite.
  if ((rc == may_rewrite) && __ rewrite_frequent_pairs()) {
    Label rewrite, done;
    const Register next_bytecode = R1_tmp;
    const Register target_bytecode = R2_tmp;

    // get next byte
    __ ldrb(next_bytecode, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));

    // if _getfield then wait with rewrite
    __ cmp(next_bytecode, Bytecodes::_getfield);
    __ b(done, eq);

    // if _igetfield then rewrite to _fast_iaccess_0
    assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
    __ cmp(next_bytecode, Bytecodes::_fast_igetfield);
    __ mov(target_bytecode, Bytecodes::_fast_iaccess_0);
    __ b(rewrite, eq);

    // if _agetfield then rewrite to _fast_aaccess_0
    assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
    __ cmp(next_bytecode, Bytecodes::_fast_agetfield);
    __ mov(target_bytecode, Bytecodes::_fast_aaccess_0);
    __ b(rewrite, eq);

    // if _fgetfield then rewrite to _fast_faccess_0, else rewrite to _fast_aload0
    assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
    assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "fix bytecode definition");

    __ cmp(next_bytecode, Bytecodes::_fast_fgetfield);
    __ mov(target_bytecode, Bytecodes::_fast_faccess_0, eq);
    __ mov(target_bytecode, Bytecodes::_fast_aload_0, ne);

    // rewrite
    __ bind(rewrite);
    patch_bytecode(Bytecodes::_aload_0, target_bytecode, Rtemp, false);

    __ bind(done);
  }

  aload(0);
}

void TemplateTable::istore() {
  transition(itos, vtos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ str_32(R0_tos, local);
}


void TemplateTable::lstore() {
  transition(ltos, vtos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);
  store_category2_local(Rlocal_index, R3_tmp);
}


void TemplateTable::fstore() {
  transition(ftos, vtos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);
  Address local = load_faddress(Rlocal_index, Rtemp);
#ifdef __SOFTFP__
  __ str(R0_tos, local);
#else
  __ str_float(S0_tos, local);
#endif // __SOFTFP__
}


void TemplateTable::dstore() {
  transition(dtos, vtos);
  const Register Rlocal_index = R2_tmp;

  locals_index(Rlocal_index);

#ifdef __SOFTFP__
  store_category2_local(Rlocal_index, R3_tmp);
#else
  __ str_double(D0_tos, load_daddress(Rlocal_index, Rtemp));
#endif // __SOFTFP__
}


void TemplateTable::astore() {
  transition(vtos, vtos);
  const Register Rlocal_index = R1_tmp;

  __ pop_ptr(R0_tos);
  locals_index(Rlocal_index);
  Address local = load_aaddress(Rlocal_index, Rtemp);
  __ str(R0_tos, local);
}


void TemplateTable::wide_istore() {
  transition(vtos, vtos);
  const Register Rlocal_index = R2_tmp;

  __ pop_i(R0_tos);
  locals_index_wide(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ str_32(R0_tos, local);
}


void TemplateTable::wide_lstore() {
  transition(vtos, vtos);
  const Register Rlocal_index = R2_tmp;
  const Register Rlocal_base = R3_tmp;

  __ pop_l(R0_tos_lo, R1_tos_hi);

  locals_index_wide(Rlocal_index);
  store_category2_local(Rlocal_index, R3_tmp);
}


void TemplateTable::wide_fstore() {
  wide_istore();
}


void TemplateTable::wide_dstore() {
  wide_lstore();
}


void TemplateTable::wide_astore() {
  transition(vtos, vtos);
  const Register Rlocal_index = R2_tmp;

  __ pop_ptr(R0_tos);
  locals_index_wide(Rlocal_index);
  Address local = load_aaddress(Rlocal_index, Rtemp);
  __ str(R0_tos, local);
}


void TemplateTable::iastore() {
  transition(itos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // R0_tos: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_INT, Rarray, Rindex, Rtemp);
  __ access_store_at(T_INT, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg, false);
}


void TemplateTable::lastore() {
  transition(ltos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // R0_tos_lo:R1_tos_hi: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);

  Address addr = get_array_elem_addr_same_base(T_LONG, Rarray, Rindex, Rtemp);
  __ access_store_at(T_LONG, IN_HEAP | IS_ARRAY, addr, noreg /* ltos */, noreg, noreg, noreg, false);
}


void TemplateTable::fastore() {
  transition(ftos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // S0_tos/R0_tos: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_FLOAT, Rarray, Rindex, Rtemp);
  __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY, addr, noreg /* ftos */, noreg, noreg, noreg, false);
}


void TemplateTable::dastore() {
  transition(dtos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // D0_tos / R0_tos_lo:R1_tos_hi: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);

  Address addr = get_array_elem_addr_same_base(T_DOUBLE, Rarray, Rindex, Rtemp);
  __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, addr, noreg /* dtos */, noreg, noreg, noreg, false);
}


void TemplateTable::aastore() {
  transition(vtos, vtos);
  Label is_null, throw_array_store, done;

  const Register Raddr_1   = R1_tmp;
  const Register Rvalue_2  = R2_tmp;
  const Register Rarray_3  = R3_tmp;
  const Register Rindex_4  = R4_tmp;   // preferred by index_check_without_pop()
  const Register Rsub_5    = R5_tmp;
  const Register Rsuper_LR = LR_tmp;

  // stack: ..., array, index, value
  __ ldr(Rvalue_2, at_tos());     // Value
  __ ldr_s32(Rindex_4, at_tos_p1());  // Index
  __ ldr(Rarray_3, at_tos_p2());  // Array

  index_check_without_pop(Rarray_3, Rindex_4);

  // Compute the array base
  __ add(Raddr_1, Rarray_3, arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  // do array store check - check for null value first
  __ cbz(Rvalue_2, is_null);

  // Load subklass
  __ load_klass(Rsub_5, Rvalue_2);
  // Load superklass
  __ load_klass(Rtemp, Rarray_3);
  __ ldr(Rsuper_LR, Address(Rtemp, ObjArrayKlass::element_klass_offset()));

  __ gen_subtype_check(Rsub_5, Rsuper_LR, throw_array_store, R0_tmp, R3_tmp);
  // Come here on success

  // Store value
  __ add(Raddr_1, Raddr_1, AsmOperand(Rindex_4, lsl, LogBytesPerHeapOop));

  // Now store using the appropriate barrier
  do_oop_store(_masm, Raddr_1, Rvalue_2, Rtemp, R0_tmp, R3_tmp, false, IS_ARRAY);
  __ b(done);

  __ bind(throw_array_store);

  // Come here on failure of subtype check
  __ profile_typecheck_failed(R0_tmp);

  // object is at TOS
  __ b(Interpreter::_throw_ArrayStoreException_entry);

  // Have a null in Rvalue_2, store null at array[index].
  __ bind(is_null);
  __ profile_null_seen(R0_tmp);

  // Store a null
  do_oop_store(_masm, Address::indexed_oop(Raddr_1, Rindex_4), Rvalue_2, Rtemp, R0_tmp, R3_tmp, true, IS_ARRAY);

  // Pop stack arguments
  __ bind(done);
  __ add(Rstack_top, Rstack_top, 3 * Interpreter::stackElementSize);
}


void TemplateTable::bastore() {
  transition(itos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // R0_tos: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);

  // Need to check whether array is boolean or byte
  // since both types share the bastore bytecode.
  __ load_klass(Rtemp, Rarray);
  __ ldr_u32(Rtemp, Address(Rtemp, Klass::layout_helper_offset()));
  Label L_skip;
  __ tst(Rtemp, Klass::layout_helper_boolean_diffbit());
  __ b(L_skip, eq);
  __ and_32(R0_tos, R0_tos, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
  __ bind(L_skip);
  Address addr = get_array_elem_addr_same_base(T_BYTE, Rarray, Rindex, Rtemp);
  __ access_store_at(T_BYTE, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg, false);
}
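
// Klass::layout_helper_boolean_diffbit() is the layout-helper bit that
// differs between boolean[] and byte[], which share the bastore bytecode.
// Effect of the masking above, as a sketch (is_boolean_array is a
// hypothetical name, illustration only):
//   if (is_boolean_array(klass)) value &= 1;  // normalize to 0/1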


void TemplateTable::castore() {
  transition(itos, vtos);
  const Register Rindex = R4_tmp; // index_check prefers index in R4
  const Register Rarray = R3_tmp;
  // R0_tos: value

  __ pop_i(Rindex);
  index_check(Rarray, Rindex);
  Address addr = get_array_elem_addr_same_base(T_CHAR, Rarray, Rindex, Rtemp);
  __ access_store_at(T_CHAR, IN_HEAP | IS_ARRAY, addr, R0_tos, noreg, noreg, noreg, false);
}


void TemplateTable::sastore() {
  assert(arrayOopDesc::base_offset_in_bytes(T_CHAR) ==
           arrayOopDesc::base_offset_in_bytes(T_SHORT),
         "base offsets for char and short should be equal");
  castore();
}


void TemplateTable::istore(int n) {
  transition(itos, vtos);
  __ str_32(R0_tos, iaddress(n));
}


void TemplateTable::lstore(int n) {
  transition(ltos, vtos);
  __ str(R0_tos_lo, laddress(n));
  __ str(R1_tos_hi, haddress(n));
}


void TemplateTable::fstore(int n) {
  transition(ftos, vtos);
#ifdef __SOFTFP__
  __ str(R0_tos, faddress(n));
#else
  __ str_float(S0_tos, faddress(n));
#endif // __SOFTFP__
}


void TemplateTable::dstore(int n) {
  transition(dtos, vtos);
#ifdef __SOFTFP__
  __ str(R0_tos_lo, laddress(n));
  __ str(R1_tos_hi, haddress(n));
#else
  __ str_double(D0_tos, daddress(n));
#endif // __SOFTFP__
}


void TemplateTable::astore(int n) {
  transition(vtos, vtos);
  __ pop_ptr(R0_tos);
  __ str(R0_tos, aaddress(n));
}


void TemplateTable::pop() {
  transition(vtos, vtos);
  __ add(Rstack_top, Rstack_top, Interpreter::stackElementSize);
}


void TemplateTable::pop2() {
  transition(vtos, vtos);
  __ add(Rstack_top, Rstack_top, 2*Interpreter::stackElementSize);
}


void TemplateTable::dup() {
  transition(vtos, vtos);
  // stack: ..., a
  __ load_ptr(0, R0_tmp);
  __ push_ptr(R0_tmp);
  // stack: ..., a, a
}


void TemplateTable::dup_x1() {
  transition(vtos, vtos);
  // stack: ..., a, b
  __ load_ptr(0, R0_tmp);  // load b
  __ load_ptr(1, R2_tmp);  // load a
  __ store_ptr(1, R0_tmp); // store b
  __ store_ptr(0, R2_tmp); // store a
  __ push_ptr(R0_tmp);     // push b
  // stack: ..., b, a, b
}


void TemplateTable::dup_x2() {
  transition(vtos, vtos);
  // stack: ..., a, b, c
  __ load_ptr(0, R0_tmp);   // load c
  __ load_ptr(1, R2_tmp);   // load b
  __ load_ptr(2, R4_tmp);   // load a

  __ push_ptr(R0_tmp);      // push c

  // stack: ..., a, b, c, c
  __ store_ptr(1, R2_tmp);  // store b
  __ store_ptr(2, R4_tmp);  // store a
  __ store_ptr(3, R0_tmp);  // store c
  // stack: ..., c, a, b, c
}


void TemplateTable::dup2() {
  transition(vtos, vtos);
  // stack: ..., a, b
  __ load_ptr(1, R0_tmp);  // load a
  __ push_ptr(R0_tmp);     // push a
  __ load_ptr(1, R0_tmp);  // load b
  __ push_ptr(R0_tmp);     // push b
  // stack: ..., a, b, a, b
}


void TemplateTable::dup2_x1() {
  transition(vtos, vtos);

  // stack: ..., a, b, c
  __ load_ptr(0, R4_tmp);  // load c
  __ load_ptr(1, R2_tmp);  // load b
  __ load_ptr(2, R0_tmp);  // load a

  __ push_ptr(R2_tmp);     // push b
  __ push_ptr(R4_tmp);     // push c

  // stack: ..., a, b, c, b, c

  __ store_ptr(2, R0_tmp);  // store a
  __ store_ptr(3, R4_tmp);  // store c
  __ store_ptr(4, R2_tmp);  // store b

  // stack: ..., b, c, a, b, c
}


void TemplateTable::dup2_x2() {
  transition(vtos, vtos);
  // stack: ..., a, b, c, d
  __ load_ptr(0, R0_tmp);  // load d
  __ load_ptr(1, R2_tmp);  // load c
  __ push_ptr(R2_tmp);     // push c
  __ push_ptr(R0_tmp);     // push d
  // stack: ..., a, b, c, d, c, d
  __ load_ptr(4, R4_tmp);  // load b
  __ store_ptr(4, R0_tmp); // store d in b
  __ store_ptr(2, R4_tmp); // store b in d
  // stack: ..., a, d, c, b, c, d
  __ load_ptr(5, R4_tmp);  // load a
  __ store_ptr(5, R2_tmp); // store c in a
  __ store_ptr(3, R4_tmp); // store a in c
  // stack: ..., c, d, a, b, c, d
}


void TemplateTable::swap() {
  transition(vtos, vtos);
  // stack: ..., a, b
  __ load_ptr(1, R0_tmp);  // load a
  __ load_ptr(0, R2_tmp);  // load b
  __ store_ptr(0, R0_tmp); // store a in b
  __ store_ptr(1, R2_tmp); // store b in a
  // stack: ..., b, a
}


void TemplateTable::iop2(Operation op) {
  transition(itos, itos);
  const Register arg1 = R1_tmp;
  const Register arg2 = R0_tos;

  __ pop_i(arg1);
  switch (op) {
    case add  : __ add_32 (R0_tos, arg1, arg2); break;
    case sub  : __ sub_32 (R0_tos, arg1, arg2); break;
    case mul  : __ mul_32 (R0_tos, arg1, arg2); break;
    case _and : __ and_32 (R0_tos, arg1, arg2); break;
    case _or  : __ orr_32 (R0_tos, arg1, arg2); break;
    case _xor : __ eor_32 (R0_tos, arg1, arg2); break;
    case shl  : __ andr(arg2, arg2, 0x1f); __ mov (R0_tos, AsmOperand(arg1, lsl, arg2)); break;
    case shr  : __ andr(arg2, arg2, 0x1f); __ mov (R0_tos, AsmOperand(arg1, asr, arg2)); break;
    case ushr : __ andr(arg2, arg2, 0x1f); __ mov (R0_tos, AsmOperand(arg1, lsr, arg2)); break;
    default   : ShouldNotReachHere();
  }
}
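
// The shift counts are masked to five bits first, matching the JVM spec for
// int shifts (only the low 5 bits of the count are significant). Sketch
// (illustration only):
//   int r = x << (count & 0x1f);   // ishl; ishr/iushr use >> and unsigned >>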


void TemplateTable::lop2(Operation op) {
  transition(ltos, ltos);
  const Register arg1_lo = R2_tmp;
  const Register arg1_hi = R3_tmp;
  const Register arg2_lo = R0_tos_lo;
  const Register arg2_hi = R1_tos_hi;

  __ pop_l(arg1_lo, arg1_hi);
  switch (op) {
    case add : __ adds(R0_tos_lo, arg1_lo, arg2_lo); __ adc (R1_tos_hi, arg1_hi, arg2_hi); break;
    case sub : __ subs(R0_tos_lo, arg1_lo, arg2_lo); __ sbc (R1_tos_hi, arg1_hi, arg2_hi); break;
    case _and: __ andr(R0_tos_lo, arg1_lo, arg2_lo); __ andr(R1_tos_hi, arg1_hi, arg2_hi); break;
    case _or : __ orr (R0_tos_lo, arg1_lo, arg2_lo); __ orr (R1_tos_hi, arg1_hi, arg2_hi); break;
    case _xor: __ eor (R0_tos_lo, arg1_lo, arg2_lo); __ eor (R1_tos_hi, arg1_hi, arg2_hi); break;
    default : ShouldNotReachHere();
  }
}


void TemplateTable::idiv() {
  transition(itos, itos);
  __ mov(R2, R0_tos);
  __ pop_i(R0);
  // R0 - dividend
  // R2 - divisor
  __ call(StubRoutines::Arm::idiv_irem_entry(), relocInfo::none);
  // R1 - result
  __ mov(R0_tos, R1);
}


void TemplateTable::irem() {
  transition(itos, itos);
  __ mov(R2, R0_tos);
  __ pop_i(R0);
  // R0 - dividend
  // R2 - divisor
  __ call(StubRoutines::Arm::idiv_irem_entry(), relocInfo::none);
  // R0 - remainder
}


void TemplateTable::lmul() {
  transition(ltos, ltos);
  const Register arg1_lo = R0_tos_lo;
  const Register arg1_hi = R1_tos_hi;
  const Register arg2_lo = R2_tmp;
  const Register arg2_hi = R3_tmp;

  __ pop_l(arg2_lo, arg2_hi);

  __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::lmul), arg1_lo, arg1_hi, arg2_lo, arg2_hi);
}


void TemplateTable::ldiv() {
  transition(ltos, ltos);
  const Register x_lo = R2_tmp;
  const Register x_hi = R3_tmp;
  const Register y_lo = R0_tos_lo;
  const Register y_hi = R1_tos_hi;

  __ pop_l(x_lo, x_hi);

  // check if y = 0
  __ orrs(Rtemp, y_lo, y_hi);
  __ call(Interpreter::_throw_ArithmeticException_entry, relocInfo::none, eq);
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::ldiv), y_lo, y_hi, x_lo, x_hi);
}


void TemplateTable::lrem() {
  transition(ltos, ltos);
  const Register x_lo = R2_tmp;
  const Register x_hi = R3_tmp;
  const Register y_lo = R0_tos_lo;
  const Register y_hi = R1_tos_hi;

  __ pop_l(x_lo, x_hi);

  // check if y = 0
  __ orrs(Rtemp, y_lo, y_hi);
  __ call(Interpreter::_throw_ArithmeticException_entry, relocInfo::none, eq);
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::lrem), y_lo, y_hi, x_lo, x_hi);
}


void TemplateTable::lshl() {
  transition(itos, ltos);
  const Register shift_cnt = R4_tmp;
  const Register val_lo = R2_tmp;
  const Register val_hi = R3_tmp;

  __ pop_l(val_lo, val_hi);
  __ andr(shift_cnt, R0_tos, 63);
  __ long_shift(R0_tos_lo, R1_tos_hi, val_lo, val_hi, lsl, shift_cnt);
}
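
// Long shifts use only the low six bits of the count per the JVM spec,
// hence the mask with 63 before long_shift(). Sketch (illustration only):
//   long r = x << (count & 0x3f);  // lshl; lshr/lushr mask identically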


void TemplateTable::lshr() {
  transition(itos, ltos);
  const Register shift_cnt = R4_tmp;
  const Register val_lo = R2_tmp;
  const Register val_hi = R3_tmp;

  __ pop_l(val_lo, val_hi);
  __ andr(shift_cnt, R0_tos, 63);
  __ long_shift(R0_tos_lo, R1_tos_hi, val_lo, val_hi, asr, shift_cnt);
}


void TemplateTable::lushr() {
  transition(itos, ltos);
  const Register shift_cnt = R4_tmp;
  const Register val_lo = R2_tmp;
  const Register val_hi = R3_tmp;

  __ pop_l(val_lo, val_hi);
  __ andr(shift_cnt, R0_tos, 63);
  __ long_shift(R0_tos_lo, R1_tos_hi, val_lo, val_hi, lsr, shift_cnt);
}


void TemplateTable::fop2(Operation op) {
  transition(ftos, ftos);
#ifdef __SOFTFP__
  __ mov(R1, R0_tos);
  __ pop_i(R0);
  switch (op) {
    case add: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_fadd_glibc), R0, R1); break;
    case sub: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_fsub_glibc), R0, R1); break;
    case mul: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_fmul), R0, R1); break;
    case div: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_fdiv), R0, R1); break;
    case rem: __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem), R0, R1); break;
    default : ShouldNotReachHere();
  }
#else
  const FloatRegister arg1 = S1_tmp;
  const FloatRegister arg2 = S0_tos;

  switch (op) {
    case add: __ pop_f(arg1); __ add_float(S0_tos, arg1, arg2); break;
    case sub: __ pop_f(arg1); __ sub_float(S0_tos, arg1, arg2); break;
    case mul: __ pop_f(arg1); __ mul_float(S0_tos, arg1, arg2); break;
    case div: __ pop_f(arg1); __ div_float(S0_tos, arg1, arg2); break;
    case rem:
#ifndef __ABI_HARD__
      __ pop_f(arg1);
      __ fmrs(R0, arg1);
      __ fmrs(R1, arg2);
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem), R0, R1);
      __ fmsr(S0_tos, R0);
#else
      __ mov_float(S1_reg, arg2);
      __ pop_f(S0);
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::frem));
#endif // !__ABI_HARD__
      break;
    default : ShouldNotReachHere();
  }
#endif // __SOFTFP__
}


void TemplateTable::dop2(Operation op) {
  transition(dtos, dtos);
#ifdef __SOFTFP__
  __ mov(R2, R0_tos_lo);
  __ mov(R3, R1_tos_hi);
  __ pop_l(R0, R1);
  switch (op) {
    // __aeabi_XXXX_glibc: Imported code from glibc soft-fp bundle for calculation accuracy improvement. See CR 6757269.
    case add: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_dadd_glibc), R0, R1, R2, R3); break;
    case sub: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_dsub_glibc), R0, R1, R2, R3); break;
    case mul: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_dmul), R0, R1, R2, R3); break;
    case div: __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_ddiv), R0, R1, R2, R3); break;
    case rem: __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem), R0, R1, R2, R3); break;
    default : ShouldNotReachHere();
  }
#else
  const FloatRegister arg1 = D1_tmp;
  const FloatRegister arg2 = D0_tos;

  switch (op) {
    case add: __ pop_d(arg1); __ add_double(D0_tos, arg1, arg2); break;
    case sub: __ pop_d(arg1); __ sub_double(D0_tos, arg1, arg2); break;
    case mul: __ pop_d(arg1); __ mul_double(D0_tos, arg1, arg2); break;
    case div: __ pop_d(arg1); __ div_double(D0_tos, arg1, arg2); break;
    case rem:
#ifndef __ABI_HARD__
      __ pop_d(arg1);
      __ fmrrd(R0, R1, arg1);
      __ fmrrd(R2, R3, arg2);
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem), R0, R1, R2, R3);
      __ fmdrr(D0_tos, R0, R1);
#else
      __ mov_double(D1, arg2);
      __ pop_d(D0);
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::drem));
#endif // !__ABI_HARD__
      break;
    default : ShouldNotReachHere();
  }
#endif // __SOFTFP__
}


void TemplateTable::ineg() {
  transition(itos, itos);
  __ neg_32(R0_tos, R0_tos);
}


void TemplateTable::lneg() {
  transition(ltos, ltos);
  __ rsbs(R0_tos_lo, R0_tos_lo, 0);
  __ rsc (R1_tos_hi, R1_tos_hi, 0);
}
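
// Two-word negation: rsbs computes 0 - lo and sets the carry (borrow) flag,
// and rsc (reverse subtract with carry) folds the borrow into the high word.
// Equivalent sketch (illustration only):
//   uint32_t lo = 0u - x_lo;
//   uint32_t hi = 0u - x_hi - (x_lo != 0 ? 1u : 0u);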


void TemplateTable::fneg() {
  transition(ftos, ftos);
#ifdef __SOFTFP__
  // Invert sign bit
  const int sign_mask = 0x80000000;
  __ eor(R0_tos, R0_tos, sign_mask);
#else
  __ neg_float(S0_tos, S0_tos);
#endif // __SOFTFP__
}
1724

1725

1726
void TemplateTable::dneg() {
1727
  transition(dtos, dtos);
1728
#ifdef __SOFTFP__
1729
  // Invert sign bit in the high part of the double
1730
  const int sign_mask_hi = 0x80000000;
1731
  __ eor(R1_tos_hi, R1_tos_hi, sign_mask_hi);
1732
#else
1733
  __ neg_double(D0_tos, D0_tos);
1734
#endif // __SOFTFP__
1735
}
1736

1737

1738
void TemplateTable::iinc() {
1739
  transition(vtos, vtos);
1740
  const Register Rconst = R2_tmp;
1741
  const Register Rlocal_index = R1_tmp;
1742
  const Register Rval = R0_tmp;
1743

1744
  __ ldrsb(Rconst, at_bcp(2));
1745
  locals_index(Rlocal_index);
1746
  Address local = load_iaddress(Rlocal_index, Rtemp);
1747
  __ ldr_s32(Rval, local);
1748
  __ add(Rval, Rval, Rconst);
1749
  __ str_32(Rval, local);
1750
}
1751

1752

1753
void TemplateTable::wide_iinc() {
1754
  transition(vtos, vtos);
1755
  const Register Rconst = R2_tmp;
1756
  const Register Rlocal_index = R1_tmp;
1757
  const Register Rval = R0_tmp;
1758

1759
  // get constant in Rconst
1760
  __ ldrsb(R2_tmp, at_bcp(4));
1761
  __ ldrb(R3_tmp, at_bcp(5));
1762
  __ orr(Rconst, R3_tmp, AsmOperand(R2_tmp, lsl, 8));
1763

1764
  locals_index_wide(Rlocal_index);
1765
  Address local = load_iaddress(Rlocal_index, Rtemp);
1766
  __ ldr_s32(Rval, local);
1767
  __ add(Rval, Rval, Rconst);
1768
  __ str_32(Rval, local);
1769
}
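
// Illustrative sketch (assumed semantics, not the generated code): the wide
// iinc increment is a big-endian 16-bit signed constant, so the
// ldrsb/ldrb/orr sequence above amounts to
//
//   int inc = ((int8_t)bcp[4] << 8) | bcp[5];   // high byte sign-extended
//   locals[index] += inc;
//
// Because ldrsb sign-extends the high byte to 32 bits before the shift, the
// orr result is already a correctly signed 32-bit increment.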


void TemplateTable::convert() {
  // Checking
#ifdef ASSERT
  { TosState tos_in  = ilgl;
    TosState tos_out = ilgl;
    switch (bytecode()) {
      case Bytecodes::_i2l: // fall through
      case Bytecodes::_i2f: // fall through
      case Bytecodes::_i2d: // fall through
      case Bytecodes::_i2b: // fall through
      case Bytecodes::_i2c: // fall through
      case Bytecodes::_i2s: tos_in = itos; break;
      case Bytecodes::_l2i: // fall through
      case Bytecodes::_l2f: // fall through
      case Bytecodes::_l2d: tos_in = ltos; break;
      case Bytecodes::_f2i: // fall through
      case Bytecodes::_f2l: // fall through
      case Bytecodes::_f2d: tos_in = ftos; break;
      case Bytecodes::_d2i: // fall through
      case Bytecodes::_d2l: // fall through
      case Bytecodes::_d2f: tos_in = dtos; break;
      default             : ShouldNotReachHere();
    }
    switch (bytecode()) {
      case Bytecodes::_l2i: // fall through
      case Bytecodes::_f2i: // fall through
      case Bytecodes::_d2i: // fall through
      case Bytecodes::_i2b: // fall through
      case Bytecodes::_i2c: // fall through
      case Bytecodes::_i2s: tos_out = itos; break;
      case Bytecodes::_i2l: // fall through
      case Bytecodes::_f2l: // fall through
      case Bytecodes::_d2l: tos_out = ltos; break;
      case Bytecodes::_i2f: // fall through
      case Bytecodes::_l2f: // fall through
      case Bytecodes::_d2f: tos_out = ftos; break;
      case Bytecodes::_i2d: // fall through
      case Bytecodes::_l2d: // fall through
      case Bytecodes::_f2d: tos_out = dtos; break;
      default             : ShouldNotReachHere();
    }
    transition(tos_in, tos_out);
  }
#endif // ASSERT

  // Conversion
  switch (bytecode()) {
    case Bytecodes::_i2l:
      __ mov(R1_tos_hi, AsmOperand(R0_tos, asr, BitsPerWord-1));
      break;

    case Bytecodes::_i2f:
#ifdef __SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_i2f), R0_tos);
#else
      __ fmsr(S0_tmp, R0_tos);
      __ fsitos(S0_tos, S0_tmp);
#endif // __SOFTFP__
      break;

    case Bytecodes::_i2d:
#ifdef __SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_i2d), R0_tos);
#else
      __ fmsr(S0_tmp, R0_tos);
      __ fsitod(D0_tos, S0_tmp);
#endif // __SOFTFP__
      break;

    case Bytecodes::_i2b:
      __ sign_extend(R0_tos, R0_tos, 8);
      break;

    case Bytecodes::_i2c:
      __ zero_extend(R0_tos, R0_tos, 16);
      break;

    case Bytecodes::_i2s:
      __ sign_extend(R0_tos, R0_tos, 16);
      break;

    case Bytecodes::_l2i:
      /* nothing to do */
      break;

    case Bytecodes::_l2f:
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::l2f), R0_tos_lo, R1_tos_hi);
#if !defined(__SOFTFP__) && !defined(__ABI_HARD__)
      __ fmsr(S0_tos, R0);
#endif // !__SOFTFP__ && !__ABI_HARD__
      break;

    case Bytecodes::_l2d:
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::l2d), R0_tos_lo, R1_tos_hi);
#if !defined(__SOFTFP__) && !defined(__ABI_HARD__)
      __ fmdrr(D0_tos, R0, R1);
#endif // !__SOFTFP__ && !__ABI_HARD__
      break;

    case Bytecodes::_f2i:
#ifndef __SOFTFP__
      __ ftosizs(S0_tos, S0_tos);
      __ fmrs(R0_tos, S0_tos);
#else
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2i), R0_tos);
#endif // !__SOFTFP__
      break;

    case Bytecodes::_f2l:
#ifndef __SOFTFP__
      __ fmrs(R0_tos, S0_tos);
#endif // !__SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2l), R0_tos);
      break;

    case Bytecodes::_f2d:
#ifdef __SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_f2d), R0_tos);
#else
      __ convert_f2d(D0_tos, S0_tos);
#endif // __SOFTFP__
      break;

    case Bytecodes::_d2i:
#ifndef __SOFTFP__
      __ ftosizd(Stemp, D0);
      __ fmrs(R0, Stemp);
#else
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2i), R0_tos_lo, R1_tos_hi);
#endif // !__SOFTFP__
      break;

    case Bytecodes::_d2l:
#ifndef __SOFTFP__
      __ fmrrd(R0_tos_lo, R1_tos_hi, D0_tos);
#endif // !__SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2l), R0_tos_lo, R1_tos_hi);
      break;

    case Bytecodes::_d2f:
#ifdef __SOFTFP__
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, __aeabi_d2f), R0_tos_lo, R1_tos_hi);
#else
      __ convert_d2f(S0_tos, D0_tos);
#endif // __SOFTFP__
      break;

    default:
      ShouldNotReachHere();
  }
}


void TemplateTable::lcmp() {
  transition(ltos, itos);
  const Register arg1_lo = R2_tmp;
  const Register arg1_hi = R3_tmp;
  const Register arg2_lo = R0_tos_lo;
  const Register arg2_hi = R1_tos_hi;
  const Register res = R4_tmp;

  __ pop_l(arg1_lo, arg1_hi);

  // long compare arg1 with arg2
  // result is -1/0/+1 if '<'/'='/'>'
  Label done;

  __ mov (res, 0);
  __ cmp (arg1_hi, arg2_hi);
  __ mvn (res, 0, lt);
  __ mov (res, 1, gt);
  __ b(done, ne);
  __ cmp (arg1_lo, arg2_lo);
  __ mvn (res, 0, lo);
  __ mov (res, 1, hi);
  __ bind(done);
  __ mov (R0_tos, res);
}
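
// For reference, the sequence above implements (illustrative C, not the
// generated code):
//
//   int lcmp(int64_t a, int64_t b) {
//     if (a_hi != b_hi) return (a_hi < b_hi) ? -1 : 1;   // signed high words
//     if (a_lo != b_lo) return (a_lo < b_lo) ? -1 : 1;   // unsigned low words
//     return 0;
//   }
//
// hence the signed lt/gt conditions on the high words and the unsigned lo/hi
// conditions on the low words.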


void TemplateTable::float_cmp(bool is_float, int unordered_result) {
  assert((unordered_result == 1) || (unordered_result == -1), "invalid unordered result");

#ifdef __SOFTFP__

  if (is_float) {
    transition(ftos, itos);
    const Register Rx = R0;
    const Register Ry = R1;

    __ mov(Ry, R0_tos);
    __ pop_i(Rx);

    if (unordered_result == 1) {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::fcmpg), Rx, Ry);
    } else {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::fcmpl), Rx, Ry);
    }

  } else {

    transition(dtos, itos);
    const Register Rx_lo = R0;
    const Register Rx_hi = R1;
    const Register Ry_lo = R2;
    const Register Ry_hi = R3;

    __ mov(Ry_lo, R0_tos_lo);
    __ mov(Ry_hi, R1_tos_hi);
    __ pop_l(Rx_lo, Rx_hi);

    if (unordered_result == 1) {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dcmpg), Rx_lo, Rx_hi, Ry_lo, Ry_hi);
    } else {
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::dcmpl), Rx_lo, Rx_hi, Ry_lo, Ry_hi);
    }
  }

#else

  if (is_float) {
    transition(ftos, itos);
    __ pop_f(S1_tmp);
    __ fcmps(S1_tmp, S0_tos);
  } else {
    transition(dtos, itos);
    __ pop_d(D1_tmp);
    __ fcmpd(D1_tmp, D0_tos);
  }

  __ fmstat();

  // comparison result | flag N | flag Z | flag C | flag V
  // "<"               |   1    |   0    |   0    |   0
  // "=="              |   0    |   1    |   1    |   0
  // ">"               |   0    |   0    |   1    |   0
  // unordered         |   0    |   0    |   1    |   1

  if (unordered_result < 0) {
    __ mov(R0_tos, 1);           // result ==  1 if greater
    __ mvn(R0_tos, 0, lt);       // result == -1 if less or unordered (N!=V)
  } else {
    __ mov(R0_tos, 1);           // result ==  1 if greater or unordered
    __ mvn(R0_tos, 0, mi);       // result == -1 if less (N=1)
  }
  __ mov(R0_tos, 0, eq);         // result ==  0 if equal (Z=1)
#endif // __SOFTFP__
}
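
// Sketch of the bytecode semantics being implemented (illustrative only):
// fcmpl and fcmpg differ solely in how an unordered (NaN) comparison is
// mapped to an int result:
//
//   int fcmp(float x, float y, int unordered_result) {
//     if (x > y)  return 1;
//     if (x == y) return 0;
//     if (x < y)  return -1;
//     return unordered_result;   // NaN involved: -1 for fcmpl, +1 for fcmpg
//   }
//
// The conditional mov/mvn selection above encodes exactly this mapping from
// the VFP status flags shown in the table.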


void TemplateTable::branch(bool is_jsr, bool is_wide) {

  const Register Rdisp = R0_tmp;
  const Register Rbumped_taken_count = R5_tmp;

  __ profile_taken_branch(R0_tmp, Rbumped_taken_count); // R0 holds updated MDP, Rbumped_taken_count holds bumped taken count

  const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
                             InvocationCounter::counter_offset();
  const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
                              InvocationCounter::counter_offset();
  const int method_offset = frame::interpreter_frame_method_offset * wordSize;

  // Load up R0 with the branch displacement
  if (is_wide) {
    __ ldrsb(R0_tmp, at_bcp(1));
    __ ldrb(R1_tmp, at_bcp(2));
    __ ldrb(R2_tmp, at_bcp(3));
    __ ldrb(R3_tmp, at_bcp(4));
    __ orr(R0_tmp, R1_tmp, AsmOperand(R0_tmp, lsl, BitsPerByte));
    __ orr(R0_tmp, R2_tmp, AsmOperand(R0_tmp, lsl, BitsPerByte));
    __ orr(Rdisp, R3_tmp, AsmOperand(R0_tmp, lsl, BitsPerByte));
  } else {
    __ ldrsb(R0_tmp, at_bcp(1));
    __ ldrb(R1_tmp, at_bcp(2));
    __ orr(Rdisp, R1_tmp, AsmOperand(R0_tmp, lsl, BitsPerByte));
  }

  // Handle all the JSR stuff here, then exit.
  // It's much shorter and cleaner than intermingling with the
  // non-JSR normal-branch stuff occurring below.
  if (is_jsr) {
    // compute return address as bci in R1
    const Register Rret_addr = R1_tmp;
    assert_different_registers(Rdisp, Rret_addr, Rtemp);

    __ ldr(Rtemp, Address(Rmethod, Method::const_offset()));
    __ sub(Rret_addr, Rbcp, - (is_wide ? 5 : 3) + in_bytes(ConstMethod::codes_offset()));
    __ sub(Rret_addr, Rret_addr, Rtemp);

    // Load the next target bytecode into R3_bytecode and advance Rbcp
    __ ldrb(R3_bytecode, Address(Rbcp, Rdisp, lsl, 0, pre_indexed));

    // Push return address
    __ push_i(Rret_addr);
    // jsr returns vtos
    __ dispatch_only_noverify(vtos);
    return;
  }

  // Normal (non-jsr) branch handling

  // Adjust the bcp by the displacement in Rdisp and load next bytecode.
  __ ldrb(R3_bytecode, Address(Rbcp, Rdisp, lsl, 0, pre_indexed));

  assert(UseLoopCounter || !UseOnStackReplacement, "on-stack-replacement requires loop counters");
  Label backedge_counter_overflow;
  Label dispatch;

  if (UseLoopCounter) {
    // increment backedge counter for backward branches
    // Rdisp (R0): target offset

    const Register Rcnt = R2_tmp;
    const Register Rcounters = R1_tmp;

    // count only if backward branch
    __ tst(Rdisp, Rdisp);
    __ b(dispatch, pl);

    Label no_mdo;
    int increment = InvocationCounter::count_increment;
    if (ProfileInterpreter) {
      // Are we profiling?
      __ ldr(Rtemp, Address(Rmethod, Method::method_data_offset()));
      __ cbz(Rtemp, no_mdo);
      // Increment the MDO backedge counter
      const Address mdo_backedge_counter(Rtemp, in_bytes(MethodData::backedge_counter_offset()) +
                                                in_bytes(InvocationCounter::counter_offset()));
      const Address mask(Rtemp, in_bytes(MethodData::backedge_mask_offset()));
      __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
                                 Rcnt, R4_tmp, eq, &backedge_counter_overflow);
      __ b(dispatch);
    }
    __ bind(no_mdo);
    // Increment backedge counter in MethodCounters*
    // Note: Rbumped_taken_count is a callee-saved register on ARM32
    __ get_method_counters(Rmethod, Rcounters, dispatch, true /*saveRegs*/,
                           Rdisp, R3_bytecode,
                           noreg);
    const Address mask(Rcounters, in_bytes(MethodCounters::backedge_mask_offset()));
    __ increment_mask_and_jump(Address(Rcounters, be_offset), increment, mask,
                               Rcnt, R4_tmp, eq, &backedge_counter_overflow);
    __ bind(dispatch);
  }

  if (!UseOnStackReplacement) {
    __ bind(backedge_counter_overflow);
  }

  // continue with the bytecode @ target
  __ dispatch_only(vtos, true);

  if (UseLoopCounter && UseOnStackReplacement) {
    // invocation counter overflow
    __ bind(backedge_counter_overflow);

    __ sub(R1, Rbcp, Rdisp);                   // branch bcp
    call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), R1);

    // R0: osr nmethod (osr ok) or null (osr not possible)
    const Register Rnmethod = R0;

    __ ldrb(R3_bytecode, Address(Rbcp));       // reload next bytecode

    __ cbz(Rnmethod, dispatch);                // test result, no osr if null

    // nmethod may have been invalidated (VM may block upon call_VM return)
    __ ldrb(R1_tmp, Address(Rnmethod, nmethod::state_offset()));
    __ cmp(R1_tmp, nmethod::in_use);
    __ b(dispatch, ne);

    // We have the address of an on stack replacement routine in Rnmethod.
    // We need to prepare to execute the OSR method. First we must
    // migrate the locals and monitors off of the stack.

    __ mov(Rtmp_save0, Rnmethod);                      // save the nmethod

    call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin));

    // R0 is OSR buffer

    __ ldr(R1_tmp, Address(Rtmp_save0, nmethod::osr_entry_point_offset()));
    __ ldr(Rtemp, Address(FP, frame::interpreter_frame_sender_sp_offset * wordSize));

    __ ldmia(FP, RegisterSet(FP) | RegisterSet(LR));
    __ bic(SP, Rtemp, StackAlignmentInBytes - 1);     // Remove frame and align stack

    __ jump(R1_tmp);
  }
}
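
// High-level flow of the non-JSR path above (illustrative pseudocode, not the
// generated code):
//
//   bcp += disp; load next bytecode;
//   if (UseLoopCounter && disp < 0) {        // backward branches only
//     bump backedge counter (MDO if profiling, else MethodCounters);
//     if ((counter & mask) == 0) goto backedge_counter_overflow;
//   }
//   dispatch;
//   backedge_counter_overflow:               // with UseOnStackReplacement
//     nm = InterpreterRuntime::frequency_counter_overflow(branch_bcp);
//     if (nm != nullptr && nm is in_use) migrate frame and jump to OSR entry;
//     else dispatch;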


void TemplateTable::if_0cmp(Condition cc) {
  transition(itos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ cmp_32(R0_tos, 0);
  __ b(not_taken, convNegCond(cc));
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}


void TemplateTable::if_icmp(Condition cc) {
  transition(itos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_i(R1_tmp);
  __ cmp_32(R1_tmp, R0_tos);
  __ b(not_taken, convNegCond(cc));
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}


void TemplateTable::if_nullcmp(Condition cc) {
  transition(atos, vtos);
  assert(cc == equal || cc == not_equal, "invalid condition");

  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  if (cc == equal) {
    __ cbnz(R0_tos, not_taken);
  } else {
    __ cbz(R0_tos, not_taken);
  }
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}


void TemplateTable::if_acmp(Condition cc) {
  transition(atos, vtos);
  // assume branch is more often taken than not (loops use backward branches)
  Label not_taken;
  __ pop_ptr(R1_tmp);
  __ cmpoop(R1_tmp, R0_tos);
  __ b(not_taken, convNegCond(cc));
  branch(false, false);
  __ bind(not_taken);
  __ profile_not_taken_branch(R0_tmp);
}


void TemplateTable::ret() {
  transition(vtos, vtos);
  const Register Rlocal_index = R1_tmp;
  const Register Rret_bci = Rtmp_save0; // R4

  locals_index(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(Rret_bci, local);          // get return bci, compute return bcp
  __ profile_ret(Rtmp_save1, Rret_bci);
  __ ldr(Rtemp, Address(Rmethod, Method::const_offset()));
  __ add(Rtemp, Rtemp, in_bytes(ConstMethod::codes_offset()));
  __ add(Rbcp, Rtemp, Rret_bci);
  __ dispatch_next(vtos);
}


void TemplateTable::wide_ret() {
  transition(vtos, vtos);
  const Register Rlocal_index = R1_tmp;
  const Register Rret_bci = Rtmp_save0; // R4

  locals_index_wide(Rlocal_index);
  Address local = load_iaddress(Rlocal_index, Rtemp);
  __ ldr_s32(Rret_bci, local);               // get return bci, compute return bcp
  __ profile_ret(Rtmp_save1, Rret_bci);
  __ ldr(Rtemp, Address(Rmethod, Method::const_offset()));
  __ add(Rtemp, Rtemp, in_bytes(ConstMethod::codes_offset()));
  __ add(Rbcp, Rtemp, Rret_bci);
  __ dispatch_next(vtos);
}
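
// Both ret variants above recompute the return bcp the same way (illustrative
// sketch): with ret_bci loaded from the local variable,
//
//   Rbcp = Rmethod->constMethod() + ConstMethod::codes_offset() + ret_bci;
//
// i.e. the saved bci is rebased onto the start of the method's bytecodes
// before dispatching.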


void TemplateTable::tableswitch() {
  transition(itos, vtos);

  const Register Rindex  = R0_tos;
  const Register Rtemp2  = R1_tmp;
  const Register Rabcp   = R2_tmp;  // aligned bcp
  const Register Rlow    = R3_tmp;
  const Register Rhigh   = R4_tmp;
  const Register Roffset = R5_tmp;

  // align bcp
  __ add(Rtemp, Rbcp, 1 + (2*BytesPerInt-1));
  __ align_reg(Rabcp, Rtemp, BytesPerInt);

  // load lo & hi
  __ ldmia(Rabcp, RegisterSet(Rlow) | RegisterSet(Rhigh), writeback);
  __ byteswap_u32(Rlow, Rtemp, Rtemp2);
  __ byteswap_u32(Rhigh, Rtemp, Rtemp2);

  // compare index with high bound
  __ cmp_32(Rhigh, Rindex);

  // if Rindex <= Rhigh then calculate index in table (Rindex - Rlow)
  __ subs(Rindex, Rindex, Rlow, ge);

  // if Rindex <= Rhigh and (Rindex - Rlow) >= 0
  // ("ge" status accumulated from cmp and subs instructions) then load
  // offset from table, otherwise load offset for default case

  if (ProfileInterpreter) {
    Label default_case, continue_execution;

    __ b(default_case, lt);
    __ ldr(Roffset, Address(Rabcp, Rindex, lsl, LogBytesPerInt));
    __ profile_switch_case(Rabcp, Rindex, Rtemp2, R0_tmp);
    __ b(continue_execution);

    __ bind(default_case);
    __ profile_switch_default(R0_tmp);
    __ ldr(Roffset, Address(Rabcp, -3 * BytesPerInt));

    __ bind(continue_execution);
  } else {
    __ ldr(Roffset, Address(Rabcp, -3 * BytesPerInt), lt);
    __ ldr(Roffset, Address(Rabcp, Rindex, lsl, LogBytesPerInt), ge);
  }

  __ byteswap_u32(Roffset, Rtemp, Rtemp2);

  // load the next bytecode to R3_bytecode and advance Rbcp
  __ ldrb(R3_bytecode, Address(Rbcp, Roffset, lsl, 0, pre_indexed));
  __ dispatch_only(vtos, true);

}
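
// For reference, the tableswitch data layout being decoded above (from the
// class file format, shown as illustrative C):
//
//   struct {                  // at the 4-byte-aligned address after the opcode
//     int32_t default_offset; // all entries big-endian
//     int32_t low;
//     int32_t high;
//     int32_t jump_offsets[high - low + 1];
//   };
//
// After the ldmia with writeback, Rabcp points at jump_offsets, which is why
// the default offset is reloaded at Rabcp - 3*BytesPerInt.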


void TemplateTable::lookupswitch() {
  transition(itos, itos);
  __ stop("lookupswitch bytecode should have been rewritten");
}


void TemplateTable::fast_linearswitch() {
  transition(itos, vtos);
  Label loop, found, default_case, continue_execution;

  const Register Rkey     = R0_tos;
  const Register Rabcp    = R2_tmp;  // aligned bcp
  const Register Rdefault = R3_tmp;
  const Register Rcount   = R4_tmp;
  const Register Roffset  = R5_tmp;

  // bswap Rkey, so we can avoid bswapping the table entries
  __ byteswap_u32(Rkey, R1_tmp, Rtemp);

  // align bcp
  __ add(Rtemp, Rbcp, 1 + (BytesPerInt-1));
  __ align_reg(Rabcp, Rtemp, BytesPerInt);

  // load default & counter
  __ ldmia(Rabcp, RegisterSet(Rdefault) | RegisterSet(Rcount), writeback);
  __ byteswap_u32(Rcount, R1_tmp, Rtemp);

  __ cmp_32(Rcount, 0);
  __ ldr(Rtemp, Address(Rabcp, 2*BytesPerInt, post_indexed), ne);
  __ b(default_case, eq);

  // table search
  __ bind(loop);
  __ cmp_32(Rtemp, Rkey);
  __ b(found, eq);
  __ subs(Rcount, Rcount, 1);
  __ ldr(Rtemp, Address(Rabcp, 2*BytesPerInt, post_indexed), ne);
  __ b(loop, ne);

  // default case
  __ bind(default_case);
  __ profile_switch_default(R0_tmp);
  __ mov(Roffset, Rdefault);
  __ b(continue_execution);

  // entry found -> get offset
  __ bind(found);
  // Rabcp is already incremented and points to the next entry
  __ ldr_s32(Roffset, Address(Rabcp, -BytesPerInt));
  if (ProfileInterpreter) {
    // Calculate index of the selected case.
    assert_different_registers(Roffset, Rcount, Rtemp, R0_tmp, R1_tmp, R2_tmp);

    // align bcp
    __ add(Rtemp, Rbcp, 1 + (BytesPerInt-1));
    __ align_reg(R2_tmp, Rtemp, BytesPerInt);

    // load number of cases
    __ ldr_u32(R2_tmp, Address(R2_tmp, BytesPerInt));
    __ byteswap_u32(R2_tmp, R1_tmp, Rtemp);

    // Selected index = <number of cases> - <current loop count>
    __ sub(R1_tmp, R2_tmp, Rcount);
    __ profile_switch_case(R0_tmp, R1_tmp, Rtemp, R1_tmp);
  }

  // continue execution
  __ bind(continue_execution);
  __ byteswap_u32(Roffset, R1_tmp, Rtemp);

  // load the next bytecode to R3_bytecode and advance Rbcp
  __ ldrb(R3_bytecode, Address(Rbcp, Roffset, lsl, 0, pre_indexed));
  __ dispatch_only(vtos, true);
}


void TemplateTable::fast_binaryswitch() {
  transition(itos, vtos);
  // Implementation using the following core algorithm:
  //
  // int binary_search(int key, LookupswitchPair* array, int n) {
  //   // Binary search according to "Methodik des Programmierens" by
  //   // Edsger W. Dijkstra and W.H.J. Feijen, Addison Wesley Germany 1985.
  //   int i = 0;
  //   int j = n;
  //   while (i+1 < j) {
  //     // invariant P: 0 <= i < j <= n and (a[i] <= key < a[j] or Q)
  //     // with      Q: for all i: 0 <= i < n: key < a[i]
  //     // where a stands for the array and assuming that the (nonexistent)
  //     // element a[n] is infinitely big.
  //     int h = (i + j) >> 1;
  //     // i < h < j
  //     if (key < array[h].fast_match()) {
  //       j = h;
  //     } else {
  //       i = h;
  //     }
  //   }
  //   // R: a[i] <= key < a[i+1] or Q
  //   // (i.e., if key is within array, i is the correct index)
  //   return i;
  // }

  // register allocation
  const Register key    = R0_tos;                // already set (tosca)
  const Register array  = R1_tmp;
  const Register i      = R2_tmp;
  const Register j      = R3_tmp;
  const Register h      = R4_tmp;
  const Register val    = R5_tmp;
  const Register temp1  = Rtemp;
  const Register temp2  = LR_tmp;
  const Register offset = R3_tmp;

  // set 'array' = aligned bcp + 2 ints
  __ add(temp1, Rbcp, 1 + (BytesPerInt-1) + 2*BytesPerInt);
  __ align_reg(array, temp1, BytesPerInt);

  // initialize i & j
  __ mov(i, 0);                                  // i = 0;
  __ ldr_s32(j, Address(array, -BytesPerInt));   // j = length(array);
  // Convert j into native byteordering
  __ byteswap_u32(j, temp1, temp2);

  // and start
  Label entry;
  __ b(entry);

  // binary search loop
  { Label loop;
    __ bind(loop);
    // int h = (i + j) >> 1;
    __ add(h, i, j);                             // h = i + j;
    __ logical_shift_right(h, h, 1);             // h = (i + j) >> 1;
    // if (key < array[h].fast_match()) {
    //   j = h;
    // } else {
    //   i = h;
    // }
    __ ldr_s32(val, Address(array, h, lsl, 1+LogBytesPerInt));
    // Convert array[h].match to native byte-ordering before compare
    __ byteswap_u32(val, temp1, temp2);
    __ cmp_32(key, val);
    __ mov(j, h, lt);   // j = h if (key <  array[h].fast_match())
    __ mov(i, h, ge);   // i = h if (key >= array[h].fast_match())
    // while (i+1 < j)
    __ bind(entry);
    __ add(temp1, i, 1);                             // i+1
    __ cmp(temp1, j);                                // i+1 < j
    __ b(loop, lt);
  }

  // end of binary search, result index is i (must check again!)
  Label default_case;
  // Convert array[i].match to native byte-ordering before compare
  __ ldr_s32(val, Address(array, i, lsl, 1+LogBytesPerInt));
  __ byteswap_u32(val, temp1, temp2);
  __ cmp_32(key, val);
  __ b(default_case, ne);

  // entry found
  __ add(temp1, array, AsmOperand(i, lsl, 1+LogBytesPerInt));
  __ ldr_s32(offset, Address(temp1, 1*BytesPerInt));
  __ profile_switch_case(R0, i, R1, i);
  __ byteswap_u32(offset, temp1, temp2);
  __ ldrb(R3_bytecode, Address(Rbcp, offset, lsl, 0, pre_indexed));
  __ dispatch_only(vtos, true);

  // default case
  __ bind(default_case);
  __ profile_switch_default(R0);
  __ ldr_s32(offset, Address(array, -2*BytesPerInt));
  __ byteswap_u32(offset, temp1, temp2);
  __ ldrb(R3_bytecode, Address(Rbcp, offset, lsl, 0, pre_indexed));
  __ dispatch_only(vtos, true);
}


void TemplateTable::_return(TosState state) {
  transition(state, state);
  assert(_desc->calls_vm(), "inconsistent calls_vm information"); // call in remove_activation

  if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
    Label skip_register_finalizer;
    assert(state == vtos, "only valid state");
    __ ldr(R1, aaddress(0));
    __ load_klass(Rtemp, R1);
    __ ldr_u32(Rtemp, Address(Rtemp, Klass::access_flags_offset()));
    __ tbz(Rtemp, exact_log2(JVM_ACC_HAS_FINALIZER), skip_register_finalizer);

    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), R1);

    __ bind(skip_register_finalizer);
  }

  if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
    Label no_safepoint;
    __ ldr(Rtemp, Address(Rthread, JavaThread::polling_word_offset()));
    __ tbz(Rtemp, exact_log2(SafepointMechanism::poll_bit()), no_safepoint);
    __ push(state);
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint));
    __ pop(state);
    __ bind(no_safepoint);
  }

  // Narrow result if state is itos but result type is smaller.
  // Need to narrow in the return bytecode rather than in generate_return_entry
  // since compiled code callers expect the result to already be narrowed.
  if (state == itos) {
    __ narrow(R0_tos);
  }
  __ remove_activation(state, LR);

  __ interp_verify_oop(R0_tos, state, __FILE__, __LINE__);

  // According to interpreter calling conventions, result is returned in R0/R1,
  // so ftos (S0) and dtos (D0) are moved to R0/R1.
  // This conversion should be done after remove_activation, as it uses
  // push(state) & pop(state) to preserve return value.
  __ convert_tos_to_retval(state);

  __ ret();

  __ nop(); // to avoid filling CPU pipeline with invalid instructions
  __ nop();
}


// ----------------------------------------------------------------------------
// Volatile variables demand their effects be made known to all CPUs in
// order.  Store buffers on most chips allow reads & writes to reorder; the
// JMM's ReadAfterWrite.java test fails in -Xint mode without some kind of
// memory barrier (i.e., it's not sufficient that the interpreter does not
// reorder volatile references, the hardware also must not reorder them).
//
// According to the new Java Memory Model (JMM):
// (1) All volatiles are serialized with respect to each other.
// ALSO reads & writes act as acquire & release, so:
// (2) A read cannot let unrelated NON-volatile memory refs that happen after
// the read float up to before the read.  It's OK for non-volatile memory refs
// that happen before the volatile read to float down below it.
// (3) Similarly, a volatile write cannot let unrelated NON-volatile memory refs
// that happen BEFORE the write float down to after the write.  It's OK for
// non-volatile memory refs that happen after the volatile write to float up
// before it.
//
// We only put in barriers around volatile refs (they are expensive), not
// _between_ memory refs (that would require us to track the flavor of the
// previous memory refs).  Requirements (2) and (3) require some barriers
// before volatile stores and after volatile loads.  These nearly cover
// requirement (1) but miss the volatile-store-volatile-load case.  This final
// case is placed after volatile-stores although it could just as well go
// before volatile-loads.
void TemplateTable::volatile_barrier(MacroAssembler::Membar_mask_bits order_constraint,
                                     Register tmp,
                                     bool preserve_flags,
                                     Register load_tgt) {
  __ membar(order_constraint, tmp, preserve_flags, load_tgt);
}
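
// As an illustrative example of the policy above (a sketch, not a definitive
// recipe), a volatile field access ends up bracketed roughly like this:
//
//   volatile load:   ld x;  membar(LoadLoad|LoadStore);           // acquire
//   volatile store:  membar(StoreStore|LoadStore);  st x;
//                    membar(StoreLoad);   // the store-load case, placed
//                                         // after volatile stores per above
//
// which matches the conditional barriers emitted around the accesses in
// getfield_or_static and putfield_or_static below.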

// Blows all volatile registers: R0-R3, Rtemp, LR.
void TemplateTable::resolve_cache_and_index_for_method(int byte_no,
                                                       Register Rcache,
                                                       Register Rindex) {
  assert_different_registers(Rcache, Rindex, Rtemp);
  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");

  Label resolved;
  Bytecodes::Code code = bytecode();
  __ load_method_entry(Rcache, Rindex);
  switch (byte_no) {
    case f1_byte:
      __ add(Rtemp, Rcache, in_bytes(ResolvedMethodEntry::bytecode1_offset()));
      break;
    case f2_byte:
      __ add(Rtemp, Rcache, in_bytes(ResolvedMethodEntry::bytecode2_offset()));
      break;
  }
  // Load-acquire the bytecode to match store-release in InterpreterRuntime
  __ ldrb(Rtemp, Rtemp);
  __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);
  __ cmp(Rtemp, code);  // have we resolved this bytecode?
  __ b(resolved, eq);

  // resolve first time through
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(R1, code);
  __ call_VM(noreg, entry, R1);
  // Update registers with resolved info
  __ load_method_entry(Rcache, Rindex);
  __ bind(resolved);
}
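
// Illustrative sketch of the fast path above (pseudocode, not generated code):
//
//   entry = resolved method entry for this bcp;
//   if (load_acquire(entry->bytecodeN()) == bytecode()) goto resolved;
//   InterpreterRuntime::resolve_from_cache(bytecode());   // slow path
//   reload entry;
//
// The load-acquire pairs with the store-release performed when the entry is
// filled in, so a thread that observes the resolved bytecode also observes
// the rest of the entry's fields.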

void TemplateTable::resolve_cache_and_index_for_field(int byte_no,
                                                      Register Rcache,
                                                      Register Rindex) {
  assert_different_registers(Rcache, Rindex, Rtemp);

  Label resolved;

  Bytecodes::Code code = bytecode();
  switch (code) {
  case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
  case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
  default: break;
  }

  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
  __ load_field_entry(Rcache, Rindex);
  if (byte_no == f1_byte) {
    __ add(Rtemp, Rcache, in_bytes(ResolvedFieldEntry::get_code_offset()));
  } else {
    __ add(Rtemp, Rcache, in_bytes(ResolvedFieldEntry::put_code_offset()));
  }

  // Load-acquire the bytecode to match store-release in ResolvedFieldEntry::fill_in()
  __ ldrb(Rtemp, Rtemp);
  __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);

  __ cmp(Rtemp, code);  // have we resolved this bytecode?
  __ b(resolved, eq);

  // resolve first time through
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(R1, code);
  __ call_VM(noreg, entry, R1);
  // Update registers with resolved info
  __ load_field_entry(Rcache, Rindex);
  __ bind(resolved);
}

void TemplateTable::load_resolved_field_entry(Register obj,
                                              Register cache,
                                              Register tos_state,
                                              Register offset,
                                              Register flags,
                                              bool is_static = false) {
  assert_different_registers(cache, tos_state, flags, offset);

  // Field offset
  __ ldr(offset, Address(cache, in_bytes(ResolvedFieldEntry::field_offset_offset())));

  // Flags
  __ ldrb(flags, Address(cache, in_bytes(ResolvedFieldEntry::flags_offset())));

  // TOS state
  __ ldrb(tos_state, Address(cache, in_bytes(ResolvedFieldEntry::type_offset())));

  // Klass overwrite register
  if (is_static) {
    __ ldr(obj, Address(cache, ResolvedFieldEntry::field_holder_offset()));
    const int mirror_offset = in_bytes(Klass::java_mirror_offset());
    __ ldr(obj, Address(obj, mirror_offset));
    __ resolve_oop_handle(obj);
  }
}

// The method register is an input and is overwritten with the adapter method for the
// indy call. The link register (LR) is set to the return address for the adapter, and
// an appendix may be pushed to the stack. Registers R1-R3 and Rtemp (R12) are clobbered.
void TemplateTable::load_invokedynamic_entry(Register method) {
  // setup registers
  const Register appendix = R1;
  const Register cache = R2_tmp;
  const Register index = R3_tmp;
  assert_different_registers(method, appendix, cache, index);

  __ save_bcp();

  Label resolved;
  __ load_resolved_indy_entry(cache, index);
  // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
  __ ldr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
  TemplateTable::volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);
  // Compare the method to zero
  __ cbnz(method, resolved);

  Bytecodes::Code code = bytecode();

  // Call to the interpreter runtime to resolve invokedynamic
  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
  __ mov(R1, code); // this is essentially Bytecodes::_invokedynamic, call_VM requires R1
  __ call_VM(noreg, entry, R1);
  // Update registers with resolved info
  __ load_resolved_indy_entry(cache, index);
  // Load-acquire the adapter method to match store-release in ResolvedIndyEntry::fill_in()
  __ ldr(method, Address(cache, in_bytes(ResolvedIndyEntry::method_offset())));
  TemplateTable::volatile_barrier(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), noreg, true);

#ifdef ASSERT
  __ cbnz(method, resolved);
  __ stop("Should be resolved by now");
#endif // ASSERT
  __ bind(resolved);

  Label L_no_push;
  // Check if there is an appendix
  __ ldrb(index, Address(cache, in_bytes(ResolvedIndyEntry::flags_offset())));
  __ tbz(index, ResolvedIndyEntry::has_appendix_shift, L_no_push);
  // Get appendix
  __ ldrh(index, Address(cache, in_bytes(ResolvedIndyEntry::resolved_references_index_offset())));
  // Push the appendix as a trailing parameter
  // since the parameter_size includes it.
  __ load_resolved_reference_at_index(appendix, index);
  __ verify_oop(appendix);
  __ push(appendix);  // push appendix (MethodType, CallSite, etc.)
  __ bind(L_no_push);

  // compute return type
  __ ldrb(index, Address(cache, in_bytes(ResolvedIndyEntry::result_type_offset())));
  // load return address
  {
    const address table_addr = (address) Interpreter::invoke_return_entry_table_for(code);
    __ mov_address(Rtemp, table_addr);
    __ ldr(LR, Address(Rtemp, index, lsl, Interpreter::logStackElementSize));
  }
}

// Blows all volatile registers: R0-R3, Rtemp, LR.
void TemplateTable::load_resolved_method_entry_special_or_static(Register Rcache,
                                                                 Register method,
                                                                 Register flags) {
  Register index = flags;
  assert_different_registers(Rcache, method, flags);
  resolve_cache_and_index_for_method(f1_byte, Rcache, index);
  __ ldrb(flags, Address(Rcache, in_bytes(ResolvedMethodEntry::flags_offset())));
  __ ldr(method, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
}

void TemplateTable::load_resolved_method_entry_handle(Register Rcache,
                                                      Register method,
                                                      Register ref_index,
                                                      Register flags) {
  Register index = ref_index;
  assert_different_registers(method, flags);
  assert_different_registers(Rcache, method, index);

  resolve_cache_and_index_for_method(f1_byte, Rcache, index);
  __ ldrb(flags, Address(Rcache, in_bytes(ResolvedMethodEntry::flags_offset())));

  // maybe push appendix to arguments (just before return address)
  Label L_no_push;
  __ tbz(flags, ResolvedMethodEntry::has_appendix_shift, L_no_push);
  // invokehandle uses an index into the resolved references array
  __ ldrh(ref_index, Address(Rcache, in_bytes(ResolvedMethodEntry::resolved_references_index_offset())));
  // Push the appendix as a trailing parameter.
  // This must be done before we get the receiver,
  // since the parameter_size includes it.
  Register appendix = method;
  __ load_resolved_reference_at_index(appendix, ref_index);
  __ push(appendix);  // push appendix (MethodType, CallSite, etc.)
  __ bind(L_no_push);

  __ ldr(method, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
}

void TemplateTable::load_resolved_method_entry_interface(Register Rcache,
                                                         Register klass,
                                                         Register method_or_table_index,
                                                         Register flags) {
  // setup registers
  const Register index = method_or_table_index;
  assert_different_registers(method_or_table_index, Rcache, flags);

  // determine constant pool cache field offsets
  resolve_cache_and_index_for_method(f1_byte, Rcache, index);
  __ ldrb(flags, Address(Rcache, in_bytes(ResolvedMethodEntry::flags_offset())));

  // Invokeinterface can behave in different ways:
  // If calling a method from java.lang.Object, the forced virtual flag is true so the invocation will
  // behave like an invokevirtual call. The state of the virtual final flag will determine whether a method or
  // vtable index is placed in the register.
  // Otherwise, the registers will be populated with the klass and method.

  Label NotVirtual; Label NotVFinal; Label Done;
  __ tbz(flags, ResolvedMethodEntry::is_forced_virtual_shift, NotVirtual);
  __ tbz(flags, ResolvedMethodEntry::is_vfinal_shift, NotVFinal);
  __ ldr(method_or_table_index, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
  __ b(Done);

  __ bind(NotVFinal);
  __ ldrh(method_or_table_index, Address(Rcache, in_bytes(ResolvedMethodEntry::table_index_offset())));
  __ b(Done);

  __ bind(NotVirtual);
  __ ldr(method_or_table_index, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
  __ ldr(klass, Address(Rcache, in_bytes(ResolvedMethodEntry::klass_offset())));
  __ bind(Done);
}
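
// Summary of the flag dispatch above (illustrative pseudocode):
//
//   if (is_forced_virtual) {     // java.lang.Object method on an interface ref
//     if (is_vfinal) method      = entry->method();
//     else           table_index = entry->table_index();   // vtable index
//   } else {
//     method = entry->method();  // itable dispatch needs klass + method
//     klass  = entry->klass();
//   }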

void TemplateTable::load_resolved_method_entry_virtual(Register Rcache,
                                                       Register method_or_table_index,
                                                       Register flags) {
  // setup registers
  const Register index = flags;
  assert_different_registers(method_or_table_index, Rcache, flags);

  // determine constant pool cache field offsets
  resolve_cache_and_index_for_method(f2_byte, Rcache, index);
  __ ldrb(flags, Address(Rcache, in_bytes(ResolvedMethodEntry::flags_offset())));

  // method_or_table_index can either be a vtable index or a method depending on the virtual final flag
  Label NotVFinal; Label Done;
  __ tbz(flags, ResolvedMethodEntry::is_vfinal_shift, NotVFinal);
  __ ldr(method_or_table_index, Address(Rcache, in_bytes(ResolvedMethodEntry::method_offset())));
  __ b(Done);

  __ bind(NotVFinal);
  __ ldrh(method_or_table_index, Address(Rcache, in_bytes(ResolvedMethodEntry::table_index_offset())));
  __ bind(Done);
}

// The cache and index registers are expected to be set before the call, and
// must not be Rtemp.
// Blows volatile registers R0-R3, Rtemp, LR,
// except the cache and index registers, which are preserved.
void TemplateTable::jvmti_post_field_access(Register Rcache,
                                            Register Rindex,
                                            bool is_static,
                                            bool has_tos) {
  assert_different_registers(Rcache, Rindex, Rtemp);

  if (__ can_post_field_access()) {
    // Check to see if a field access watch has been set before we take
    // the time to call into the VM.

    Label Lcontinue;

    __ ldr_global_s32(Rtemp, (address)JvmtiExport::get_field_access_count_addr());
    __ cbz(Rtemp, Lcontinue);

    // cache entry pointer
    __ load_field_entry(R2, Rindex);

    if (is_static) {
      __ mov(R1, 0);        // null object reference
    } else {
      __ pop(atos);         // Get the object
      __ mov(R1, R0_tos);
      __ verify_oop(R1);
      __ push(atos);        // Restore stack state
    }
    // R1: object pointer or null
    // R2: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
               R1, R2);
    __ load_field_entry(Rcache, Rindex);

    __ bind(Lcontinue);
  }
}


void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r, Rtemp);  // for field access must check obj.
  __ verify_oop(r);
}


void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
  transition(vtos, vtos);

  const Register Rcache = R4_tmp;
  const Register Rindex = R3_tmp;

  const Register Roffset = R2_tmp;
  const Register Rtos_state = R3_tmp;
  const Register Robj = R4_tmp; // Rcache is free at the time of loading Robj
  const Register Rflags = R5_tmp;

  resolve_cache_and_index_for_field(byte_no, Rcache, Rindex);
  jvmti_post_field_access(Rcache, Rindex, is_static, false);
  load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags, is_static);

  if (!is_static) {
    pop_and_check_object(Robj);
  }

  Label Done, Lint, Ltable, shouldNotReachHere;
  Label Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;

  // There are two implementations of getfield/getstatic:
  //
  // 1) Table switch using add(PC,...) instruction (fast_version)
  // 2) Table switch using ldr(PC,...) instruction
  //
  // The first version requires a fixed-size code block for each case; it
  // cannot be used when RewriteBytecodes or VerifyOops is enabled.

  // Size of fixed size code block for fast_version
  const int log_max_block_size = 3;
  const int max_block_size = 1 << log_max_block_size;
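
  // Illustrative note (a sketch of the encoding, not generated code): with
  // log_max_block_size == 3, each case below occupies exactly 8 instructions
  // (FixedSizeCodeBlock pads the remainder), so the fast version can dispatch
  // with a single computed branch:
  //
  //   PC = PC + (tos_state << (log_max_block_size + LogInstructionSize));
  //
  // On ARM, reading PC yields the current instruction address + 8, which the
  // block layout below accounts for. The slow version instead loads the
  // target address from the Ltable word table.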

  // Decide if fast version is enabled
  bool fast_version = (is_static || !RewriteBytecodes) && !VerifyOops;

  // On 32-bit ARM the atos and itos cases can be merged only for the fast version,
  // because atos requires additional processing in the slow version.
  bool atos_merged_with_itos = fast_version;

  assert(number_of_states == 10, "number of tos states should be equal to 10");

  __ cmp(Rtos_state, itos);
  if (atos_merged_with_itos) {
    __ cmp(Rtos_state, atos, ne);
  }

  // table switch by type
  if (fast_version) {
    __ add(PC, PC, AsmOperand(Rtos_state, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
  } else {
    __ ldr(PC, Address(PC, Rtos_state, lsl, LogBytesPerWord), ne);
  }

  // jump to itos/atos case
  __ b(Lint);

  // table with addresses for slow version
  if (fast_version) {
    // nothing to do
  } else {
    __ bind(Ltable);
    __ emit_address(Lbtos);
    __ emit_address(Lztos);
    __ emit_address(Lctos);
    __ emit_address(Lstos);
    __ emit_address(Litos);
    __ emit_address(Lltos);
    __ emit_address(Lftos);
    __ emit_address(Ldtos);
    __ emit_address(Latos);
  }

#ifdef ASSERT
  int seq = 0;
#endif
  // btos
  {
    assert(btos == seq++, "btos has unexpected value");
    FixedSizeCodeBlock btos_block(_masm, max_block_size, fast_version);
    __ bind(Lbtos);
    __ access_load_at(T_BYTE, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    __ push(btos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // ztos (same as btos for getfield)
  {
    assert(ztos == seq++, "ztos has unexpected value");
    FixedSizeCodeBlock ztos_block(_masm, max_block_size, fast_version);
    __ bind(Lztos);
    __ access_load_at(T_BOOLEAN, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    __ push(ztos);
    // Rewrite bytecode to be faster (use btos fast getfield)
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // ctos
  {
    assert(ctos == seq++, "ctos has unexpected value");
    FixedSizeCodeBlock ctos_block(_masm, max_block_size, fast_version);
    __ bind(Lctos);
    __ access_load_at(T_CHAR, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    __ push(ctos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // stos
  {
    assert(stos == seq++, "stos has unexpected value");
    FixedSizeCodeBlock stos_block(_masm, max_block_size, fast_version);
    __ bind(Lstos);
    __ access_load_at(T_SHORT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    __ push(stos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // itos
  {
    assert(itos == seq++, "itos has unexpected value");
    FixedSizeCodeBlock itos_block(_masm, max_block_size, fast_version);
    __ bind(Litos);
    __ b(shouldNotReachHere);
  }

  // ltos
  {
    assert(ltos == seq++, "ltos has unexpected value");
    FixedSizeCodeBlock ltos_block(_masm, max_block_size, fast_version);
    __ bind(Lltos);
    __ access_load_at(T_LONG, IN_HEAP, Address(Robj, Roffset), noreg /* ltos */, noreg, noreg, noreg);
    __ push(ltos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_lgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // ftos
  {
    assert(ftos == seq++, "ftos has unexpected value");
    FixedSizeCodeBlock ftos_block(_masm, max_block_size, fast_version);
    __ bind(Lftos);
    // floats and ints are placed on the stack in the same way, so
    // we can use push(itos) to transfer the value without using VFP
    __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
    __ push(itos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // dtos
  {
    assert(dtos == seq++, "dtos has unexpected value");
    FixedSizeCodeBlock dtos_block(_masm, max_block_size, fast_version);
    __ bind(Ldtos);
    // doubles and longs are placed on the stack in the same way, so
    // we can use push(ltos) to transfer the value without using VFP
    __ access_load_at(T_LONG, IN_HEAP, Address(Robj, Roffset), noreg /* ltos */, noreg, noreg, noreg);
    __ push(ltos);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dgetfield, R0_tmp, Rtemp);
    }
    __ b(Done);
  }

  // atos
  {
    assert(atos == seq++, "atos has unexpected value");

    // atos case for slow version on 32-bit ARM
    if (!atos_merged_with_itos) {
      __ bind(Latos);
      do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
      __ push(atos);
      // Rewrite bytecode to be faster
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_agetfield, R0_tmp, Rtemp);
      }
      __ b(Done);
    }
  }

  assert(vtos == seq++, "vtos has unexpected value");

  __ bind(shouldNotReachHere);
  __ should_not_reach_here();

  // itos and atos cases are frequent, so it makes sense to move them out of the table switch.
  // The atos case can be merged with the itos case (and thus moved out of the table switch) on 32-bit ARM, fast version only.

  __ bind(Lint);
  __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
  __ push(itos);
  // Rewrite bytecode to be faster
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_igetfield, R0_tmp, Rtemp);
  }

  __ bind(Done);

  {
    // Check for volatile field
    Label notVolatile;
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
    __ bind(notVolatile);
  }
}

void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}

void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}


// The cache and index registers are expected to be set before the call, and
// must not be R1 or Rtemp.
// Blows volatile registers R0-R3, Rtemp, LR,
// except the cache and index registers, which are preserved.
void TemplateTable::jvmti_post_field_mod(Register Rcache, Register Rindex, bool is_static) {
  ByteSize cp_base_offset = ConstantPoolCache::base_offset();
  assert_different_registers(Rcache, Rindex, R1, Rtemp);

  if (__ can_post_field_modification()) {
    // Check to see if a field modification watch has been set before we take
    // the time to call into the VM.
    Label Lcontinue;

    __ ldr_global_s32(Rtemp, (address)JvmtiExport::get_field_modification_count_addr());
    __ cbz(Rtemp, Lcontinue);

    __ mov(R2, Rcache);

    if (is_static) {
      // Life is simple.  Null out the object pointer.
      __ mov(R1, 0);
    } else {
      // Life is harder. The stack holds the value on top, followed by the object.
      // We don't know the size of the value, though; it could be one or two words
      // depending on its type. As a result, we must find the type to determine where
      // the object is.
      __ ldrb(R3, Address(Rcache, in_bytes(ResolvedFieldEntry::type_offset())));

      __ cmp(R3, ltos);
      __ cond_cmp(R3, dtos, ne);
      // two word value (ltos/dtos)
      __ ldr(R1, Address(SP, Interpreter::expr_offset_in_bytes(2)), eq);

      // one word value (not ltos, dtos)
      __ ldr(R1, Address(SP, Interpreter::expr_offset_in_bytes(1)), ne);
    }

    // object (tos)
    __ mov(R3, Rstack_top);

    // R1: object pointer set up above (null if static)
    // R2: cache entry pointer
    // R3: value object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
               R1, R2, R3);
    __ load_field_entry(Rcache, Rindex);

    __ bind(Lcontinue);
  }
}
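
// Illustrative sketch of the expression stack layout handled above (the value
// is pushed last, so it sits nearest to the stack top):
//
//   one-word value:             two-word value (ltos/dtos):
//     SP + 0*word : value         SP + 0..1*word : value
//     SP + 1*word : objectref     SP + 2*word    : objectref
//
// hence the type check to pick expr_offset_in_bytes(1) vs
// expr_offset_in_bytes(2) when locating the object reference.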
3159

3160

3161
void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3162
  transition(vtos, vtos);
3163

3164
  const Register Rcache = R4_tmp;
3165
  const Register Rindex = R3_tmp;
3166

3167
  const Register Roffset = R2_tmp;
3168
  const Register Rtos_state = R3_tmp;
3169
  const Register Robj = R4_tmp; // Rcache is free at the time of loading Robj
3170
  const Register Rflags = R5_tmp;
3171

3172
  resolve_cache_and_index_for_field(byte_no, Rcache, Rindex);
3173
  jvmti_post_field_mod(Rcache, Rindex, is_static);
3174
  load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags, is_static);
3175

3176
  // Check for volatile field
3177
  {
3178
    Label notVolatile;
3179
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
3180
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
3181
    __ bind(notVolatile);
3182
  }
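  // Release semantics: the StoreStore|LoadStore barrier above keeps earlier
  // accesses from being reordered past the volatile store; the matching
  // trailing barrier is emitted after the Done label below.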

  Label Done, Lint, shouldNotReachHere;
  Label Ltable, Lbtos, Lztos, Lctos, Lstos, Litos, Lltos, Lftos, Ldtos, Latos;

  // There are actually two versions of implementation of putfield/putstatic:
  //
  // 32-bit ARM:
  // 1) Table switch using add(PC,...) instruction (fast_version)
  // 2) Table switch using ldr(PC,...) instruction
  //
  // First version requires fixed size of code block for each case and
  // can not be used in RewriteBytecodes and VerifyOops
  // modes.

  // Size of fixed size code block for fast_version (in instructions)
  const int log_max_block_size = 3;
  const int max_block_size = 1 << log_max_block_size;

  // Decide if fast version is enabled
  bool fast_version = (is_static || !RewriteBytecodes) && !VerifyOops;

  assert(number_of_states == 10, "number of tos states should be equal to 10");

  // itos case is frequent and is moved outside table switch
  __ cmp(Rtos_state, itos);

  // table switch by type
  if (fast_version) {
    __ add(PC, PC, AsmOperand(Rtos_state, lsl, log_max_block_size + Assembler::LogInstructionSize), ne);
  } else  {
    __ ldr(PC, Address(PC, Rtos_state, lsl, LogBytesPerWord), ne);
  }
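  // Note: reading PC in ARM state yields the address of the current
  // instruction plus 8, i.e. the address just past the b(Lint) emitted next.
  // So add(PC, PC, state << (log_max_block_size + LogInstructionSize)) lands
  // on the start of that state's FixedSizeCodeBlock, and the slow variant's
  // ldr(PC, [PC, state << LogBytesPerWord]) indexes the table bound at Ltable.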

  // jump to itos case
  __ b(Lint);

  // table with addresses for slow version
  if (fast_version) {
    // nothing to do
  } else  {
    __ bind(Ltable);
    __ emit_address(Lbtos);
    __ emit_address(Lztos);
    __ emit_address(Lctos);
    __ emit_address(Lstos);
    __ emit_address(Litos);
    __ emit_address(Lltos);
    __ emit_address(Lftos);
    __ emit_address(Ldtos);
    __ emit_address(Latos);
  }

#ifdef ASSERT
  int seq = 0;
#endif
  // btos
  {
    assert(btos == seq++, "btos has unexpected value");
    FixedSizeCodeBlock btos_block(_masm, max_block_size, fast_version);
    __ bind(Lbtos);
    __ pop(btos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_BYTE, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // ztos
  {
    assert(ztos == seq++, "ztos has unexpected value");
    FixedSizeCodeBlock ztos_block(_masm, max_block_size, fast_version);
    __ bind(Lztos);
    __ pop(ztos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_BOOLEAN, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_zputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // ctos
  {
    assert(ctos == seq++, "ctos has unexpected value");
    FixedSizeCodeBlock ctos_block(_masm, max_block_size, fast_version);
    __ bind(Lctos);
    __ pop(ctos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_CHAR, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // stos
  {
    assert(stos == seq++, "stos has unexpected value");
    FixedSizeCodeBlock stos_block(_masm, max_block_size, fast_version);
    __ bind(Lstos);
    __ pop(stos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_SHORT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // itos
  {
    assert(itos == seq++, "itos has unexpected value");
    FixedSizeCodeBlock itos_block(_masm, max_block_size, fast_version);
    __ bind(Litos);
    __ b(shouldNotReachHere);
  }

  // ltos
  {
    assert(ltos == seq++, "ltos has unexpected value");
    FixedSizeCodeBlock ltos_block(_masm, max_block_size, fast_version);
    __ bind(Lltos);
    __ pop(ltos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_LONG, IN_HEAP, Address(Robj, Roffset), noreg /* ltos */, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_lputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // ftos
  {
    assert(ftos == seq++, "ftos has unexpected value");
    FixedSizeCodeBlock ftos_block(_masm, max_block_size, fast_version);
    __ bind(Lftos);
    // floats and ints are placed on stack in the same way, so
    // we can use pop(itos) to transfer value without using VFP
    __ pop(itos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // dtos
  {
    assert(dtos == seq++, "dtos has unexpected value");
    FixedSizeCodeBlock dtos_block(_masm, max_block_size, fast_version);
    __ bind(Ldtos);
    // doubles and longs are placed on stack in the same way, so
    // we can use pop(ltos) to transfer value without using VFP
    __ pop(ltos);
    if (!is_static) pop_and_check_object(Robj);
    __ access_store_at(T_LONG, IN_HEAP, Address(Robj, Roffset), noreg /* ltos */, noreg, noreg, noreg, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  // atos
  {
    assert(atos == seq++, "atos has unexpected value");
    __ bind(Latos);
    __ pop(atos);
    if (!is_static) pop_and_check_object(Robj);
    // Store into the field
    do_oop_store(_masm, Address(Robj, Roffset), R0_tos, Rtemp, R1_tmp, R5_tmp, false);
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_aputfield, R0_tmp, Rtemp, true, byte_no);
    }
    __ b(Done);
  }

  __ bind(shouldNotReachHere);
  __ should_not_reach_here();

  // itos case is frequent and is moved outside table switch
  __ bind(Lint);
  __ pop(itos);
  if (!is_static) pop_and_check_object(Robj);
  __ access_store_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg, false);
  if (!is_static && rc == may_rewrite) {
    patch_bytecode(Bytecodes::_fast_iputfield, R0_tmp, Rtemp, true, byte_no);
  }

  __ bind(Done);

  {
    Label notVolatile;
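    // A volatile store must also be ordered with respect to a subsequent
    // volatile load, which is why this trailing barrier includes StoreLoad.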
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreLoad | MacroAssembler::StoreStore), Rtemp);
    __ bind(notVolatile);
  }
}

void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}

void TemplateTable::nofast_putfield(int byte_no) {
  putfield_or_static(byte_no, false, may_not_rewrite);
}

void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}


void TemplateTable::jvmti_post_fast_field_mod() {
  // This version of jvmti_post_fast_field_mod() is not used on ARM
  Unimplemented();
}

// Blows volatile registers R0-R3, Rtemp, LR,
// but preserves tosca with the given state.
void TemplateTable::jvmti_post_fast_field_mod(TosState state) {
  if (__ can_post_field_modification()) {
    // Check to see if a field modification watch has been set before we take
    // the time to call into the VM.
    Label done;

    __ ldr_global_s32(R2, (address)JvmtiExport::get_field_modification_count_addr());
    __ cbz(R2, done);

    __ pop_ptr(R3);               // copy the object pointer from tos
    __ verify_oop(R3);
    __ push_ptr(R3);              // put the object pointer back on tos

    __ push(state);               // save value on the stack

    // access constant pool cache entry
    __ load_field_entry(R2, R1);

    __ mov(R1, R3);
    assert(Interpreter::expr_offset_in_bytes(0) == 0, "adjust this code");
    __ mov(R3, Rstack_top); // put tos addr into R3

    // R1: object pointer copied above
    // R2: cache entry pointer
    // R3: jvalue object on the stack
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), R1, R2, R3);

    __ pop(state);                // restore value

    __ bind(done);
  }
}


void TemplateTable::fast_storefield(TosState state) {
  transition(state, vtos);

  ByteSize base = ConstantPoolCache::base_offset();

  jvmti_post_fast_field_mod(state);

  const Register Rcache = R4_tmp;
  const Register Rindex = R3_tmp;

  const Register Roffset = R2_tmp;
  const Register Rtos_state = R3_tmp;
  const Register Robj = R4_tmp;  // Rcache is free at the time of loading Robj
  const Register Rflags = R5_tmp;

  // access constant pool cache
  __ load_field_entry(Rcache, Rindex);
  load_resolved_field_entry(Robj, Rcache, Rtos_state, Roffset, Rflags);

  // load flags to test volatile
  {
    // Check for volatile store
    Label notVolatile;
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreStore | MacroAssembler::LoadStore), Rtemp);
    __ bind(notVolatile);
  }

  // Get object from stack
  pop_and_check_object(Robj);

  Address addr = Address(Robj, Roffset);
  // access field
  switch (bytecode()) {
    case Bytecodes::_fast_zputfield:
      __ access_store_at(T_BOOLEAN, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_bputfield:
      __ access_store_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_sputfield:
      __ access_store_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_cputfield:
      __ access_store_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_iputfield:
      __ access_store_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_lputfield:
      __ access_store_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_fputfield:
      __ access_store_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_dputfield:
      __ access_store_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg, false);
      break;
    case Bytecodes::_fast_aputfield:
      do_oop_store(_masm, addr, R0_tos, Rtemp, R1_tmp, R2_tmp, false);
      break;

    default:
      ShouldNotReachHere();
  }

  {
    // Check for volatile store
    Label notVolatile;
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::StoreLoad | MacroAssembler::StoreStore), Rtemp);
    __ bind(notVolatile);
  }

}

void TemplateTable::fast_accessfield(TosState state) {
  transition(atos, state);

  // do the JVMTI work here to avoid disturbing the register state below
  if (__ can_post_field_access()) {
    // Check to see if a field access watch has been set before we take
    // the time to call into the VM.
    Label done;
    __ ldr_global_s32(R2, (address) JvmtiExport::get_field_access_count_addr());
    __ cbz(R2, done);
    // access constant pool cache entry
    __ load_field_entry(R2, R1);
    __ push_ptr(R0_tos);  // save object pointer before call_VM() clobbers it
    __ verify_oop(R0_tos);
    __ mov(R1, R0_tos);
    // R1: object pointer copied above
    // R2: cache entry pointer
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), R1, R2);
    __ pop_ptr(R0_tos);   // restore object pointer

    __ bind(done);
  }

  const Register Robj    = R0_tos;
  const Register Rcache  = R2_tmp;
  const Register Rflags  = R2_tmp;
  const Register Rindex  = R3_tmp;
  const Register Roffset = R3_tmp;

  // access constant pool cache
  __ load_field_entry(Rcache, Rindex);
  // replace index with field offset from cache entry
  __ ldr(Roffset, Address(Rcache, ResolvedFieldEntry::field_offset_offset()));

  // load flags to test volatile
  __ ldrb(Rflags, Address(Rcache, ResolvedFieldEntry::flags_offset()));

  __ verify_oop(Robj);
  __ null_check(Robj);

  Address addr = Address(Robj, Roffset);
  // access field
  switch (bytecode()) {
    case Bytecodes::_fast_bgetfield:
      __ access_load_at(T_BYTE, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_sgetfield:
      __ access_load_at(T_SHORT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_cgetfield:
      __ access_load_at(T_CHAR, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_igetfield:
      __ access_load_at(T_INT, IN_HEAP, addr, R0_tos, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_lgetfield:
      __ access_load_at(T_LONG, IN_HEAP, addr, noreg, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_fgetfield:
      __ access_load_at(T_FLOAT, IN_HEAP, addr, noreg, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_dgetfield:
      __ access_load_at(T_DOUBLE, IN_HEAP, addr, noreg, noreg, noreg, noreg);
      break;
    case Bytecodes::_fast_agetfield:
      do_oop_load(_masm, R0_tos, addr);
      __ verify_oop(R0_tos);
      break;
    default:
      ShouldNotReachHere();
  }

  {
    // Check for volatile load
    Label notVolatile;
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
    __ bind(notVolatile);
  }
}


void TemplateTable::fast_xaccess(TosState state) {
  transition(vtos, state);

  const Register Robj = R1_tmp;
  const Register Rcache = R2_tmp;
  const Register Rindex = R3_tmp;
  const Register Roffset = R3_tmp;
  const Register Rflags = R4_tmp;
  Label done;

  // get receiver
  __ ldr(Robj, aaddress(0));

  // access constant pool cache
  __ load_field_entry(Rcache, Rindex, 2);
  __ ldr(Roffset, Address(Rcache, ResolvedFieldEntry::field_offset_offset()));

  // load flags to test volatile
  __ ldrb(Rflags, Address(Rcache, ResolvedFieldEntry::flags_offset()));

  // make sure exception is reported in correct bcp range (getfield is next instruction)
  __ add(Rbcp, Rbcp, 1);
  __ null_check(Robj, Rtemp);
  __ sub(Rbcp, Rbcp, 1);


  if (state == itos) {
    __ access_load_at(T_INT, IN_HEAP, Address(Robj, Roffset), R0_tos, noreg, noreg, noreg);
  } else if (state == atos) {
    do_oop_load(_masm, R0_tos, Address(Robj, Roffset));
    __ verify_oop(R0_tos);
  } else if (state == ftos) {
#ifdef __SOFTFP__
    __ ldr(R0_tos, Address(Robj, Roffset));
#else
    __ access_load_at(T_FLOAT, IN_HEAP, Address(Robj, Roffset), noreg /* ftos */, noreg, noreg, noreg);
#endif // __SOFTFP__
  } else {
    ShouldNotReachHere();
  }

  {
    // Check for volatile load
    Label notVolatile;
    __ tbz(Rflags, ResolvedFieldEntry::is_volatile_shift, notVolatile);
    __ membar(MacroAssembler::Membar_mask_bits(MacroAssembler::LoadLoad | MacroAssembler::LoadStore), Rtemp);
    __ bind(notVolatile);
  }

  __ bind(done);
}



//----------------------------------------------------------------------------------------------------
// Calls

void TemplateTable::prepare_invoke(Register Rcache, Register recv) {

  const Register ret_type = R1_tmp;

  const Bytecodes::Code code = bytecode();
  const bool load_receiver = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);

  // save 'interpreter return address'
  __ save_bcp();

  // Load TOS state for later
  __ ldrb(ret_type, Address(Rcache, in_bytes(ResolvedMethodEntry::type_offset())));

  // load receiver if needed (after extra argument is pushed so parameter size is correct)
  if (load_receiver) {
    __ ldrh(recv, Address(Rcache, in_bytes(ResolvedMethodEntry::num_parameters_offset())));
    __ add(recv, Rstack_top, AsmOperand(recv, lsl, Interpreter::logStackElementSize));
    __ ldr(recv, Address(recv, -Interpreter::stackElementSize));
    __ verify_oop(recv);
  }

  // load return address
  { const address table = (address) Interpreter::invoke_return_entry_table_for(code);
    __ mov_slow(LR, table);
    __ ldr(LR, Address::indexed_ptr(LR, ret_type));
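    // LR now holds the invoke return entry matching the callee's result
    // TosState, so the returned value is handled with the correct width
    // before dispatch continues at the next bytecode.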
  }
}


void TemplateTable::invokevirtual_helper(Register index,
                                         Register recv,
                                         Register flags) {

  const Register recv_klass = R2_tmp;

  assert_different_registers(index, recv, flags, Rtemp);
  assert_different_registers(index, recv_klass, R0_tmp, Rtemp);

  // Test for an invoke of a final method
  Label notFinal;
  __ tbz(flags, ResolvedMethodEntry::is_vfinal_shift, notFinal);

  assert(index == Rmethod, "Method* must be Rmethod, for interpreter calling convention");

  // do the call - the index is actually the method to call

  // It's final, need a null check here!
  __ null_check(recv, Rtemp);

  // profile this call
  __ profile_final_call(R0_tmp);

  __ jump_from_interpreted(Rmethod);

  __ bind(notFinal);

  // get receiver klass
  __ load_klass(recv_klass, recv);

  // profile this call
  __ profile_virtual_call(R0_tmp, recv_klass);

  // get target Method* & entry point
  const ByteSize base = Klass::vtable_start_offset();
  assert(vtableEntry::size() == 1, "adjust the scaling in the code below");
  __ add(Rtemp, recv_klass, AsmOperand(index, lsl, LogHeapWordSize));
  __ ldr(Rmethod, Address(Rtemp, base + vtableEntry::method_offset()));
  __ jump_from_interpreted(Rmethod);
}

void TemplateTable::invokevirtual(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");

  const Register Rrecv  = R2_tmp;
  const Register Rflags = R3_tmp;

  load_resolved_method_entry_virtual(Rrecv,   // ResolvedMethodEntry*
                                     Rmethod, // Method* or itable index
                                     Rflags); // Flags
  prepare_invoke(Rrecv, Rrecv);

  // Rmethod: index
  // Rrecv:   receiver
  // Rflags:  flags
  // LR:      return address

  invokevirtual_helper(Rmethod, Rrecv, Rflags);
}


void TemplateTable::invokespecial(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f1_byte, "use this argument");

  const Register Rrecv  = R2_tmp;
  const Register Rflags = R3_tmp;

  load_resolved_method_entry_special_or_static(Rrecv,  // ResolvedMethodEntry*
                                               Rmethod, // Method*
                                               Rflags); // Flags
  prepare_invoke(Rrecv, Rrecv);
  __ verify_oop(Rrecv);
  __ null_check(Rrecv, Rtemp);
  // do the call
  __ profile_call(Rrecv);
  __ jump_from_interpreted(Rmethod);
}


void TemplateTable::invokestatic(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f1_byte, "use this argument");

  const Register Rrecv  = R2_tmp;
  const Register Rflags = R3_tmp;

  load_resolved_method_entry_special_or_static(Rrecv,  // ResolvedMethodEntry*
                                               Rmethod, // Method*
                                               Rflags); // Flags
  prepare_invoke(Rrecv, Rrecv);
  // do the call
  __ profile_call(Rrecv);
  __ jump_from_interpreted(Rmethod);
}


void TemplateTable::fast_invokevfinal(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");
  __ stop("fast_invokevfinal is not used on ARM");
}


void TemplateTable::invokeinterface(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f1_byte, "use this argument");

  const Register Ritable = R1_tmp;
  const Register Rrecv   = R2_tmp;
  const Register Rinterf = R5_tmp;
  const Register Rindex  = R4_tmp;
  const Register Rflags  = R3_tmp;
  const Register Rklass  = R2_tmp; // Note! Same register with Rrecv

  load_resolved_method_entry_interface(Rrecv,   // ResolvedMethodEntry*
                                       Rinterf, // Klass*
                                       Rmethod, // Method* or itable/vtable index
                                       Rflags); // Flags
  prepare_invoke(Rrecv, Rrecv);

  // First check for Object case, then private interface method,
  // then regular interface method.

  // Special case of invokeinterface called for virtual method of
  // java.lang.Object.  See cpCache.cpp for details.
  Label notObjectMethod;
  __ tbz(Rflags, ResolvedMethodEntry::is_forced_virtual_shift, notObjectMethod);
  invokevirtual_helper(Rmethod, Rrecv, Rflags);
  __ bind(notObjectMethod);

  // Get receiver klass into Rklass - also a null check
  __ load_klass(Rklass, Rrecv);

  // Check for private method invocation - indicated by vfinal
  Label no_such_interface;

  Label notVFinal;
  __ tbz(Rflags, ResolvedMethodEntry::is_vfinal_shift, notVFinal);

  Label subtype;
  __ check_klass_subtype(Rklass, Rinterf, R1_tmp, R3_tmp, noreg, subtype);
  // If we get here the typecheck failed
  __ b(no_such_interface);
  __ bind(subtype);

  // do the call
  __ profile_final_call(R0_tmp);
  __ jump_from_interpreted(Rmethod);

  __ bind(notVFinal);

  // Receiver subtype check against REFC.
  __ lookup_interface_method(// inputs: rec. class, interface
                             Rklass, Rinterf, noreg,
                             // outputs:  scan temp. reg1, scan temp. reg2
                             noreg, Ritable, Rtemp,
                             no_such_interface);

  // profile this call
  __ profile_virtual_call(R0_tmp, Rklass);

  // Get declaring interface class from method
  __ ldr(Rtemp, Address(Rmethod, Method::const_offset()));
  __ ldr(Rtemp, Address(Rtemp, ConstMethod::constants_offset()));
  __ ldr(Rinterf, Address(Rtemp, ConstantPool::pool_holder_offset()));

  // Get itable index from method
  __ ldr_s32(Rtemp, Address(Rmethod, Method::itable_index_offset()));
  __ add(Rtemp, Rtemp, (-Method::itable_index_max)); // small negative constant is too large for an immediate on arm32
  __ neg(Rindex, Rtemp);
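  // The itable index is stored encoded relative to the negative sentinel
  // itable_index_max; the add/neg pair above recovers
  // index = itable_index_max - stored_value.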

  __ lookup_interface_method(// inputs: rec. class, interface
                             Rklass, Rinterf, Rindex,
                             // outputs:  scan temp. reg1, scan temp. reg2
                             Rmethod, Ritable, Rtemp,
                             no_such_interface);

  // Rmethod: Method* to call

  // Check for abstract method error
  // Note: This should be done more efficiently via a throw_abstract_method_error
  //       interpreter entry point and a conditional jump to it in case of a null
  //       method.
  { Label L;
    __ cbnz(Rmethod, L);
    // throw exception
    // note: must restore interpreter registers to canonical
    //       state for exception handling to work correctly!
    __ restore_method();
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
    // the call_VM checks for exception, so we should never return here.
    __ should_not_reach_here();
    __ bind(L);
  }

  // do the call
  __ jump_from_interpreted(Rmethod);

  // throw exception
  __ bind(no_such_interface);
  __ restore_method();
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
  // the call_VM checks for exception, so we should never return here.
  __ should_not_reach_here();
}

void TemplateTable::invokehandle(int byte_no) {
  transition(vtos, vtos);

  const Register Rrecv  = R2_tmp;
  const Register Rmtype = R4_tmp;

  load_resolved_method_entry_handle(R2_tmp,  // ResolvedMethodEntry*
                                    Rmethod, // Method*
                                    Rmtype,  // Resolved Reference
                                    R3_tmp); // Flags
  prepare_invoke(Rrecv, Rrecv);
  __ null_check(Rrecv, Rtemp);

  // Rmtype:  MethodType object (from cpool->resolved_references[f1], if necessary)
  // Rmethod: MH.invokeExact_MT method (from f2)

  // Note:  Rmtype is already pushed (if necessary) by prepare_invoke

  // do the call
  __ profile_final_call(R3_tmp);  // FIXME: profile the LambdaForm also
  __ jump_from_interpreted(Rmethod);
}

void TemplateTable::invokedynamic(int byte_no) {
  transition(vtos, vtos);

  const Register Rcallsite = R4_tmp;
  const Register R5_method = R5_tmp;  // can't reuse Rmethod!

  load_invokedynamic_entry(R5_method);

  // Rcallsite: CallSite object (from cpool->resolved_references[f1])
  // Rmethod:   MH.linkToCallSite method (from f2)

  // Note:  Rcallsite is already pushed by prepare_invoke

  if (ProfileInterpreter) {
    __ profile_call(R2_tmp);
  }

  // do the call
  __ mov(Rmethod, R5_method);
  __ jump_from_interpreted(Rmethod);
}

//----------------------------------------------------------------------------------------------------
// Allocation

void TemplateTable::_new() {
  transition(vtos, atos);

  const Register Robj   = R0_tos;
  const Register Rcpool = R1_tmp;
  const Register Rindex = R2_tmp;
  const Register Rtags  = R3_tmp;
  const Register Rsize  = R3_tmp;

  Register Rklass = R4_tmp;
  assert_different_registers(Rcpool, Rindex, Rtags, Rklass, Rtemp);
  assert_different_registers(Rcpool, Rindex, Rklass, Rsize);

  Label slow_case;
  Label done;
  Label initialize_header;

  __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);
  __ get_cpool_and_tags(Rcpool, Rtags);

  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading InstanceKlass to be consistent with the order
  // how Constant Pool is updated (see ConstantPool::klass_at_put)
  const int tags_offset = Array<u1>::base_offset_in_bytes();
  __ add(Rtemp, Rtags, Rindex);

  __ ldrb(Rtemp, Address(Rtemp, tags_offset));

  // use Rklass as a scratch
  volatile_barrier(MacroAssembler::LoadLoad, Rklass);

  // get InstanceKlass
  __ cmp(Rtemp, JVM_CONSTANT_Class);
  __ b(slow_case, ne);
  __ load_resolved_klass_at_offset(Rcpool, Rindex, Rklass);

  // make sure klass is fully initialized
  __ ldrb(Rtemp, Address(Rklass, InstanceKlass::init_state_offset()));
  __ cmp(Rtemp, InstanceKlass::fully_initialized);
  __ b(slow_case, ne);

  // get instance_size in InstanceKlass (scaled to a count of bytes)
  __ ldr_u32(Rsize, Address(Rklass, Klass::layout_helper_offset()));

  // test to see if it is malformed in some way
  // Klass::_lh_instance_slow_path_bit is really a bit mask, not bit number
  __ tbnz(Rsize, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);

  // Allocate the instance:
  //  If TLAB is enabled:
  //    Try to allocate in the TLAB.
  //    If fails, go to the slow path.
  //    Initialize the allocation.
  //    Exit.
  //
  //  Go to slow path.
  if (UseTLAB) {
    const Register Rtlab_top = R1_tmp;
    const Register Rtlab_end = R2_tmp;
    assert_different_registers(Robj, Rsize, Rklass, Rtlab_top, Rtlab_end);

    __ tlab_allocate(Robj, Rtlab_top, Rtlab_end, Rsize, slow_case);
    if (ZeroTLAB) {
      // the fields have been already cleared
      __ b(initialize_header);
    }

    const Register Rzero0 = R1_tmp;
    const Register Rzero1 = R2_tmp;
    const Register Rzero_end = R5_tmp;
    const Register Rzero_cur = Rtemp;
    assert_different_registers(Robj, Rsize, Rklass, Rzero0, Rzero1, Rzero_cur, Rzero_end);

    // The object is initialized before the header.  If the object size is
    // zero, go directly to the header initialization.
    __ subs(Rsize, Rsize, sizeof(oopDesc));
    __ add(Rzero_cur, Robj, sizeof(oopDesc));
    __ b(initialize_header, eq);

#ifdef ASSERT
    // make sure Rsize is a multiple of 8
    Label L;
    __ tst(Rsize, 0x07);
    __ b(L, eq);
    __ stop("object size is not multiple of 8 - adjust this code");
    __ bind(L);
#endif

    __ mov(Rzero0, 0);
    __ mov(Rzero1, 0);
    __ add(Rzero_end, Rzero_cur, Rsize);

    // initialize remaining object fields: Rsize was a multiple of 8
    { Label loop;
      // loop is unrolled 2 times
      __ bind(loop);
      // #1
      __ stmia(Rzero_cur, RegisterSet(Rzero0) | RegisterSet(Rzero1), writeback);
      __ cmp(Rzero_cur, Rzero_end);
      // #2
      __ stmia(Rzero_cur, RegisterSet(Rzero0) | RegisterSet(Rzero1), writeback, ne);
      __ cmp(Rzero_cur, Rzero_end, ne);
      __ b(loop, ne);
    }
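    // Each stmia stores the zero pair Rzero0/Rzero1 (8 bytes) and advances
    // Rzero_cur via writeback; since Rsize is a multiple of 8, the
    // conditional second store never runs past Rzero_end.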

    // initialize object header only.
    __ bind(initialize_header);
    __ mov_slow(Rtemp, (intptr_t)markWord::prototype().value());
    // mark
    __ str(Rtemp, Address(Robj, oopDesc::mark_offset_in_bytes()));

    // klass
    __ store_klass(Rklass, Robj); // blows Rklass:
    Rklass = noreg;

    // Note: Disable DTrace runtime check for now to eliminate overhead on each allocation
    if (DTraceAllocProbes) {
      // Trigger dtrace event for fastpath
      Label Lcontinue;

      __ ldrb_global(Rtemp, (address)&DTraceAllocProbes);
      __ cbz(Rtemp, Lcontinue);

      __ push(atos);
      __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), Robj);
      __ pop(atos);

      __ bind(Lcontinue);
    }

    __ b(done);
  } else {
    // jump over literals
    __ b(slow_case);
  }

  // slow case
  __ bind(slow_case);
  __ get_constant_pool(Rcpool);
  __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);
  __ call_VM(Robj, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), Rcpool, Rindex);

  // continue
  __ bind(done);

  // StoreStore barrier required after complete initialization
  // (headers + content zeroing), before the object may escape.
  __ membar(MacroAssembler::StoreStore, R1_tmp);
}


void TemplateTable::newarray() {
  transition(itos, atos);
  __ ldrb(R1, at_bcp(1));
  __ mov(R2, R0_tos);
  call_VM(R0_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray), R1, R2);
  // MacroAssembler::StoreStore useless (included in the runtime exit path)
}


void TemplateTable::anewarray() {
  transition(itos, atos);
  __ get_unsigned_2_byte_index_at_bcp(R2, 1);
  __ get_constant_pool(R1);
  __ mov(R3, R0_tos);
  call_VM(R0_tos, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray), R1, R2, R3);
  // MacroAssembler::StoreStore useless (included in the runtime exit path)
}


void TemplateTable::arraylength() {
  transition(atos, itos);
  __ ldr_s32(R0_tos, Address(R0_tos, arrayOopDesc::length_offset_in_bytes()));
}


void TemplateTable::checkcast() {
  transition(atos, atos);
  Label done, is_null, quicked, resolved, throw_exception;

  const Register Robj = R0_tos;
  const Register Rcpool = R2_tmp;
  const Register Rtags = R3_tmp;
  const Register Rindex = R4_tmp;
  const Register Rsuper = R3_tmp;
  const Register Rsub   = R4_tmp;
  const Register Rsubtype_check_tmp1 = R1_tmp;
  const Register Rsubtype_check_tmp2 = LR_tmp;

  __ cbz(Robj, is_null);

  // Get cpool & tags index
  __ get_cpool_and_tags(Rcpool, Rtags);
  __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);

  // See if bytecode has already been quicked
  __ add(Rtemp, Rtags, Rindex);
  __ ldrb(Rtemp, Address(Rtemp, Array<u1>::base_offset_in_bytes()));

  __ cmp(Rtemp, JVM_CONSTANT_Class);

  volatile_barrier(MacroAssembler::LoadLoad, Rtemp, true);
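  // The LoadLoad barrier orders the tag load above before the resolved-klass
  // load below; otherwise a weakly ordered CPU could see the Class tag but
  // still read a stale constant pool entry.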

  __ b(quicked, eq);

  __ push(atos);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
  // vm_result_2 has metadata result
  __ get_vm_result_2(Rsuper, Robj);
  __ pop_ptr(Robj);
  __ b(resolved);

  __ bind(throw_exception);
  // Come here on failure of subtype check
  __ profile_typecheck_failed(R1_tmp);
  __ mov(R2_ClassCastException_obj, Robj);             // convention with generate_ClassCastException_handler()
  __ b(Interpreter::_throw_ClassCastException_entry);

  // Get superklass in Rsuper and subklass in Rsub
  __ bind(quicked);
  __ load_resolved_klass_at_offset(Rcpool, Rindex, Rsuper);

  __ bind(resolved);
  __ load_klass(Rsub, Robj);

  // Generate subtype check. Blows both tmps and Rtemp.
  assert_different_registers(Robj, Rsub, Rsuper, Rsubtype_check_tmp1, Rsubtype_check_tmp2, Rtemp);
  __ gen_subtype_check(Rsub, Rsuper, throw_exception, Rsubtype_check_tmp1, Rsubtype_check_tmp2);

  // Come here on success

  // Collect counts on whether this check-cast sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ b(done);
    __ bind(is_null);
    __ profile_null_seen(R1_tmp);
  } else {
    __ bind(is_null);   // same as 'done'
  }
  __ bind(done);
}


void TemplateTable::instanceof() {
  // result = 0: obj == nullptr or  obj is not an instanceof the specified klass
  // result = 1: obj != nullptr and obj is     an instanceof the specified klass

  transition(atos, itos);
  Label done, is_null, not_subtype, quicked, resolved;

  const Register Robj = R0_tos;
  const Register Rcpool = R2_tmp;
  const Register Rtags = R3_tmp;
  const Register Rindex = R4_tmp;
  const Register Rsuper = R3_tmp;
  const Register Rsub   = R4_tmp;
  const Register Rsubtype_check_tmp1 = R0_tmp;
  const Register Rsubtype_check_tmp2 = R1_tmp;

  __ cbz(Robj, is_null);

  __ load_klass(Rsub, Robj);

  // Get cpool & tags index
  __ get_cpool_and_tags(Rcpool, Rtags);
  __ get_unsigned_2_byte_index_at_bcp(Rindex, 1);

  // See if bytecode has already been quicked
  __ add(Rtemp, Rtags, Rindex);
  __ ldrb(Rtemp, Address(Rtemp, Array<u1>::base_offset_in_bytes()));
  __ cmp(Rtemp, JVM_CONSTANT_Class);

  volatile_barrier(MacroAssembler::LoadLoad, Rtemp, true);

  __ b(quicked, eq);

  __ push(atos);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
  // vm_result_2 has metadata result
  __ get_vm_result_2(Rsuper, Robj);
  __ pop_ptr(Robj);
  __ b(resolved);

  // Get superklass in Rsuper and subklass in Rsub
  __ bind(quicked);
  __ load_resolved_klass_at_offset(Rcpool, Rindex, Rsuper);

  __ bind(resolved);
  __ load_klass(Rsub, Robj);

  // Generate subtype check. Blows both tmps and Rtemp.
  __ gen_subtype_check(Rsub, Rsuper, not_subtype, Rsubtype_check_tmp1, Rsubtype_check_tmp2);

  // Come here on success
  __ mov(R0_tos, 1);
  __ b(done);

  __ bind(not_subtype);
  // Come here on failure
  __ profile_typecheck_failed(R1_tmp);
  __ mov(R0_tos, 0);

  // Collect counts on whether this test sees nulls a lot or not.
  if (ProfileInterpreter) {
    __ b(done);
    __ bind(is_null);
    __ profile_null_seen(R1_tmp);
  } else {
    __ bind(is_null);   // same as 'done'
  }
  __ bind(done);
}


//----------------------------------------------------------------------------------------------------
// Breakpoints
void TemplateTable::_breakpoint() {

  // Note: We get here even if we are single stepping.
  // jbug insists on setting breakpoints at every bytecode
  // even if we are in single step mode.

  transition(vtos, vtos);

  // get the unpatched byte code
  __ mov(R1, Rmethod);
  __ mov(R2, Rbcp);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), R1, R2);
  __ mov(Rtmp_save0, R0);

  // post the breakpoint event
  __ mov(R1, Rmethod);
  __ mov(R2, Rbcp);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), R1, R2);

  // complete the execution of original bytecode
  __ mov(R3_bytecode, Rtmp_save0);
  __ dispatch_only_normal(vtos);
}


//----------------------------------------------------------------------------------------------------
// Exceptions

void TemplateTable::athrow() {
  transition(atos, vtos);
  __ mov(Rexception_obj, R0_tos);
  __ null_check(Rexception_obj, Rtemp);
  __ b(Interpreter::throw_exception_entry());
}


//----------------------------------------------------------------------------------------------------
// Synchronization
//
// Note: monitorenter & exit are symmetric routines; which is reflected
//       in the assembly code structure as well
//
// Stack layout:
//
// [expressions  ] <--- Rstack_top        = expression stack top
// ..
// [expressions  ]
// [monitor entry] <--- monitor block top = expression stack bot
// ..
// [monitor entry]
// [frame data   ] <--- monitor block bot
// ...
// [saved FP     ] <--- FP


void TemplateTable::monitorenter() {
  transition(atos, vtos);

  const Register Robj = R0_tos;
  const Register Rentry = R1_tmp;

  // check for null object
  __ null_check(Robj, Rtemp);

  const int entry_size = (frame::interpreter_frame_monitor_size_in_bytes());
  assert (entry_size % StackAlignmentInBytes == 0, "keep stack alignment");
  Label allocate_monitor, allocated;

  // initialize entry pointer
  __ mov(Rentry, 0);                             // points to free slot or null

  // find a free slot in the monitor block (result in Rentry)
  { Label loop, exit;
    const Register Rcur = R2_tmp;
    const Register Rcur_obj = Rtemp;
    const Register Rbottom = R3_tmp;
    assert_different_registers(Robj, Rentry, Rcur, Rbottom, Rcur_obj);

    __ ldr(Rcur, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
                                 // points to current entry, starting with top-most entry
    __ sub(Rbottom, FP, -frame::interpreter_frame_monitor_block_bottom_offset * wordSize);
                                 // points to word before bottom of monitor block

    __ cmp(Rcur, Rbottom);                       // check if there are no monitors
    __ ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset()), ne);
                                                 // prefetch monitor's object for the first iteration
    __ b(allocate_monitor, eq);                  // there are no monitors, skip searching

    __ bind(loop);
    __ cmp(Rcur_obj, 0);                         // check if current entry is used
    __ mov(Rentry, Rcur, eq);                    // if not used then remember entry

    __ cmp(Rcur_obj, Robj);                      // check if current entry is for same object
    __ b(exit, eq);                              // if same object then stop searching

    __ add(Rcur, Rcur, entry_size);              // otherwise advance to next entry

    __ cmp(Rcur, Rbottom);                       // check if bottom reached
    __ ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset()), ne);
                                                 // prefetch monitor's object for the next iteration
    __ b(loop, ne);                              // if not at bottom then check this entry
    __ bind(exit);
  }

  __ cbnz(Rentry, allocated);                    // check if a slot has been found; if found, continue with that one

  __ bind(allocate_monitor);

  // allocate one if there's no free slot
  { Label loop;
    assert_different_registers(Robj, Rentry, R2_tmp, Rtemp);

    // 1. compute new pointers


    __ ldr(Rentry, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
                                                 // old monitor block top / expression stack bottom

    __ sub(Rstack_top, Rstack_top, entry_size);  // move expression stack top
    __ check_stack_top_on_expansion();

    __ sub(Rentry, Rentry, entry_size);          // move expression stack bottom

    __ mov(R2_tmp, Rstack_top);                  // set start value for copy loop

    __ str(Rentry, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
                                                 // set new monitor block top

    // 2. move expression stack contents

    __ cmp(R2_tmp, Rentry);                                 // check if expression stack is empty
    __ ldr(Rtemp, Address(R2_tmp, entry_size), ne);         // load expression stack word from old location
    __ b(allocated, eq);

    __ bind(loop);
    __ str(Rtemp, Address(R2_tmp, wordSize, post_indexed)); // store expression stack word at new location
                                                            // and advance to next word
    __ cmp(R2_tmp, Rentry);                                 // check if bottom reached
    __ ldr(Rtemp, Address(R2_tmp, entry_size), ne);         // load expression stack word from old location
    __ b(loop, ne);                                         // if not at bottom then copy next word
  }
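  // The stack grows downward and the monitor block sits between the frame
  // data and the expression stack, so opening a new slot slides every live
  // expression stack word down by entry_size: the loop above copies each
  // word from its old location (R2_tmp + entry_size) to its new one.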

  // call run-time routine

  // Rentry: points to monitor entry
  __ bind(allocated);

  // Increment bcp to point to the next bytecode, so exception handling for async. exceptions work correctly.
  // The object has already been popped from the stack, so the expression stack looks correct.
  __ add(Rbcp, Rbcp, 1);

  __ str(Robj, Address(Rentry, BasicObjectLock::obj_offset()));     // store object
  __ lock_object(Rentry);

  // check to make sure this monitor doesn't cause stack overflow after locking
  __ save_bcp();  // in case of exception
  __ arm_stack_overflow_check(0, Rtemp);

  // The bcp has already been incremented. Just need to dispatch to next instruction.
  __ dispatch_next(vtos);
}


void TemplateTable::monitorexit() {
  transition(atos, vtos);

  const Register Robj = R0_tos;
  const Register Rcur = R1_tmp;
  const Register Rbottom = R2_tmp;
  const Register Rcur_obj = Rtemp;
  const Register Rmonitor = R0;      // fixed in unlock_object()

  // check for null object
  __ null_check(Robj, Rtemp);

  const int entry_size = (frame::interpreter_frame_monitor_size_in_bytes());
  Label found, throw_exception;

  // find matching slot
  { Label loop;
    assert_different_registers(Robj, Rcur, Rbottom, Rcur_obj);

    __ ldr(Rcur, Address(FP, frame::interpreter_frame_monitor_block_top_offset * wordSize));
                                 // points to current entry, starting with top-most entry
    __ sub(Rbottom, FP, -frame::interpreter_frame_monitor_block_bottom_offset * wordSize);
                                 // points to word before bottom of monitor block

    __ cmp(Rcur, Rbottom);                       // check if bottom reached
    __ ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset()), ne);
                                                 // prefetch monitor's object for the first iteration
    __ b(throw_exception, eq);                   // throw exception if there are no monitors

    __ bind(loop);
    // check if current entry is for same object
    __ cmp(Rcur_obj, Robj);
    __ b(found, eq);                             // if same object then stop searching
    __ add(Rcur, Rcur, entry_size);              // otherwise advance to next entry
    __ cmp(Rcur, Rbottom);                       // check if bottom reached
    __ ldr(Rcur_obj, Address(Rcur, BasicObjectLock::obj_offset()), ne);
    __ b (loop, ne);                             // if not at bottom then check this entry
  }

  // error handling. Unlocking was not block-structured
  __ bind(throw_exception);
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));
  __ should_not_reach_here();

  // call run-time routine
  // Rcur: points to monitor entry
  __ bind(found);
  __ push_ptr(Robj);                             // make sure object is on stack (contract with oopMaps)
  __ mov(Rmonitor, Rcur);
  __ unlock_object(Rmonitor);
  __ pop_ptr(Robj);                              // discard object
}


//----------------------------------------------------------------------------------------------------
// Wide instructions

void TemplateTable::wide() {
  transition(vtos, vtos);
  __ ldrb(R3_bytecode, at_bcp(1));

  InlinedAddress Ltable((address)Interpreter::_wentry_point);
  __ ldr_literal(Rtemp, Ltable);
  __ indirect_jump(Address::indexed_ptr(Rtemp, R3_bytecode), Rtemp);

  __ nop(); // to avoid filling CPU pipeline with invalid instructions
  __ nop();
  __ bind_literal(Ltable);
}


//----------------------------------------------------------------------------------------------------
// Multi arrays

void TemplateTable::multianewarray() {
  transition(vtos, atos);
  __ ldrb(Rtmp_save0, at_bcp(3));   // get number of dimensions

  // last dim is on top of stack; we want address of first one:
  // first_addr = last_addr + ndims * stackElementSize - 1*wordsize
  // the trailing wordSize is subtracted so the result points to the beginning of the array.
  __ add(Rtemp, Rstack_top, AsmOperand(Rtmp_save0, lsl, Interpreter::logStackElementSize));
  __ sub(R1, Rtemp, wordSize);

  call_VM(R0, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), R1);
  __ add(Rstack_top, Rstack_top, AsmOperand(Rtmp_save0, lsl, Interpreter::logStackElementSize));
  // MacroAssembler::StoreStore useless (included in the runtime exit path)
}