20
#include "qemu/osdep.h"
22
#include "tcg/helper-info.h"
23
#include "tcg/tcg-ldst.h"
31
/*
 * tci_assert(): full assertion checking only when CONFIG_DEBUG_TCG is
 * enabled; otherwise the condition is evaluated for side effects and
 * its value discarded.
 * NOTE(review): the #else/#endif lines of this conditional were lost in
 * extraction (the two contradictory #defines were visible back to back);
 * restored here.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif
37
/* Per-thread pointer to the current TB; assigned from tb_ptr right before
 * the ffi_call in tcg_qemu_tb_exec.  NOTE(review): exact consumer is not
 * visible in this chunk -- presumably used for unwinding; confirm. */
__thread uintptr_t tci_tb_ptr;
39
/*
 * Store a 64-bit value into a pair of 32-bit registers:
 * low 32 bits into regs[low_index], high 32 bits into regs[high_index].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}
47
/*
 * Combine two 32-bit halves into one 64-bit value (high:low).
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
68
/*
 * Decode operand format "l": a label/pointer operand.  Bits [12,31]
 * hold a signed 20-bit byte displacement from tb_ptr; a displacement of
 * zero encodes NULL.
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);

    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}
74
/*
 * Decode operand format "r": a single register in bits [8,11].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}
79
/*
 * Decode operand format "nl": a 4-bit immediate in bits [8,11] and a
 * pointer formed by adding the signed 20-bit displacement in bits
 * [12,31] to tb_ptr.
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}
86
static void tci_args_rl(uint32_t insn, const void *tb_ptr,
87
TCGReg *r0, void **l1)
89
*r0 = extract32(insn, 8, 4);
90
*l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
93
/*
 * Decode operand format "rr": registers in bits [8,11] and [12,15].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}
99
/*
 * Decode operand format "ri": a register in bits [8,11] and a signed
 * 20-bit immediate in bits [12,31].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}
105
/*
 * Decode operand format "rrm": two registers and a 16-bit MemOpIdx in
 * bits [16,31].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, MemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 16, 16);
}
113
/*
 * Decode operand format "rrr": three registers in bits [8,11], [12,15]
 * and [16,19].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}
120
/*
 * Decode operand format "rrs": two registers and a signed 16-bit
 * offset in bits [16,31].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}
127
/*
 * Decode operand format "rrbb": two registers and two 6-bit bitfield
 * parameters in bits [16,21] and [22,27].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}
136
/*
 * Decode operand format "rrrc": three registers and a 4-bit TCGCond in
 * bits [20,23].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}
145
/*
 * Decode operand format "rrrbb": three registers and two 6-bit bitfield
 * parameters in bits [20,25] and [26,31].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}
155
/*
 * Decode operand format "rrrrr": five registers in consecutive nibbles
 * starting at bit 8.
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGReg *r4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
}
165
/*
 * Decode operand format "rrrr": four registers in consecutive nibbles
 * starting at bit 8.
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}
174
/*
 * Decode operand format "rrrrrc": five registers in consecutive nibbles
 * from bit 8, plus a 4-bit TCGCond in bits [28,31].
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}
185
/*
 * Decode operand format "rrrrrr": six registers in consecutive nibbles
 * starting at bit 8.
 * NOTE(review): brace lines were lost in extraction; restored here.
 */
static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *r5 = extract32(insn, 28, 4);
}
196
/*
 * Evaluate a 32-bit comparison 'u0 <condition> u1' and return the
 * boolean result.
 * NOTE(review): extraction fragment -- the switch on 'condition' and
 * most of its arms are missing; the surviving arms compute
 * (u0 & u1) == 0 / != 0, which look like the "test" conditions
 * (cf. the tsteq/tstne names in str_c below) -- confirm upstream.
 * Bare original line numbers remain interleaved below.
 */
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
233
result = (u0 & u1) == 0;
236
result = (u0 & u1) != 0;
239
g_assert_not_reached();
244
/*
 * Evaluate a 64-bit comparison 'u0 <condition> u1' and return the
 * boolean result; 64-bit counterpart of tci_compare32 above.
 * NOTE(review): extraction fragment -- the switch and most arms are
 * missing; only the (u0 & u1) == 0 / != 0 arms and the default trap
 * survive.  Bare original line numbers remain interleaved below.
 */
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
281
result = (u0 & u1) == 0;
284
result = (u0 & u1) != 0;
287
g_assert_not_reached();
292
/*
 * Load one value from guest memory for a qemu_ld opcode: dispatch on
 * the MO_SSIZE bits of the MemOp to the matching per-width/sign mmu
 * helper, passing tb_ptr (as uintptr_t 'ra') for unwinding.
 * NOTE(review): extraction fragment -- the case labels and braces of
 * the switch are missing; judging by the helper names the returns
 * correspond, in order, to 8u/8s/16u/16s/32u/32s/64-bit loads --
 * confirm upstream.  Bare original line numbers remain interleaved.
 */
static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
293
MemOpIdx oi, const void *tb_ptr)
295
MemOp mop = get_memop(oi);
296
uintptr_t ra = (uintptr_t)tb_ptr;
298
switch (mop & MO_SSIZE) {
300
return helper_ldub_mmu(env, taddr, oi, ra);
302
return helper_ldsb_mmu(env, taddr, oi, ra);
304
return helper_lduw_mmu(env, taddr, oi, ra);
306
return helper_ldsw_mmu(env, taddr, oi, ra);
308
return helper_ldul_mmu(env, taddr, oi, ra);
310
return helper_ldsl_mmu(env, taddr, oi, ra);
312
return helper_ldq_mmu(env, taddr, oi, ra);
314
g_assert_not_reached();
318
/*
 * Store one value to guest memory for a qemu_st opcode: dispatch on the
 * MO_SIZE bits of the MemOp to the matching per-width mmu store helper,
 * passing tb_ptr (as uintptr_t 'ra') for unwinding.
 * NOTE(review): extraction fragment -- the case labels, breaks and
 * braces of the switch are missing.  Bare original line numbers remain
 * interleaved below.
 */
static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
319
MemOpIdx oi, const void *tb_ptr)
321
MemOp mop = get_memop(oi);
322
uintptr_t ra = (uintptr_t)tb_ptr;
324
switch (mop & MO_SIZE) {
326
helper_stb_mmu(env, taddr, val, oi, ra);
329
helper_stw_mmu(env, taddr, val, oi, ra);
332
helper_stl_mmu(env, taddr, val, oi, ra);
335
helper_stq_mmu(env, taddr, val, oi, ra);
338
g_assert_not_reached();
342
/*
 * Case-label helper macros for the interpreter switch: on 64-bit hosts
 * CASE_32_64(x) expands to both the _i64 and _i32 case labels of opcode
 * x; on 32-bit hosts only to the _i32 label.
 * NOTE(review): extraction fragment -- the '#else'/'#endif' lines and
 * the '# define CASE_64(x)' header for the lone _i64 case label below
 * are missing.  Bare original line numbers remain interleaved.
 */
#if TCG_TARGET_REG_BITS == 64
343
# define CASE_32_64(x) \
344
case glue(glue(INDEX_op_, x), _i64): \
345
case glue(glue(INDEX_op_, x), _i32):
347
case glue(glue(INDEX_op_, x), _i64):
349
# define CASE_32_64(x) \
350
case glue(glue(INDEX_op_, x), _i32):
360
/*
 * Interpret the TCI bytecode of one translation block.  Guest "host"
 * registers live in regs[]; regs[TCG_AREG0] holds env and
 * regs[TCG_REG_CALL_STACK] points at the local 'stack' array used for
 * helper-call arguments (via libffi).  Returns the value passed to the
 * exit_tb opcode.
 * NOTE(review): this chunk is an extraction fragment -- the main
 * dispatch loop, the switch header, most case labels, breaks and all
 * brace-only lines are missing, and bare original line numbers remain
 * interleaved below.  Compare with upstream before editing.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
361
const void *v_tb_ptr)
363
const uint32_t *tb_ptr = v_tb_ptr;
364
tcg_target_ulong regs[TCG_TARGET_NB_REGS];
365
uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
368
regs[TCG_AREG0] = (tcg_target_ulong)env;
369
regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
375
TCGReg r0, r1, r2, r3, r4, r5;
380
uint64_t tmp64, taddr;
387
opc = extract32(insn, 0, 8);
392
void *call_slots[MAX_CALL_IARGS];
397
tci_args_nl(insn, tb_ptr, &len, &ptr);
398
func = ((void **)ptr)[0];
399
cif = ((void **)ptr)[1];
402
for (i = s = 0; i < n; ++i) {
403
ffi_type *t = cif->arg_types[i];
404
call_slots[i] = &stack[s];
405
s += DIV_ROUND_UP(t->size, 8);
409
tci_tb_ptr = (uintptr_t)tb_ptr;
410
ffi_call(cif, func, stack, call_slots);
422
if (sizeof(ffi_arg) == 8) {
423
regs[TCG_REG_R0] = (uint32_t)stack[0];
425
regs[TCG_REG_R0] = *(uint32_t *)stack;
433
/* NOTE(review): '®s[' below is a mis-encoding of '&regs[' (HTML &reg;). */
memcpy(®s[TCG_REG_R0], stack, 8);
436
memcpy(®s[TCG_REG_R0], stack, 16);
439
g_assert_not_reached();
444
tci_args_l(insn, tb_ptr, &ptr);
447
case INDEX_op_setcond_i32:
448
tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
449
regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
451
case INDEX_op_movcond_i32:
452
tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
453
tmp32 = tci_compare32(regs[r1], regs[r2], condition);
454
regs[r0] = regs[tmp32 ? r3 : r4];
456
#if TCG_TARGET_REG_BITS == 32
457
case INDEX_op_setcond2_i32:
458
tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
459
T1 = tci_uint64(regs[r2], regs[r1]);
460
T2 = tci_uint64(regs[r4], regs[r3]);
461
regs[r0] = tci_compare64(T1, T2, condition);
463
#elif TCG_TARGET_REG_BITS == 64
464
case INDEX_op_setcond_i64:
465
tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
466
regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
468
case INDEX_op_movcond_i64:
469
tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
470
tmp32 = tci_compare64(regs[r1], regs[r2], condition);
471
regs[r0] = regs[tmp32 ? r3 : r4];
475
tci_args_rr(insn, &r0, &r1);
478
case INDEX_op_tci_movi:
479
tci_args_ri(insn, &r0, &t1);
482
case INDEX_op_tci_movl:
483
tci_args_rl(insn, tb_ptr, &r0, &ptr);
484
regs[r0] = *(tcg_target_ulong *)ptr;
490
tci_args_rrs(insn, &r0, &r1, &ofs);
491
ptr = (void *)(regs[r1] + ofs);
492
regs[r0] = *(uint8_t *)ptr;
495
tci_args_rrs(insn, &r0, &r1, &ofs);
496
ptr = (void *)(regs[r1] + ofs);
497
regs[r0] = *(int8_t *)ptr;
500
tci_args_rrs(insn, &r0, &r1, &ofs);
501
ptr = (void *)(regs[r1] + ofs);
502
regs[r0] = *(uint16_t *)ptr;
505
tci_args_rrs(insn, &r0, &r1, &ofs);
506
ptr = (void *)(regs[r1] + ofs);
507
regs[r0] = *(int16_t *)ptr;
509
case INDEX_op_ld_i32:
511
tci_args_rrs(insn, &r0, &r1, &ofs);
512
ptr = (void *)(regs[r1] + ofs);
513
regs[r0] = *(uint32_t *)ptr;
516
tci_args_rrs(insn, &r0, &r1, &ofs);
517
ptr = (void *)(regs[r1] + ofs);
518
*(uint8_t *)ptr = regs[r0];
521
tci_args_rrs(insn, &r0, &r1, &ofs);
522
ptr = (void *)(regs[r1] + ofs);
523
*(uint16_t *)ptr = regs[r0];
525
case INDEX_op_st_i32:
527
tci_args_rrs(insn, &r0, &r1, &ofs);
528
ptr = (void *)(regs[r1] + ofs);
529
*(uint32_t *)ptr = regs[r0];
535
tci_args_rrr(insn, &r0, &r1, &r2);
536
regs[r0] = regs[r1] + regs[r2];
539
tci_args_rrr(insn, &r0, &r1, &r2);
540
regs[r0] = regs[r1] - regs[r2];
543
tci_args_rrr(insn, &r0, &r1, &r2);
544
regs[r0] = regs[r1] * regs[r2];
547
tci_args_rrr(insn, &r0, &r1, &r2);
548
regs[r0] = regs[r1] & regs[r2];
551
tci_args_rrr(insn, &r0, &r1, &r2);
552
regs[r0] = regs[r1] | regs[r2];
555
tci_args_rrr(insn, &r0, &r1, &r2);
556
regs[r0] = regs[r1] ^ regs[r2];
558
#if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
560
tci_args_rrr(insn, &r0, &r1, &r2);
561
regs[r0] = regs[r1] & ~regs[r2];
564
#if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
566
tci_args_rrr(insn, &r0, &r1, &r2);
567
regs[r0] = regs[r1] | ~regs[r2];
570
#if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
572
tci_args_rrr(insn, &r0, &r1, &r2);
573
regs[r0] = ~(regs[r1] ^ regs[r2]);
576
#if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
578
tci_args_rrr(insn, &r0, &r1, &r2);
579
regs[r0] = ~(regs[r1] & regs[r2]);
582
#if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
584
tci_args_rrr(insn, &r0, &r1, &r2);
585
regs[r0] = ~(regs[r1] | regs[r2]);
591
case INDEX_op_div_i32:
592
tci_args_rrr(insn, &r0, &r1, &r2);
593
regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
595
case INDEX_op_divu_i32:
596
tci_args_rrr(insn, &r0, &r1, &r2);
597
regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
599
case INDEX_op_rem_i32:
600
tci_args_rrr(insn, &r0, &r1, &r2);
601
regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
603
case INDEX_op_remu_i32:
604
tci_args_rrr(insn, &r0, &r1, &r2);
605
regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
607
#if TCG_TARGET_HAS_clz_i32
608
case INDEX_op_clz_i32:
609
tci_args_rrr(insn, &r0, &r1, &r2);
611
regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
614
#if TCG_TARGET_HAS_ctz_i32
615
case INDEX_op_ctz_i32:
616
tci_args_rrr(insn, &r0, &r1, &r2);
618
regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
621
#if TCG_TARGET_HAS_ctpop_i32
622
case INDEX_op_ctpop_i32:
623
tci_args_rr(insn, &r0, &r1);
624
regs[r0] = ctpop32(regs[r1]);
630
case INDEX_op_shl_i32:
631
tci_args_rrr(insn, &r0, &r1, &r2);
632
regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
634
case INDEX_op_shr_i32:
635
tci_args_rrr(insn, &r0, &r1, &r2);
636
regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
638
case INDEX_op_sar_i32:
639
tci_args_rrr(insn, &r0, &r1, &r2);
640
regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
642
#if TCG_TARGET_HAS_rot_i32
643
case INDEX_op_rotl_i32:
644
tci_args_rrr(insn, &r0, &r1, &r2);
645
regs[r0] = rol32(regs[r1], regs[r2] & 31);
647
case INDEX_op_rotr_i32:
648
tci_args_rrr(insn, &r0, &r1, &r2);
649
regs[r0] = ror32(regs[r1], regs[r2] & 31);
652
#if TCG_TARGET_HAS_deposit_i32
653
case INDEX_op_deposit_i32:
654
tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
655
regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
658
#if TCG_TARGET_HAS_extract_i32
659
case INDEX_op_extract_i32:
660
tci_args_rrbb(insn, &r0, &r1, &pos, &len);
661
regs[r0] = extract32(regs[r1], pos, len);
664
#if TCG_TARGET_HAS_sextract_i32
665
case INDEX_op_sextract_i32:
666
tci_args_rrbb(insn, &r0, &r1, &pos, &len);
667
regs[r0] = sextract32(regs[r1], pos, len);
670
case INDEX_op_brcond_i32:
671
tci_args_rl(insn, tb_ptr, &r0, &ptr);
672
if ((uint32_t)regs[r0]) {
676
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
677
case INDEX_op_add2_i32:
678
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
679
T1 = tci_uint64(regs[r3], regs[r2]);
680
T2 = tci_uint64(regs[r5], regs[r4]);
681
tci_write_reg64(regs, r1, r0, T1 + T2);
684
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
685
case INDEX_op_sub2_i32:
686
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
687
T1 = tci_uint64(regs[r3], regs[r2]);
688
T2 = tci_uint64(regs[r5], regs[r4]);
689
tci_write_reg64(regs, r1, r0, T1 - T2);
692
#if TCG_TARGET_HAS_mulu2_i32
693
case INDEX_op_mulu2_i32:
694
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
695
tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
696
tci_write_reg64(regs, r1, r0, tmp64);
699
#if TCG_TARGET_HAS_muls2_i32
700
case INDEX_op_muls2_i32:
701
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
702
tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
703
tci_write_reg64(regs, r1, r0, tmp64);
706
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
708
tci_args_rr(insn, &r0, &r1);
709
regs[r0] = (int8_t)regs[r1];
712
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
713
TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
715
tci_args_rr(insn, &r0, &r1);
716
regs[r0] = (int16_t)regs[r1];
719
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
721
tci_args_rr(insn, &r0, &r1);
722
regs[r0] = (uint8_t)regs[r1];
725
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
727
tci_args_rr(insn, &r0, &r1);
728
regs[r0] = (uint16_t)regs[r1];
731
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
733
tci_args_rr(insn, &r0, &r1);
734
regs[r0] = bswap16(regs[r1]);
737
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
739
tci_args_rr(insn, &r0, &r1);
740
regs[r0] = bswap32(regs[r1]);
743
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
745
tci_args_rr(insn, &r0, &r1);
746
regs[r0] = ~regs[r1];
750
tci_args_rr(insn, &r0, &r1);
751
regs[r0] = -regs[r1];
753
#if TCG_TARGET_REG_BITS == 64
756
case INDEX_op_ld32s_i64:
757
tci_args_rrs(insn, &r0, &r1, &ofs);
758
ptr = (void *)(regs[r1] + ofs);
759
regs[r0] = *(int32_t *)ptr;
761
case INDEX_op_ld_i64:
762
tci_args_rrs(insn, &r0, &r1, &ofs);
763
ptr = (void *)(regs[r1] + ofs);
764
regs[r0] = *(uint64_t *)ptr;
766
case INDEX_op_st_i64:
767
tci_args_rrs(insn, &r0, &r1, &ofs);
768
ptr = (void *)(regs[r1] + ofs);
769
*(uint64_t *)ptr = regs[r0];
774
case INDEX_op_div_i64:
775
tci_args_rrr(insn, &r0, &r1, &r2);
776
regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
778
case INDEX_op_divu_i64:
779
tci_args_rrr(insn, &r0, &r1, &r2);
780
regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
782
case INDEX_op_rem_i64:
783
tci_args_rrr(insn, &r0, &r1, &r2);
784
regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
786
case INDEX_op_remu_i64:
787
tci_args_rrr(insn, &r0, &r1, &r2);
788
regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
790
#if TCG_TARGET_HAS_clz_i64
791
case INDEX_op_clz_i64:
792
tci_args_rrr(insn, &r0, &r1, &r2);
793
regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
796
#if TCG_TARGET_HAS_ctz_i64
797
case INDEX_op_ctz_i64:
798
tci_args_rrr(insn, &r0, &r1, &r2);
799
regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
802
#if TCG_TARGET_HAS_ctpop_i64
803
case INDEX_op_ctpop_i64:
804
tci_args_rr(insn, &r0, &r1);
805
regs[r0] = ctpop64(regs[r1]);
808
#if TCG_TARGET_HAS_mulu2_i64
809
case INDEX_op_mulu2_i64:
810
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
811
/* NOTE(review): '®s[' below is a mis-encoding of '&regs[' (HTML &reg;). */
mulu64(®s[r0], ®s[r1], regs[r2], regs[r3]);
814
#if TCG_TARGET_HAS_muls2_i64
815
case INDEX_op_muls2_i64:
816
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
817
muls64(®s[r0], ®s[r1], regs[r2], regs[r3]);
820
#if TCG_TARGET_HAS_add2_i64
821
case INDEX_op_add2_i64:
822
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
823
T1 = regs[r2] + regs[r4];
824
T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
829
#if TCG_TARGET_HAS_add2_i64
830
case INDEX_op_sub2_i64:
831
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
832
T1 = regs[r2] - regs[r4];
833
T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
841
case INDEX_op_shl_i64:
842
tci_args_rrr(insn, &r0, &r1, &r2);
843
regs[r0] = regs[r1] << (regs[r2] & 63);
845
case INDEX_op_shr_i64:
846
tci_args_rrr(insn, &r0, &r1, &r2);
847
regs[r0] = regs[r1] >> (regs[r2] & 63);
849
case INDEX_op_sar_i64:
850
tci_args_rrr(insn, &r0, &r1, &r2);
851
regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
853
#if TCG_TARGET_HAS_rot_i64
854
case INDEX_op_rotl_i64:
855
tci_args_rrr(insn, &r0, &r1, &r2);
856
regs[r0] = rol64(regs[r1], regs[r2] & 63);
858
case INDEX_op_rotr_i64:
859
tci_args_rrr(insn, &r0, &r1, &r2);
860
regs[r0] = ror64(regs[r1], regs[r2] & 63);
863
#if TCG_TARGET_HAS_deposit_i64
864
case INDEX_op_deposit_i64:
865
tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
866
regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
869
#if TCG_TARGET_HAS_extract_i64
870
case INDEX_op_extract_i64:
871
tci_args_rrbb(insn, &r0, &r1, &pos, &len);
872
regs[r0] = extract64(regs[r1], pos, len);
875
#if TCG_TARGET_HAS_sextract_i64
876
case INDEX_op_sextract_i64:
877
tci_args_rrbb(insn, &r0, &r1, &pos, &len);
878
regs[r0] = sextract64(regs[r1], pos, len);
881
case INDEX_op_brcond_i64:
882
tci_args_rl(insn, tb_ptr, &r0, &ptr);
887
case INDEX_op_ext32s_i64:
888
case INDEX_op_ext_i32_i64:
889
tci_args_rr(insn, &r0, &r1);
890
regs[r0] = (int32_t)regs[r1];
892
case INDEX_op_ext32u_i64:
893
case INDEX_op_extu_i32_i64:
894
tci_args_rr(insn, &r0, &r1);
895
regs[r0] = (uint32_t)regs[r1];
897
#if TCG_TARGET_HAS_bswap64_i64
898
case INDEX_op_bswap64_i64:
899
tci_args_rr(insn, &r0, &r1);
900
regs[r0] = bswap64(regs[r1]);
907
case INDEX_op_exit_tb:
908
tci_args_l(insn, tb_ptr, &ptr);
909
return (uintptr_t)ptr;
911
case INDEX_op_goto_tb:
912
tci_args_l(insn, tb_ptr, &ptr);
913
tb_ptr = *(void **)ptr;
916
case INDEX_op_goto_ptr:
917
tci_args_r(insn, &r0);
918
ptr = (void *)regs[r0];
925
case INDEX_op_qemu_ld_a32_i32:
926
tci_args_rrm(insn, &r0, &r1, &oi);
927
taddr = (uint32_t)regs[r1];
929
case INDEX_op_qemu_ld_a64_i32:
930
if (TCG_TARGET_REG_BITS == 64) {
931
tci_args_rrm(insn, &r0, &r1, &oi);
934
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
935
taddr = tci_uint64(regs[r2], regs[r1]);
939
regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
942
case INDEX_op_qemu_ld_a32_i64:
943
if (TCG_TARGET_REG_BITS == 64) {
944
tci_args_rrm(insn, &r0, &r1, &oi);
945
taddr = (uint32_t)regs[r1];
947
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
948
taddr = (uint32_t)regs[r2];
952
case INDEX_op_qemu_ld_a64_i64:
953
if (TCG_TARGET_REG_BITS == 64) {
954
tci_args_rrm(insn, &r0, &r1, &oi);
957
tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
958
taddr = tci_uint64(regs[r3], regs[r2]);
962
tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
963
if (TCG_TARGET_REG_BITS == 32) {
964
tci_write_reg64(regs, r1, r0, tmp64);
970
case INDEX_op_qemu_st_a32_i32:
971
tci_args_rrm(insn, &r0, &r1, &oi);
972
taddr = (uint32_t)regs[r1];
974
case INDEX_op_qemu_st_a64_i32:
975
if (TCG_TARGET_REG_BITS == 64) {
976
tci_args_rrm(insn, &r0, &r1, &oi);
979
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
980
taddr = tci_uint64(regs[r2], regs[r1]);
984
tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
987
case INDEX_op_qemu_st_a32_i64:
988
if (TCG_TARGET_REG_BITS == 64) {
989
tci_args_rrm(insn, &r0, &r1, &oi);
991
taddr = (uint32_t)regs[r1];
993
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
994
tmp64 = tci_uint64(regs[r1], regs[r0]);
995
taddr = (uint32_t)regs[r2];
999
case INDEX_op_qemu_st_a64_i64:
1000
if (TCG_TARGET_REG_BITS == 64) {
1001
tci_args_rrm(insn, &r0, &r1, &oi);
1005
tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
1006
tmp64 = tci_uint64(regs[r1], regs[r0]);
1007
taddr = tci_uint64(regs[r3], regs[r2]);
1011
tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
1019
g_assert_not_reached();
1028
/*
 * Return the printable name of a TCG register for disassembly; r14/r15
 * print as "env"/"sp", which the build-time asserts tie to TCG_AREG0
 * and TCG_REG_CALL_STACK.
 * NOTE(review): extraction fragment -- braces and the final return of
 * regs[r] are missing.  Bare original line numbers remain interleaved.
 */
static const char *str_r(TCGReg r)
1030
static const char regs[TCG_TARGET_NB_REGS][4] = {
1031
"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
1032
"r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
1035
QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
1036
QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);
1038
assert((unsigned)r < TCG_TARGET_NB_REGS);
1042
/*
 * Return the printable name of a TCGCond for disassembly; asserts that
 * the value is in range and has a non-empty name.
 * NOTE(review): extraction fragment -- braces and the final return of
 * cond[c] are missing.  Bare original line numbers remain interleaved.
 */
static const char *str_c(TCGCond c)
1044
static const char cond[16][8] = {
1045
[TCG_COND_NEVER] = "never",
1046
[TCG_COND_ALWAYS] = "always",
1047
[TCG_COND_EQ] = "eq",
1048
[TCG_COND_NE] = "ne",
1049
[TCG_COND_LT] = "lt",
1050
[TCG_COND_GE] = "ge",
1051
[TCG_COND_LE] = "le",
1052
[TCG_COND_GT] = "gt",
1053
[TCG_COND_LTU] = "ltu",
1054
[TCG_COND_GEU] = "geu",
1055
[TCG_COND_LEU] = "leu",
1056
[TCG_COND_GTU] = "gtu",
1057
[TCG_COND_TSTEQ] = "tsteq",
1058
[TCG_COND_TSTNE] = "tstne",
1061
assert((unsigned)c < ARRAY_SIZE(cond));
1062
assert(cond[c][0] != 0);
1067
/*
 * Disassemble one TCI instruction at 'addr' via info->fprintf_func:
 * print the raw 32-bit word, then the opcode name (from tcg_op_defs)
 * and its decoded operands; returns the number of bytes consumed
 * (sizeof(insn)).
 * NOTE(review): extraction fragment -- the switch header, breaks and
 * brace-only lines are missing, and bare original line numbers remain
 * interleaved below.  Compare with upstream before editing.
 */
int print_insn_tci(bfd_vma addr, disassemble_info *info)
1069
const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
1070
const TCGOpDef *def;
1071
const char *op_name;
1074
TCGReg r0, r1, r2, r3, r4, r5;
1075
tcg_target_ulong i1;
1085
info->fprintf_func(info->stream, "%08x  ", insn);
1087
op = extract32(insn, 0, 8);
1088
def = &tcg_op_defs[op];
1089
op_name = def->name;
1093
case INDEX_op_exit_tb:
1094
case INDEX_op_goto_tb:
1095
tci_args_l(insn, tb_ptr, &ptr);
1096
info->fprintf_func(info->stream, "%-12s  %p", op_name, ptr);
1099
case INDEX_op_goto_ptr:
1100
tci_args_r(insn, &r0);
1101
info->fprintf_func(info->stream, "%-12s  %s", op_name, str_r(r0));
1105
tci_args_nl(insn, tb_ptr, &len, &ptr);
1106
info->fprintf_func(info->stream, "%-12s  %d, %p", op_name, len, ptr);
1109
case INDEX_op_brcond_i32:
1110
case INDEX_op_brcond_i64:
1111
tci_args_rl(insn, tb_ptr, &r0, &ptr);
1112
info->fprintf_func(info->stream, "%-12s  %s, 0, ne, %p",
1113
op_name, str_r(r0), ptr);
1116
case INDEX_op_setcond_i32:
1117
case INDEX_op_setcond_i64:
1118
tci_args_rrrc(insn, &r0, &r1, &r2, &c);
1119
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1120
op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
1123
case INDEX_op_tci_movi:
1124
tci_args_ri(insn, &r0, &i1);
1125
info->fprintf_func(info->stream, "%-12s  %s, 0x%" TCG_PRIlx,
1126
op_name, str_r(r0), i1);
1129
case INDEX_op_tci_movl:
1130
tci_args_rl(insn, tb_ptr, &r0, &ptr);
1131
info->fprintf_func(info->stream, "%-12s  %s, %p",
1132
op_name, str_r(r0), ptr);
1135
case INDEX_op_ld8u_i32:
1136
case INDEX_op_ld8u_i64:
1137
case INDEX_op_ld8s_i32:
1138
case INDEX_op_ld8s_i64:
1139
case INDEX_op_ld16u_i32:
1140
case INDEX_op_ld16u_i64:
1141
case INDEX_op_ld16s_i32:
1142
case INDEX_op_ld16s_i64:
1143
case INDEX_op_ld32u_i64:
1144
case INDEX_op_ld32s_i64:
1145
case INDEX_op_ld_i32:
1146
case INDEX_op_ld_i64:
1147
case INDEX_op_st8_i32:
1148
case INDEX_op_st8_i64:
1149
case INDEX_op_st16_i32:
1150
case INDEX_op_st16_i64:
1151
case INDEX_op_st32_i64:
1152
case INDEX_op_st_i32:
1153
case INDEX_op_st_i64:
1154
tci_args_rrs(insn, &r0, &r1, &s2);
1155
info->fprintf_func(info->stream, "%-12s  %s, %s, %d",
1156
op_name, str_r(r0), str_r(r1), s2);
1159
case INDEX_op_mov_i32:
1160
case INDEX_op_mov_i64:
1161
case INDEX_op_ext8s_i32:
1162
case INDEX_op_ext8s_i64:
1163
case INDEX_op_ext8u_i32:
1164
case INDEX_op_ext8u_i64:
1165
case INDEX_op_ext16s_i32:
1166
case INDEX_op_ext16s_i64:
1167
case INDEX_op_ext16u_i32:
1168
case INDEX_op_ext32s_i64:
1169
case INDEX_op_ext32u_i64:
1170
case INDEX_op_ext_i32_i64:
1171
case INDEX_op_extu_i32_i64:
1172
case INDEX_op_bswap16_i32:
1173
case INDEX_op_bswap16_i64:
1174
case INDEX_op_bswap32_i32:
1175
case INDEX_op_bswap32_i64:
1176
case INDEX_op_bswap64_i64:
1177
case INDEX_op_not_i32:
1178
case INDEX_op_not_i64:
1179
case INDEX_op_neg_i32:
1180
case INDEX_op_neg_i64:
1181
case INDEX_op_ctpop_i32:
1182
case INDEX_op_ctpop_i64:
1183
tci_args_rr(insn, &r0, &r1);
1184
info->fprintf_func(info->stream, "%-12s  %s, %s",
1185
op_name, str_r(r0), str_r(r1));
1188
case INDEX_op_add_i32:
1189
case INDEX_op_add_i64:
1190
case INDEX_op_sub_i32:
1191
case INDEX_op_sub_i64:
1192
case INDEX_op_mul_i32:
1193
case INDEX_op_mul_i64:
1194
case INDEX_op_and_i32:
1195
case INDEX_op_and_i64:
1196
case INDEX_op_or_i32:
1197
case INDEX_op_or_i64:
1198
case INDEX_op_xor_i32:
1199
case INDEX_op_xor_i64:
1200
case INDEX_op_andc_i32:
1201
case INDEX_op_andc_i64:
1202
case INDEX_op_orc_i32:
1203
case INDEX_op_orc_i64:
1204
case INDEX_op_eqv_i32:
1205
case INDEX_op_eqv_i64:
1206
case INDEX_op_nand_i32:
1207
case INDEX_op_nand_i64:
1208
case INDEX_op_nor_i32:
1209
case INDEX_op_nor_i64:
1210
case INDEX_op_div_i32:
1211
case INDEX_op_div_i64:
1212
case INDEX_op_rem_i32:
1213
case INDEX_op_rem_i64:
1214
case INDEX_op_divu_i32:
1215
case INDEX_op_divu_i64:
1216
case INDEX_op_remu_i32:
1217
case INDEX_op_remu_i64:
1218
case INDEX_op_shl_i32:
1219
case INDEX_op_shl_i64:
1220
case INDEX_op_shr_i32:
1221
case INDEX_op_shr_i64:
1222
case INDEX_op_sar_i32:
1223
case INDEX_op_sar_i64:
1224
case INDEX_op_rotl_i32:
1225
case INDEX_op_rotl_i64:
1226
case INDEX_op_rotr_i32:
1227
case INDEX_op_rotr_i64:
1228
case INDEX_op_clz_i32:
1229
case INDEX_op_clz_i64:
1230
case INDEX_op_ctz_i32:
1231
case INDEX_op_ctz_i64:
1232
tci_args_rrr(insn, &r0, &r1, &r2);
1233
info->fprintf_func(info->stream, "%-12s  %s, %s, %s",
1234
op_name, str_r(r0), str_r(r1), str_r(r2));
1237
case INDEX_op_deposit_i32:
1238
case INDEX_op_deposit_i64:
1239
tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
1240
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %d, %d",
1241
op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
1244
case INDEX_op_extract_i32:
1245
case INDEX_op_extract_i64:
1246
case INDEX_op_sextract_i32:
1247
case INDEX_op_sextract_i64:
1248
tci_args_rrbb(insn, &r0, &r1, &pos, &len);
1249
info->fprintf_func(info->stream, "%-12s  %s,%s,%d,%d",
1250
op_name, str_r(r0), str_r(r1), pos, len);
1253
case INDEX_op_movcond_i32:
1254
case INDEX_op_movcond_i64:
1255
case INDEX_op_setcond2_i32:
1256
tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
1257
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
1258
op_name, str_r(r0), str_r(r1), str_r(r2),
1259
str_r(r3), str_r(r4), str_c(c));
1262
case INDEX_op_mulu2_i32:
1263
case INDEX_op_mulu2_i64:
1264
case INDEX_op_muls2_i32:
1265
case INDEX_op_muls2_i64:
1266
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
1267
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1268
op_name, str_r(r0), str_r(r1),
1269
str_r(r2), str_r(r3));
1272
case INDEX_op_add2_i32:
1273
case INDEX_op_add2_i64:
1274
case INDEX_op_sub2_i32:
1275
case INDEX_op_sub2_i64:
1276
tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
1277
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s, %s",
1278
op_name, str_r(r0), str_r(r1), str_r(r2),
1279
str_r(r3), str_r(r4), str_r(r5));
1282
case INDEX_op_qemu_ld_a32_i32:
1283
case INDEX_op_qemu_st_a32_i32:
1286
case INDEX_op_qemu_ld_a32_i64:
1287
case INDEX_op_qemu_st_a32_i64:
1288
case INDEX_op_qemu_ld_a64_i32:
1289
case INDEX_op_qemu_st_a64_i32:
1290
len = 1 + DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
1292
case INDEX_op_qemu_ld_a64_i64:
1293
case INDEX_op_qemu_st_a64_i64:
1294
len = 2 * DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
1299
tci_args_rrm(insn, &r0, &r1, &oi);
1300
info->fprintf_func(info->stream, "%-12s  %s, %s, %x",
1301
op_name, str_r(r0), str_r(r1), oi);
1304
tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
1305
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s",
1306
op_name, str_r(r0), str_r(r1),
1307
str_r(r2), str_r(r3));
1310
tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
1311
info->fprintf_func(info->stream, "%-12s  %s, %s, %s, %s, %s",
1312
op_name, str_r(r0), str_r(r1),
1313
str_r(r2), str_r(r3), str_r(r4));
1316
g_assert_not_reached();
1323
info->fprintf_func(info->stream, "align");
1329
info->fprintf_func(info->stream, "illegal opcode %d", op);
1333
return sizeof(insn);