25 |
static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \ |
static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \ |
26 |
ppc_insn_t insn) |
ppc_insn_t insn) |
27 |
|
|
28 |
|
/* EFLAGS to Condition Register (CR) field - signed */ |
29 |
|
static m_uint32_t eflags_to_cr_signed[256] = { |
30 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
31 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
32 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
33 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
34 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
35 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
36 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
37 |
|
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, |
38 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
39 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
40 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
41 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
42 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
43 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
44 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
45 |
|
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, |
46 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
47 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
48 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
49 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
50 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
51 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
52 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
53 |
|
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, |
54 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
55 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
56 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
57 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
58 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
59 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
60 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
61 |
|
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, |
62 |
|
}; |
63 |
|
|
64 |
|
/* EFLAGS to Condition Register (CR) field - unsigned */ |
65 |
|
static m_uint32_t eflags_to_cr_unsigned[256] = { |
66 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
67 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
68 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
69 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
70 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
71 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
72 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
73 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
74 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
75 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
76 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
77 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
78 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
79 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
80 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
81 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
82 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
83 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
84 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
85 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
86 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
87 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
88 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
89 |
|
0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, |
90 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
91 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
92 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
93 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
94 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
95 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
96 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
97 |
|
0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, |
98 |
|
}; |
99 |
|
|
100 |
/* Load a 32 bit immediate value */ |
/* Load a 32 bit immediate value */ |
101 |
static inline void ppc32_load_imm(ppc32_jit_tcb_t *b,u_int reg,m_uint32_t val) |
static inline void ppc32_load_imm(ppc32_jit_tcb_t *b,u_int reg,m_uint32_t val) |
102 |
{ |
{ |
118 |
amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4); |
amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4); |
119 |
} |
} |
120 |
|
|
121 |
|
/* |
122 |
|
* Try to branch directly to the specified JIT block without returning to |
123 |
|
* main loop. |
124 |
|
*/ |
125 |
|
static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
126 |
|
m_uint32_t new_ia) |
127 |
|
{ |
128 |
|
m_uint32_t new_page,ia_hash,ia_offset; |
129 |
|
u_char *test1,*test2,*test3; |
130 |
|
|
131 |
|
new_page = new_ia & PPC32_MIN_PAGE_MASK; |
132 |
|
ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2; |
133 |
|
ia_hash = ppc32_jit_get_ia_hash(new_ia); |
134 |
|
|
135 |
|
/* Get JIT block info in %rdx */ |
136 |
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX, |
137 |
|
AMD64_R15,OFFSET(cpu_ppc_t,exec_blk_map),8); |
138 |
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX, |
139 |
|
AMD64_RBX,ia_hash*sizeof(void *),8); |
140 |
|
|
141 |
|
/* no JIT block found ? */ |
142 |
|
amd64_test_reg_reg(b->jit_ptr,AMD64_RDX,AMD64_RDX); |
143 |
|
test1 = b->jit_ptr; |
144 |
|
amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1); |
145 |
|
|
146 |
|
/* Check block IA */ |
147 |
|
ppc32_load_imm(b,AMD64_RAX,new_page); |
148 |
|
amd64_alu_reg_membase_size(b->jit_ptr,X86_CMP,X86_EAX,AMD64_RDX, |
149 |
|
OFFSET(ppc32_jit_tcb_t,start_ia),4); |
150 |
|
test2 = b->jit_ptr; |
151 |
|
amd64_branch8(b->jit_ptr, X86_CC_NE, 0, 1); |
152 |
|
|
153 |
|
/* Jump to the code */ |
154 |
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RSI, |
155 |
|
AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8); |
156 |
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX, |
157 |
|
AMD64_RSI,ia_offset * sizeof(void *),8); |
158 |
|
|
159 |
|
amd64_test_reg_reg(b->jit_ptr,AMD64_RBX,AMD64_RBX); |
160 |
|
test3 = b->jit_ptr; |
161 |
|
amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1); |
162 |
|
amd64_jump_reg(b->jit_ptr,AMD64_RBX); |
163 |
|
|
164 |
|
/* Returns to caller... */ |
165 |
|
amd64_patch(test1,b->jit_ptr); |
166 |
|
amd64_patch(test2,b->jit_ptr); |
167 |
|
amd64_patch(test3,b->jit_ptr); |
168 |
|
|
169 |
|
ppc32_set_ia(b,new_ia); |
170 |
|
ppc32_jit_tcb_push_epilog(b); |
171 |
|
} |
172 |
|
|
173 |
/* Set Jump */ |
/* Set Jump */ |
174 |
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
175 |
m_uint32_t new_ia,int local_jump) |
m_uint32_t new_ia,int local_jump) |
189 |
ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia); |
ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia); |
190 |
amd64_jump32(b->jit_ptr,0); |
amd64_jump32(b->jit_ptr,0); |
191 |
} |
} |
192 |
} else { |
} else { |
193 |
/* save PC */ |
if (cpu->exec_blk_direct_jump) { |
194 |
ppc32_set_ia(b,new_ia); |
/* Block lookup optimization */ |
195 |
|
ppc32_try_direct_far_jump(cpu,b,new_ia); |
196 |
/* address is in another block, for now, returns to caller */ |
} else { |
197 |
ppc32_jit_tcb_push_epilog(b); |
ppc32_set_ia(b,new_ia); |
198 |
|
ppc32_jit_tcb_push_epilog(b); |
199 |
|
} |
200 |
} |
} |
201 |
} |
} |
202 |
|
|
|
/* Load the Condition Register (CR) into the specified host register */ |
|
|
static forced_inline void ppc32_load_cr(ppc32_jit_tcb_t *b,u_int host_reg) |
|
|
{ |
|
|
amd64_mov_reg_membase(b->jit_ptr,host_reg,AMD64_R15,OFFSET(cpu_ppc_t,cr),4); |
|
|
} |
|
|
|
|
|
/* Store the Condition Register (CR) from the specified host register */ |
|
|
static forced_inline void ppc32_store_cr(ppc32_jit_tcb_t *b,u_int host_reg) |
|
|
{ |
|
|
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr),host_reg,4); |
|
|
} |
|
|
|
|
203 |
/* Load a GPR into the specified host register */ |
/* Load a GPR into the specified host register */ |
204 |
static forced_inline void ppc32_load_gpr(ppc32_jit_tcb_t *b,u_int host_reg, |
static forced_inline void ppc32_load_gpr(ppc32_jit_tcb_t *b,u_int host_reg, |
205 |
u_int ppc_reg) |
u_int ppc_reg) |
224 |
|
|
225 |
/* |
/* |
226 |
* Update CR from %eflags |
* Update CR from %eflags |
227 |
* %eax, %ecx, %edx, %esi are modified. |
* %rax, %rdx, %rsi are modified. |
228 |
*/ |
*/ |
|
#define PPC32_CR_LT_BIT 3 |
|
|
#define PPC32_CR_GT_BIT 2 |
|
|
#define PPC32_CR_EQ_BIT 1 |
|
|
#define PPC32_CR_SO_BIT 0 |
|
|
|
|
229 |
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);

   /* Select the EFLAGS -> CR field translation table */
   if (is_signed)
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   else
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);

   /* %rax = table[eflags & 0xFF] (scale 4: m_uint32_t entries) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
256 |
|
|
257 |
/* |
/* |
429 |
if (write_op) { |
if (write_op) { |
430 |
amd64_test_membase_imm_size(b->jit_ptr, |
amd64_test_membase_imm_size(b->jit_ptr, |
431 |
AMD64_RCX,OFFSET(mts32_entry_t,flags), |
AMD64_RCX,OFFSET(mts32_entry_t,flags), |
432 |
MTS_FLAG_COW,4); |
MTS_FLAG_COW|MTS_FLAG_EXEC,4); |
433 |
test2 = b->jit_ptr; |
test2 = b->jit_ptr; |
434 |
amd64_branch8(b->jit_ptr, X86_CC_NZ, 0, 1); |
amd64_branch8(b->jit_ptr, X86_CC_NZ, 0, 1); |
435 |
} |
} |
472 |
amd64_patch(p_exception,b->jit_ptr); |
amd64_patch(p_exception,b->jit_ptr); |
473 |
} |
} |
474 |
|
|
475 |
|
/* Virtual Breakpoint */ |
476 |
|
void ppc32_emit_breakpoint(ppc32_jit_tcb_t *b) |
477 |
|
{ |
478 |
|
amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8); |
479 |
|
ppc32_emit_c_call(b,ppc32_run_breakpoint); |
480 |
|
} |
481 |
|
|
482 |
/* Emit unhandled instruction code */ |
/* Emit unhandled instruction code */ |
483 |
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
484 |
ppc_insn_t opcode) |
ppc_insn_t opcode) |
904 |
int bo = bits(insn,21,25); |
int bo = bits(insn,21,25); |
905 |
int bi = bits(insn,16,20); |
int bi = bits(insn,16,20); |
906 |
int bd = bits(insn,2,15); |
int bd = bits(insn,2,15); |
907 |
|
u_int cr_field,cr_bit; |
908 |
m_uint32_t new_ia; |
m_uint32_t new_ia; |
909 |
u_char *jump_ptr; |
u_char *jump_ptr; |
910 |
int local_jump; |
int local_jump; |
923 |
new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2); |
new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2); |
924 |
|
|
925 |
/* Test the condition bit */ |
/* Test the condition bit */ |
926 |
amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr), |
cr_field = ppc32_get_cr_field(bi); |
927 |
(1 << (31 - bi)),4); |
cr_bit = ppc32_get_cr_bit(bi); |
928 |
|
|
929 |
|
amd64_test_membase_imm_size(b->jit_ptr, |
930 |
|
AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field), |
931 |
|
(1 << cr_bit),4); |
932 |
|
|
933 |
local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr); |
local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr); |
934 |
|
|
959 |
int bo = bits(insn,21,25); |
int bo = bits(insn,21,25); |
960 |
int bi = bits(insn,16,20); |
int bi = bits(insn,16,20); |
961 |
int bd = bits(insn,2,15); |
int bd = bits(insn,2,15); |
962 |
|
u_int cr_field,cr_bit; |
963 |
m_uint32_t new_ia; |
m_uint32_t new_ia; |
964 |
u_char *jump_ptr; |
u_char *jump_ptr; |
965 |
int local_jump; |
int local_jump; |
989 |
|
|
990 |
/* Test the condition bit */ |
/* Test the condition bit */ |
991 |
if (!((bo >> 4) & 0x01)) { |
if (!((bo >> 4) & 0x01)) { |
992 |
amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr), |
cr_field = ppc32_get_cr_field(bi); |
993 |
(1 << (31 - bi)),4); |
cr_bit = ppc32_get_cr_bit(bi); |
994 |
|
|
995 |
|
amd64_test_membase_imm_size(b->jit_ptr, |
996 |
|
AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field), |
997 |
|
(1 << cr_bit),4); |
998 |
|
|
999 |
amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE); |
amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE); |
1000 |
amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX); |
amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX); |
1001 |
} |
} |
1031 |
int bo = bits(insn,21,25); |
int bo = bits(insn,21,25); |
1032 |
int bi = bits(insn,16,20); |
int bi = bits(insn,16,20); |
1033 |
int bd = bits(insn,2,15); |
int bd = bits(insn,2,15); |
1034 |
|
u_int cr_field,cr_bit; |
1035 |
m_uint32_t new_ia; |
m_uint32_t new_ia; |
1036 |
u_char *jump_ptr; |
u_char *jump_ptr; |
1037 |
int cond,ctr; |
int cond,ctr; |
1056 |
|
|
1057 |
/* Test the condition bit */ |
/* Test the condition bit */ |
1058 |
if (!((bo >> 4) & 0x01)) { |
if (!((bo >> 4) & 0x01)) { |
1059 |
amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr), |
cr_field = ppc32_get_cr_field(bi); |
1060 |
(1 << (31 - bi)),4); |
cr_bit = ppc32_get_cr_bit(bi); |
1061 |
|
|
1062 |
|
amd64_test_membase_imm_size(b->jit_ptr, |
1063 |
|
AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field), |
1064 |
|
(1 << cr_bit),4); |
1065 |
|
|
1066 |
amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE); |
amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE); |
1067 |
amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX); |
amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX); |
1068 |
} |
} |
1153 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1154 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1155 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1156 |
/* test $ba bit */ |
/* test $ba bit */ |
1157 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1158 |
|
AMD64_R15, |
1159 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1160 |
|
(1 << ppc32_get_cr_bit(ba))); |
1161 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1162 |
|
|
1163 |
/* test $bb bit */ |
/* test $bb bit */ |
1164 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1165 |
|
AMD64_R15, |
1166 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1167 |
|
(1 << ppc32_get_cr_bit(bb))); |
1168 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1169 |
|
|
1170 |
/* result of AND between $ba and $bb */ |
/* result of AND between $ba and $bb */ |
1172 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1173 |
|
|
1174 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1175 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1176 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1177 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1178 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1179 |
ppc32_store_cr(b,AMD64_RSI); |
|
1180 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1181 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1182 |
|
AMD64_R15, |
1183 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1184 |
|
AMD64_RBX,4); |
1185 |
return(0); |
return(0); |
1186 |
} |
} |
1187 |
|
|
1192 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1193 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1194 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1195 |
/* test $ba bit */ |
/* test $ba bit */ |
1196 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1197 |
|
AMD64_R15, |
1198 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1199 |
|
(1 << ppc32_get_cr_bit(ba))); |
1200 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1201 |
|
|
1202 |
/* test $bb bit */ |
/* test $bb bit */ |
1203 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1204 |
|
AMD64_R15, |
1205 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1206 |
|
(1 << ppc32_get_cr_bit(bb))); |
1207 |
amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE); |
1208 |
|
|
1209 |
/* result of AND between $ba and $bb */ |
/* result of AND between $ba and $bb */ |
1211 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1212 |
|
|
1213 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1214 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1215 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1216 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1217 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1218 |
ppc32_store_cr(b,AMD64_RSI); |
|
1219 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1220 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1221 |
|
AMD64_R15, |
1222 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1223 |
|
AMD64_RBX,4); |
1224 |
return(0); |
return(0); |
1225 |
} |
} |
1226 |
|
|
1231 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1232 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1233 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1234 |
/* test $ba bit */ |
/* test $ba bit */ |
1235 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1236 |
|
AMD64_R15, |
1237 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1238 |
|
(1 << ppc32_get_cr_bit(ba))); |
1239 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1240 |
|
|
1241 |
/* test $bb bit */ |
/* test $bb bit */ |
1242 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1243 |
|
AMD64_R15, |
1244 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1245 |
|
(1 << ppc32_get_cr_bit(bb))); |
1246 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1247 |
|
|
1248 |
/* result of XOR between $ba and $bb */ |
/* result of XOR between $ba and $bb */ |
1251 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1252 |
|
|
1253 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1254 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1255 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1256 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1257 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1258 |
ppc32_store_cr(b,AMD64_RSI); |
|
1259 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1260 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1261 |
|
AMD64_R15, |
1262 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1263 |
|
AMD64_RBX,4); |
1264 |
return(0); |
return(0); |
1265 |
} |
} |
1266 |
|
|
1271 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1272 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1273 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1274 |
/* test $ba bit */ |
/* test $ba bit */ |
1275 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1276 |
|
AMD64_R15, |
1277 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1278 |
|
(1 << ppc32_get_cr_bit(ba))); |
1279 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1280 |
|
|
1281 |
/* test $bb bit */ |
/* test $bb bit */ |
1282 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1283 |
|
AMD64_R15, |
1284 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1285 |
|
(1 << ppc32_get_cr_bit(bb))); |
1286 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1287 |
|
|
1288 |
/* result of NAND between $ba and $bb */ |
/* result of NAND between $ba and $bb */ |
1291 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1292 |
|
|
1293 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1294 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1295 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1296 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1297 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1298 |
ppc32_store_cr(b,AMD64_RSI); |
|
1299 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1300 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1301 |
|
AMD64_R15, |
1302 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1303 |
|
AMD64_RBX,4); |
1304 |
return(0); |
return(0); |
1305 |
} |
} |
1306 |
|
|
1311 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1312 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1313 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1314 |
/* test $ba bit */ |
/* test $ba bit */ |
1315 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1316 |
|
AMD64_R15, |
1317 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1318 |
|
(1 << ppc32_get_cr_bit(ba))); |
1319 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1320 |
|
|
1321 |
/* test $bb bit */ |
/* test $bb bit */ |
1322 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1323 |
|
AMD64_R15, |
1324 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1325 |
|
(1 << ppc32_get_cr_bit(bb))); |
1326 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1327 |
|
|
1328 |
/* result of NOR between $ba and $bb */ |
/* result of NOR between $ba and $bb */ |
1331 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1332 |
|
|
1333 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1334 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1335 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1336 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1337 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1338 |
ppc32_store_cr(b,AMD64_RSI); |
|
1339 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1340 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1341 |
|
AMD64_R15, |
1342 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1343 |
|
AMD64_RBX,4); |
1344 |
return(0); |
return(0); |
1345 |
} |
} |
1346 |
|
|
1351 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1352 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1353 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1354 |
/* test $ba bit */ |
/* test $ba bit */ |
1355 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1356 |
|
AMD64_R15, |
1357 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1358 |
|
(1 << ppc32_get_cr_bit(ba))); |
1359 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1360 |
|
|
1361 |
/* test $bb bit */ |
/* test $bb bit */ |
1362 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1363 |
|
AMD64_R15, |
1364 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1365 |
|
(1 << ppc32_get_cr_bit(bb))); |
1366 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1367 |
|
|
1368 |
/* result of NOR between $ba and $bb */ |
/* result of NOR between $ba and $bb */ |
1370 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1371 |
|
|
1372 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1373 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1374 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1375 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1376 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1377 |
ppc32_store_cr(b,AMD64_RSI); |
|
1378 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1379 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1380 |
|
AMD64_R15, |
1381 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1382 |
|
AMD64_RBX,4); |
1383 |
return(0); |
return(0); |
1384 |
} |
} |
1385 |
|
|
1390 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1391 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1392 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1393 |
/* test $ba bit */ |
/* test $ba bit */ |
1394 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1395 |
|
AMD64_R15, |
1396 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1397 |
|
(1 << ppc32_get_cr_bit(ba))); |
1398 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1399 |
|
|
1400 |
/* test $bb bit */ |
/* test $bb bit */ |
1401 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1402 |
|
AMD64_R15, |
1403 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1404 |
|
(1 << ppc32_get_cr_bit(bb))); |
1405 |
amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE); |
1406 |
|
|
1407 |
/* result of ORC between $ba and $bb */ |
/* result of ORC between $ba and $bb */ |
1409 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1410 |
|
|
1411 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1412 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1413 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1414 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1415 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1416 |
ppc32_store_cr(b,AMD64_RSI); |
|
1417 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1418 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1419 |
|
AMD64_R15, |
1420 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1421 |
|
AMD64_RBX,4); |
1422 |
return(0); |
return(0); |
1423 |
} |
} |
1424 |
|
|
1429 |
int bb = bits(insn,16,20); |
int bb = bits(insn,16,20); |
1430 |
int ba = bits(insn,11,15); |
int ba = bits(insn,11,15); |
1431 |
|
|
|
ppc32_load_cr(b,AMD64_RSI); |
|
|
|
|
1432 |
/* test $ba bit */ |
/* test $ba bit */ |
1433 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1434 |
|
AMD64_R15, |
1435 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)), |
1436 |
|
(1 << ppc32_get_cr_bit(ba))); |
1437 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE); |
1438 |
|
|
1439 |
/* test $bb bit */ |
/* test $bb bit */ |
1440 |
amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4); |
amd64_test_membase_imm(b->jit_ptr, |
1441 |
|
AMD64_R15, |
1442 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)), |
1443 |
|
(1 << ppc32_get_cr_bit(bb))); |
1444 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE); |
1445 |
|
|
1446 |
/* result of XOR between $ba and $bb */ |
/* result of XOR between $ba and $bb */ |
1448 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01); |
1449 |
|
|
1450 |
/* set/clear $bd bit depending on the result */ |
/* set/clear $bd bit depending on the result */ |
1451 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd))); |
amd64_alu_membase_imm_size(b->jit_ptr,X86_AND, |
1452 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd)); |
AMD64_R15, |
1453 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX); |
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1454 |
|
~(1 << ppc32_get_cr_bit(bd)),4); |
1455 |
ppc32_store_cr(b,AMD64_RSI); |
|
1456 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd)); |
1457 |
|
amd64_alu_membase_reg_size(b->jit_ptr,X86_OR, |
1458 |
|
AMD64_R15, |
1459 |
|
PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)), |
1460 |
|
AMD64_RBX,4); |
1461 |
return(0); |
return(0); |
1462 |
} |
} |
1463 |
|
|
1725 |
{ |
{ |
1726 |
int rd = bits(insn,23,25); |
int rd = bits(insn,23,25); |
1727 |
int rs = bits(insn,18,20); |
int rs = bits(insn,18,20); |
|
m_uint32_t dmask; |
|
1728 |
|
|
1729 |
/* %rax = %rbx = CR */ |
/* Load "rs" field in %rdx */ |
1730 |
ppc32_load_cr(b,AMD64_RAX); |
amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX, |
1731 |
amd64_mov_reg_reg(b->jit_ptr,X86_EBX,X86_EAX,8); |
AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4); |
1732 |
|
|
1733 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RBX,(28 - (rs << 2))); |
/* Store it in "rd" field */ |
1734 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x0F); |
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd), |
1735 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(28 - (rd << 2))); |
AMD64_RDX,4); |
|
|
|
|
/* clear the destination bits */ |
|
|
dmask = (0xF0000000 >> (rd << 2)); |
|
|
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~dmask); |
|
|
|
|
|
/* set the new field value */ |
|
|
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RBX); |
|
|
ppc32_store_cr(b,AMD64_RAX); |
|
1736 |
return(0); |
return(0); |
1737 |
} |
} |
1738 |
|
|
1740 |
DECLARE_INSN(MFCR) |
DECLARE_INSN(MFCR) |
1741 |
{ |
{ |
1742 |
int rd = bits(insn,21,25); |
int rd = bits(insn,21,25); |
1743 |
|
int i; |
1744 |
|
|
1745 |
|
amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RAX,AMD64_RAX); |
1746 |
|
|
1747 |
|
for(i=0;i<8;i++) { |
1748 |
|
/* load field in %rdx */ |
1749 |
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX, |
1750 |
|
AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4); |
1751 |
|
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RAX,4); |
1752 |
|
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RDX); |
1753 |
|
} |
1754 |
|
|
|
ppc32_load_cr(b,AMD64_RAX); |
|
1755 |
ppc32_store_gpr(b,rd,AMD64_RAX); |
ppc32_store_gpr(b,rd,AMD64_RAX); |
1756 |
return(0); |
return(0); |
1757 |
} |
} |
1784 |
{ |
{ |
1785 |
int rs = bits(insn,21,25); |
int rs = bits(insn,21,25); |
1786 |
int crm = bits(insn,12,19); |
int crm = bits(insn,12,19); |
|
m_uint32_t mask = 0; |
|
1787 |
int i; |
int i; |
1788 |
|
|
1789 |
|
ppc32_load_gpr(b,AMD64_RDX,rs); |
1790 |
|
|
1791 |
for(i=0;i<8;i++) |
for(i=0;i<8;i++) |
1792 |
if (crm & (1 << i)) |
if (crm & (1 << (7 - i))) { |
1793 |
mask |= 0xF << (i << 2); |
amd64_mov_reg_reg(b->jit_ptr,AMD64_RAX,AMD64_RDX,8); |
1794 |
|
|
1795 |
ppc32_load_cr(b,AMD64_RAX); |
if (i != 7) |
1796 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~mask); |
amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RAX,28 - (i << 2)); |
1797 |
|
|
1798 |
ppc32_load_gpr(b,AMD64_RDX,rs); |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x0F); |
1799 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RDX,mask); |
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i), |
1800 |
|
AMD64_RAX,4); |
1801 |
|
} |
1802 |
|
|
|
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RAX); |
|
|
ppc32_store_cr(b,AMD64_RDX); |
|
1803 |
return(0); |
return(0); |
1804 |
} |
} |
1805 |
|
|
2596 |
{ ppc32_emit_XORI , 0xfc000000 , 0x68000000 }, |
{ ppc32_emit_XORI , 0xfc000000 , 0x68000000 }, |
2597 |
{ ppc32_emit_XORIS , 0xfc000000 , 0x6c000000 }, |
{ ppc32_emit_XORIS , 0xfc000000 , 0x6c000000 }, |
2598 |
{ ppc32_emit_unknown , 0x00000000 , 0x00000000 }, |
{ ppc32_emit_unknown , 0x00000000 , 0x00000000 }, |
2599 |
|
{ NULL , 0x00000000 , 0x00000000 }, |
2600 |
}; |
}; |