xref: /illumos-kvm-cmd/target-sh4/translate.c (revision 68396ea9)
1 /*
2  *  SH4 translation
3  *
4  *  Copyright (c) 2005 Samuel Tardieu
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19 #include <stdarg.h>
20 #include <stdlib.h>
21 #include <stdio.h>
22 #include <string.h>
23 #include <inttypes.h>
24 
25 #define DEBUG_DISAS
26 #define SH4_DEBUG_DISAS
27 //#define SH4_SINGLE_STEP
28 
29 #include "cpu.h"
30 #include "exec-all.h"
31 #include "disas.h"
32 #include "tcg-op.h"
33 #include "qemu-common.h"
34 
35 #include "helper.h"
36 #define GEN_HELPER 1
37 #include "helper.h"
38 
39 typedef struct DisasContext {
40     struct TranslationBlock *tb;
41     target_ulong pc;
42     uint32_t sr;
43     uint32_t fpscr;
44     uint16_t opcode;
45     uint32_t flags;
46     int bstate;
47     int memidx;
48     uint32_t delayed_pc;
49     int singlestep_enabled;
50     uint32_t features;
51     int has_movcal;
52 } DisasContext;
53 
54 #if defined(CONFIG_USER_ONLY)
55 #define IS_USER(ctx) 1
56 #else
57 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
58 #endif
59 
60 enum {
61     BS_NONE     = 0, /* We go out of the TB without reaching a branch or an
62                       * exception condition
63                       */
64     BS_STOP     = 1, /* We want to stop translation for any reason */
65     BS_BRANCH   = 2, /* We reached a branch condition     */
66     BS_EXCP     = 3, /* We reached an exception condition */
67 };
68 
69 /* global register indexes */
70 static TCGv_ptr cpu_env;
71 static TCGv cpu_gregs[24];
72 static TCGv cpu_pc, cpu_sr, cpu_ssr, cpu_spc, cpu_gbr;
73 static TCGv cpu_vbr, cpu_sgr, cpu_dbr, cpu_mach, cpu_macl;
74 static TCGv cpu_pr, cpu_fpscr, cpu_fpul, cpu_ldst;
75 static TCGv cpu_fregs[32];
76 
77 /* internal register indexes */
78 static TCGv cpu_flags, cpu_delayed_pc;
79 
80 static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
81 
82 #include "gen-icount.h"
83 
84 static void sh4_translate_init(void)
85 {
86     int i;
87     static int done_init = 0;
88     static const char * const gregnames[24] = {
89         "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
90         "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
91         "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
92         "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
93         "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
94     };
95     static const char * const fregnames[32] = {
96          "FPR0_BANK0",  "FPR1_BANK0",  "FPR2_BANK0",  "FPR3_BANK0",
97          "FPR4_BANK0",  "FPR5_BANK0",  "FPR6_BANK0",  "FPR7_BANK0",
98          "FPR8_BANK0",  "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
99         "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
100          "FPR0_BANK1",  "FPR1_BANK1",  "FPR2_BANK1",  "FPR3_BANK1",
101          "FPR4_BANK1",  "FPR5_BANK1",  "FPR6_BANK1",  "FPR7_BANK1",
102          "FPR8_BANK1",  "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
103         "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
104     };
105 
106     if (done_init)
107         return;
108 
109     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
110 
111     for (i = 0; i < 24; i++)
112         cpu_gregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
113                                               offsetof(CPUState, gregs[i]),
114                                               gregnames[i]);
115 
116     cpu_pc = tcg_global_mem_new_i32(TCG_AREG0,
117                                     offsetof(CPUState, pc), "PC");
118     cpu_sr = tcg_global_mem_new_i32(TCG_AREG0,
119                                     offsetof(CPUState, sr), "SR");
120     cpu_ssr = tcg_global_mem_new_i32(TCG_AREG0,
121                                      offsetof(CPUState, ssr), "SSR");
122     cpu_spc = tcg_global_mem_new_i32(TCG_AREG0,
123                                      offsetof(CPUState, spc), "SPC");
124     cpu_gbr = tcg_global_mem_new_i32(TCG_AREG0,
125                                      offsetof(CPUState, gbr), "GBR");
126     cpu_vbr = tcg_global_mem_new_i32(TCG_AREG0,
127                                      offsetof(CPUState, vbr), "VBR");
128     cpu_sgr = tcg_global_mem_new_i32(TCG_AREG0,
129                                      offsetof(CPUState, sgr), "SGR");
130     cpu_dbr = tcg_global_mem_new_i32(TCG_AREG0,
131                                      offsetof(CPUState, dbr), "DBR");
132     cpu_mach = tcg_global_mem_new_i32(TCG_AREG0,
133                                       offsetof(CPUState, mach), "MACH");
134     cpu_macl = tcg_global_mem_new_i32(TCG_AREG0,
135                                       offsetof(CPUState, macl), "MACL");
136     cpu_pr = tcg_global_mem_new_i32(TCG_AREG0,
137                                     offsetof(CPUState, pr), "PR");
138     cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
139                                        offsetof(CPUState, fpscr), "FPSCR");
140     cpu_fpul = tcg_global_mem_new_i32(TCG_AREG0,
141                                       offsetof(CPUState, fpul), "FPUL");
142 
143     cpu_flags = tcg_global_mem_new_i32(TCG_AREG0,
144 				       offsetof(CPUState, flags), "_flags_");
145     cpu_delayed_pc = tcg_global_mem_new_i32(TCG_AREG0,
146 					    offsetof(CPUState, delayed_pc),
147 					    "_delayed_pc_");
148     cpu_ldst = tcg_global_mem_new_i32(TCG_AREG0,
149 				      offsetof(CPUState, ldst), "_ldst_");
150 
151     for (i = 0; i < 32; i++)
152         cpu_fregs[i] = tcg_global_mem_new_i32(TCG_AREG0,
153                                               offsetof(CPUState, fregs[i]),
154                                               fregnames[i]);
155 
156     /* register helpers */
157 #define GEN_HELPER 2
158 #include "helper.h"
159 
160     done_init = 1;
161 }
162 
163 void cpu_dump_state(CPUState * env, FILE * f,
164 		    int (*cpu_fprintf) (FILE * f, const char *fmt, ...),
165 		    int flags)
166 {
167     int i;
168     cpu_fprintf(f, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
169 		env->pc, env->sr, env->pr, env->fpscr);
170     cpu_fprintf(f, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
171 		env->spc, env->ssr, env->gbr, env->vbr);
172     cpu_fprintf(f, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
173 		env->sgr, env->dbr, env->delayed_pc, env->fpul);
174     for (i = 0; i < 24; i += 4) {
175 	cpu_fprintf(f, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
176 		    i, env->gregs[i], i + 1, env->gregs[i + 1],
177 		    i + 2, env->gregs[i + 2], i + 3, env->gregs[i + 3]);
178     }
179     if (env->flags & DELAY_SLOT) {
180 	cpu_fprintf(f, "in delay slot (delayed_pc=0x%08x)\n",
181 		    env->delayed_pc);
182     } else if (env->flags & DELAY_SLOT_CONDITIONAL) {
183 	cpu_fprintf(f, "in conditional delay slot (delayed_pc=0x%08x)\n",
184 		    env->delayed_pc);
185     }
186 }
187 
188 void cpu_reset(CPUSH4State * env)
189 {
190     if (qemu_loglevel_mask(CPU_LOG_RESET)) {
191         qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
192         log_cpu_state(env, 0);
193     }
194 
195     memset(env, 0, offsetof(CPUSH4State, breakpoints));
196     tlb_flush(env, 1);
197 
198     env->pc = 0xA0000000;
199 #if defined(CONFIG_USER_ONLY)
200     env->fpscr = FPSCR_PR; /* value for userspace according to the kernel */
201     set_float_rounding_mode(float_round_nearest_even, &env->fp_status); /* ?! */
202 #else
203     env->sr = SR_MD | SR_RB | SR_BL | SR_I3 | SR_I2 | SR_I1 | SR_I0;
204     env->fpscr = FPSCR_DN | FPSCR_RM_ZERO; /* CPU reset value according to SH4 manual */
205     set_float_rounding_mode(float_round_to_zero, &env->fp_status);
206     set_flush_to_zero(1, &env->fp_status);
207 #endif
208     set_default_nan_mode(1, &env->fp_status);
209 }
210 
211 typedef struct {
212     const char *name;
213     int id;
214     uint32_t pvr;
215     uint32_t prr;
216     uint32_t cvr;
217     uint32_t features;
218 } sh4_def_t;
219 
220 static sh4_def_t sh4_defs[] = {
221     {
222 	.name = "SH7750R",
223 	.id = SH_CPU_SH7750R,
224 	.pvr = 0x00050000,
225 	.prr = 0x00000100,
226 	.cvr = 0x00110000,
227 	.features = SH_FEATURE_BCR3_AND_BCR4,
228     }, {
229 	.name = "SH7751R",
230 	.id = SH_CPU_SH7751R,
231 	.pvr = 0x04050005,
232 	.prr = 0x00000113,
233 	.cvr = 0x00110000,	/* Neutered caches, should be 0x20480000 */
234 	.features = SH_FEATURE_BCR3_AND_BCR4,
235     }, {
236 	.name = "SH7785",
237 	.id = SH_CPU_SH7785,
238 	.pvr = 0x10300700,
239 	.prr = 0x00000200,
240 	.cvr = 0x71440211,
241 	.features = SH_FEATURE_SH4A,
242      },
243 };
244 
245 static const sh4_def_t *cpu_sh4_find_by_name(const char *name)
246 {
247     int i;
248 
249     if (strcasecmp(name, "any") == 0)
250 	return &sh4_defs[0];
251 
252     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
253 	if (strcasecmp(name, sh4_defs[i].name) == 0)
254 	    return &sh4_defs[i];
255 
256     return NULL;
257 }
258 
259 void sh4_cpu_list(FILE *f, fprintf_function cpu_fprintf)
260 {
261     int i;
262 
263     for (i = 0; i < ARRAY_SIZE(sh4_defs); i++)
264 	(*cpu_fprintf)(f, "%s\n", sh4_defs[i].name);
265 }
266 
267 static void cpu_register(CPUSH4State *env, const sh4_def_t *def)
268 {
269     env->pvr = def->pvr;
270     env->prr = def->prr;
271     env->cvr = def->cvr;
272     env->id = def->id;
273 }
274 
275 CPUSH4State *cpu_sh4_init(const char *cpu_model)
276 {
277     CPUSH4State *env;
278     const sh4_def_t *def;
279 
280     def = cpu_sh4_find_by_name(cpu_model);
281     if (!def)
282 	return NULL;
283     env = qemu_mallocz(sizeof(CPUSH4State));
284     env->features = def->features;
285     cpu_exec_init(env);
286     env->movcal_backup_tail = &(env->movcal_backup);
287     sh4_translate_init();
288     env->cpu_model_str = cpu_model;
289     cpu_reset(env);
290     cpu_register(env, def);
291     qemu_init_vcpu(env);
292     return env;
293 }
294 
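/* Emit the end of a translation block.  When the destination lies in the
 * same guest page as the current TB and single-stepping is off, chain
 * directly to the next TB via goto_tb/exit_tb((long)tb + n); otherwise fall
 * back to exit_tb(0), raising a debug exception first if single-stepping.
 */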
295 static void gen_goto_tb(DisasContext * ctx, int n, target_ulong dest)
296 {
297     TranslationBlock *tb;
298     tb = ctx->tb;
299 
300     if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
301 	!ctx->singlestep_enabled) {
302 	/* Use a direct jump if in same page and singlestep not enabled */
303         tcg_gen_goto_tb(n);
304         tcg_gen_movi_i32(cpu_pc, dest);
305         tcg_gen_exit_tb((long) tb + n);
306     } else {
307         tcg_gen_movi_i32(cpu_pc, dest);
308         if (ctx->singlestep_enabled)
309             gen_helper_debug();
310         tcg_gen_exit_tb(0);
311     }
312 }
313 
314 static void gen_jump(DisasContext * ctx)
315 {
316     if (ctx->delayed_pc == (uint32_t) - 1) {
317 	/* Target is not statically known, it comes necessarily from a
318 	   delayed jump, as immediate jumps are conditional jumps */
319 	tcg_gen_mov_i32(cpu_pc, cpu_delayed_pc);
320 	if (ctx->singlestep_enabled)
321 	    gen_helper_debug();
322 	tcg_gen_exit_tb(0);
323     } else {
324 	gen_goto_tb(ctx, 0, ctx->delayed_pc);
325     }
326 }
327 
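/* Record the target of a delayed conditional branch (bt/s, bf/s): the
 * target goes into cpu_delayed_pc, and DELAY_SLOT_TRUE is set in cpu_flags
 * only when SR.T matches the expected value 't', so that
 * gen_delayed_conditional_jump() knows whether to take the branch once the
 * delay slot has been executed.
 */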
328 static inline void gen_branch_slot(uint32_t delayed_pc, int t)
329 {
330     TCGv sr;
331     int label = gen_new_label();
332     tcg_gen_movi_i32(cpu_delayed_pc, delayed_pc);
333     sr = tcg_temp_new();
334     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
335     tcg_gen_brcondi_i32(t ? TCG_COND_EQ:TCG_COND_NE, sr, 0, label);
336     tcg_gen_ori_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
337     gen_set_label(label);
338 }
339 
340 /* Immediate conditional jump (bt or bf) */
341 static void gen_conditional_jump(DisasContext * ctx,
342 				 target_ulong ift, target_ulong ifnott)
343 {
344     int l1;
345     TCGv sr;
346 
347     l1 = gen_new_label();
348     sr = tcg_temp_new();
349     tcg_gen_andi_i32(sr, cpu_sr, SR_T);
350     tcg_gen_brcondi_i32(TCG_COND_NE, sr, 0, l1);
351     gen_goto_tb(ctx, 0, ifnott);
352     gen_set_label(l1);
353     gen_goto_tb(ctx, 1, ift);
354 }
355 
356 /* Delayed conditional jump (bt or bf) */
357 static void gen_delayed_conditional_jump(DisasContext * ctx)
358 {
359     int l1;
360     TCGv ds;
361 
362     l1 = gen_new_label();
363     ds = tcg_temp_new();
364     tcg_gen_andi_i32(ds, cpu_flags, DELAY_SLOT_TRUE);
365     tcg_gen_brcondi_i32(TCG_COND_NE, ds, 0, l1);
366     gen_goto_tb(ctx, 1, ctx->pc + 2);
367     gen_set_label(l1);
368     tcg_gen_andi_i32(cpu_flags, cpu_flags, ~DELAY_SLOT_TRUE);
369     gen_jump(ctx);
370 }
371 
372 static inline void gen_set_t(void)
373 {
374     tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_T);
375 }
376 
377 static inline void gen_clr_t(void)
378 {
379     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
380 }
381 
382 static inline void gen_cmp(int cond, TCGv t0, TCGv t1)
383 {
384     TCGv t;
385 
386     t = tcg_temp_new();
387     tcg_gen_setcond_i32(cond, t, t1, t0);
388     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
389     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
390 
391     tcg_temp_free(t);
392 }
393 
394 static inline void gen_cmp_imm(int cond, TCGv t0, int32_t imm)
395 {
396     TCGv t;
397 
398     t = tcg_temp_new();
399     tcg_gen_setcondi_i32(cond, t, t0, imm);
400     tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
401     tcg_gen_or_i32(cpu_sr, cpu_sr, t);
402 
403     tcg_temp_free(t);
404 }
405 
406 static inline void gen_store_flags(uint32_t flags)
407 {
408     tcg_gen_andi_i32(cpu_flags, cpu_flags, DELAY_SLOT_TRUE);
409     tcg_gen_ori_i32(cpu_flags, cpu_flags, flags);
410 }
411 
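/* Copy bit p1 of t1 into bit p0 of t0, leaving the other bits of t0
 * unchanged; used below to assemble SR.Q, SR.M and SR.T for div0s.
 */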
412 static inline void gen_copy_bit_i32(TCGv t0, int p0, TCGv t1, int p1)
413 {
414     TCGv tmp = tcg_temp_new();
415 
416     p0 &= 0x1f;
417     p1 &= 0x1f;
418 
419     tcg_gen_andi_i32(tmp, t1, (1 << p1));
420     tcg_gen_andi_i32(t0, t0, ~(1 << p0));
421     if (p0 < p1)
422         tcg_gen_shri_i32(tmp, tmp, p1 - p0);
423     else if (p0 > p1)
424         tcg_gen_shli_i32(tmp, tmp, p0 - p1);
425     tcg_gen_or_i32(t0, t0, tmp);
426 
427     tcg_temp_free(tmp);
428 }
429 
430 static inline void gen_load_fpr64(TCGv_i64 t, int reg)
431 {
432     tcg_gen_concat_i32_i64(t, cpu_fregs[reg + 1], cpu_fregs[reg]);
433 }
434 
435 static inline void gen_store_fpr64 (TCGv_i64 t, int reg)
436 {
437     TCGv_i32 tmp = tcg_temp_new_i32();
438     tcg_gen_trunc_i64_i32(tmp, t);
439     tcg_gen_mov_i32(cpu_fregs[reg + 1], tmp);
440     tcg_gen_shri_i64(t, t, 32);
441     tcg_gen_trunc_i64_i32(tmp, t);
442     tcg_gen_mov_i32(cpu_fregs[reg], tmp);
443     tcg_temp_free_i32(tmp);
444 }
445 
446 #define B3_0 (ctx->opcode & 0xf)
447 #define B6_4 ((ctx->opcode >> 4) & 0x7)
448 #define B7_4 ((ctx->opcode >> 4) & 0xf)
449 #define B7_0 (ctx->opcode & 0xff)
450 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
451 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
452   (ctx->opcode & 0xfff))
453 #define B11_8 ((ctx->opcode >> 8) & 0xf)
454 #define B15_12 ((ctx->opcode >> 12) & 0xf)
455 
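/* Register-access helpers: REG(x) yields the general register visible in
 * the current mode (R0-R7 map to the BANK1 copies when SR.MD and SR.RB are
 * both set), while ALTREG(x) yields the opposite bank.  FREG/DREG map an
 * instruction field onto cpu_fregs[] honouring the FPSCR.FR bank swap, and
 * XREG/XHACK do the same for the paired moves used when FPSCR.SZ is set,
 * where an odd register number selects the other-bank (XD) pair.
 */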
456 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
457 		(cpu_gregs[x + 16]) : (cpu_gregs[x]))
458 
459 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
460 		? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
461 
462 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
463 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
464 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
465 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
466 
467 #define CHECK_NOT_DELAY_SLOT \
468   if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))     \
469   {                                                           \
470       gen_helper_raise_slot_illegal_instruction();            \
471       ctx->bstate = BS_EXCP;                                  \
472       return;                                                 \
473   }
474 
475 #define CHECK_PRIVILEGED                                        \
476   if (IS_USER(ctx)) {                                           \
477       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
478          gen_helper_raise_slot_illegal_instruction();           \
479       } else {                                                  \
480          gen_helper_raise_illegal_instruction();                \
481       }                                                         \
482       ctx->bstate = BS_EXCP;                                    \
483       return;                                                   \
484   }
485 
486 #define CHECK_FPU_ENABLED                                       \
487   if (ctx->flags & SR_FD) {                                     \
488       if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
489           gen_helper_raise_slot_fpu_disable();                  \
490       } else {                                                  \
491           gen_helper_raise_fpu_disable();                       \
492       }                                                         \
493       ctx->bstate = BS_EXCP;                                    \
494       return;                                                   \
495   }
496 
497 static void _decode_opc(DisasContext * ctx)
498 {
499     /* This code tries to make movcal emulation sufficiently
500        accurate for Linux purposes.  This instruction writes
501        memory, and prior to that, always allocates a cache line.
502        It is used in two contexts:
503        - in memcpy, where data is copied in blocks, the first write
504        to a block uses movca.l for performance.
505        - in arch/sh/mm/cache-sh4.c, the movca.l + ocbi combination is used
506        to flush the cache. Here, the data written by movca.l is never
507        written to memory, and the data written is just bogus.
508 
509        To simulate this, when we emulate movca.l we store the value to
510        memory, but we also remember the previous content. If we see ocbi,
511        we check whether movca.l was done previously for that address. If so,
512        the write should not have hit memory, so we restore the previous content.
513        When we see an instruction that is neither movca.l
514        nor ocbi, the previous content is discarded.
515 
516        To optimize, we only try to flush stores when we're at the start of
517        TB, or if we already saw movca.l in this TB and did not flush stores
518        yet.  */
519     if (ctx->has_movcal)
520 	{
521 	  int opcode = ctx->opcode & 0xf0ff;
522 	  if (opcode != 0x0093 /* ocbi */
523 	      && opcode != 0x00c3 /* movca.l */)
524 	      {
525 		  gen_helper_discard_movcal_backup ();
526 		  ctx->has_movcal = 0;
527 	      }
528 	}
529 
530 #if 0
531     fprintf(stderr, "Translating opcode 0x%04x\n", ctx->opcode);
532 #endif
533 
534     switch (ctx->opcode) {
535     case 0x0019:		/* div0u */
536 	tcg_gen_andi_i32(cpu_sr, cpu_sr, ~(SR_M | SR_Q | SR_T));
537 	return;
538     case 0x000b:		/* rts */
539 	CHECK_NOT_DELAY_SLOT
540 	tcg_gen_mov_i32(cpu_delayed_pc, cpu_pr);
541 	ctx->flags |= DELAY_SLOT;
542 	ctx->delayed_pc = (uint32_t) - 1;
543 	return;
544     case 0x0028:		/* clrmac */
545 	tcg_gen_movi_i32(cpu_mach, 0);
546 	tcg_gen_movi_i32(cpu_macl, 0);
547 	return;
548     case 0x0048:		/* clrs */
549 	tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_S);
550 	return;
551     case 0x0008:		/* clrt */
552 	gen_clr_t();
553 	return;
554     case 0x0038:		/* ldtlb */
555 	CHECK_PRIVILEGED
556 	gen_helper_ldtlb();
557 	return;
558     case 0x002b:		/* rte */
559 	CHECK_PRIVILEGED
560 	CHECK_NOT_DELAY_SLOT
561 	tcg_gen_mov_i32(cpu_sr, cpu_ssr);
562 	tcg_gen_mov_i32(cpu_delayed_pc, cpu_spc);
563 	ctx->flags |= DELAY_SLOT;
564 	ctx->delayed_pc = (uint32_t) - 1;
565 	return;
566     case 0x0058:		/* sets */
567 	tcg_gen_ori_i32(cpu_sr, cpu_sr, SR_S);
568 	return;
569     case 0x0018:		/* sett */
570 	gen_set_t();
571 	return;
572     case 0xfbfd:		/* frchg */
573 	tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_FR);
574 	ctx->bstate = BS_STOP;
575 	return;
576     case 0xf3fd:		/* fschg */
577 	tcg_gen_xori_i32(cpu_fpscr, cpu_fpscr, FPSCR_SZ);
578 	ctx->bstate = BS_STOP;
579 	return;
580     case 0x0009:		/* nop */
581 	return;
582     case 0x001b:		/* sleep */
583 	CHECK_PRIVILEGED
584 	gen_helper_sleep(tcg_const_i32(ctx->pc + 2));
585 	return;
586     }
587 
588     switch (ctx->opcode & 0xf000) {
589     case 0x1000:		/* mov.l Rm,@(disp,Rn) */
590 	{
591 	    TCGv addr = tcg_temp_new();
592 	    tcg_gen_addi_i32(addr, REG(B11_8), B3_0 * 4);
593 	    tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
594 	    tcg_temp_free(addr);
595 	}
596 	return;
597     case 0x5000:		/* mov.l @(disp,Rm),Rn */
598 	{
599 	    TCGv addr = tcg_temp_new();
600 	    tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 4);
601 	    tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
602 	    tcg_temp_free(addr);
603 	}
604 	return;
605     case 0xe000:		/* mov #imm,Rn */
606 	tcg_gen_movi_i32(REG(B11_8), B7_0s);
607 	return;
608     case 0x9000:		/* mov.w @(disp,PC),Rn */
609 	{
610 	    TCGv addr = tcg_const_i32(ctx->pc + 4 + B7_0 * 2);
611 	    tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
612 	    tcg_temp_free(addr);
613 	}
614 	return;
615     case 0xd000:		/* mov.l @(disp,PC),Rn */
616 	{
617 	    TCGv addr = tcg_const_i32((ctx->pc + 4 + B7_0 * 4) & ~3);
618 	    tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
619 	    tcg_temp_free(addr);
620 	}
621 	return;
622     case 0x7000:		/* add #imm,Rn */
623 	tcg_gen_addi_i32(REG(B11_8), REG(B11_8), B7_0s);
624 	return;
625     case 0xa000:		/* bra disp */
626 	CHECK_NOT_DELAY_SLOT
627 	ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
628 	tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
629 	ctx->flags |= DELAY_SLOT;
630 	return;
631     case 0xb000:		/* bsr disp */
632 	CHECK_NOT_DELAY_SLOT
633 	tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
634 	ctx->delayed_pc = ctx->pc + 4 + B11_0s * 2;
635 	tcg_gen_movi_i32(cpu_delayed_pc, ctx->delayed_pc);
636 	ctx->flags |= DELAY_SLOT;
637 	return;
638     }
639 
640     switch (ctx->opcode & 0xf00f) {
641     case 0x6003:		/* mov Rm,Rn */
642 	tcg_gen_mov_i32(REG(B11_8), REG(B7_4));
643 	return;
644     case 0x2000:		/* mov.b Rm,@Rn */
645 	tcg_gen_qemu_st8(REG(B7_4), REG(B11_8), ctx->memidx);
646 	return;
647     case 0x2001:		/* mov.w Rm,@Rn */
648 	tcg_gen_qemu_st16(REG(B7_4), REG(B11_8), ctx->memidx);
649 	return;
650     case 0x2002:		/* mov.l Rm,@Rn */
651 	tcg_gen_qemu_st32(REG(B7_4), REG(B11_8), ctx->memidx);
652 	return;
653     case 0x6000:		/* mov.b @Rm,Rn */
654 	tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
655 	return;
656     case 0x6001:		/* mov.w @Rm,Rn */
657 	tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
658 	return;
659     case 0x6002:		/* mov.l @Rm,Rn */
660 	tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
661 	return;
662     case 0x2004:		/* mov.b Rm,@-Rn */
663 	{
664 	    TCGv addr = tcg_temp_new();
665 	    tcg_gen_subi_i32(addr, REG(B11_8), 1);
666 	    tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);	/* might cause re-execution */
667 	    tcg_gen_mov_i32(REG(B11_8), addr);			/* modify register status */
668 	    tcg_temp_free(addr);
669 	}
670 	return;
671     case 0x2005:		/* mov.w Rm,@-Rn */
672 	{
673 	    TCGv addr = tcg_temp_new();
674 	    tcg_gen_subi_i32(addr, REG(B11_8), 2);
675 	    tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
676 	    tcg_gen_mov_i32(REG(B11_8), addr);
677 	    tcg_temp_free(addr);
678 	}
679 	return;
680     case 0x2006:		/* mov.l Rm,@-Rn */
681 	{
682 	    TCGv addr = tcg_temp_new();
683 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
684 	    tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
685 	    tcg_gen_mov_i32(REG(B11_8), addr);
686 	}
687 	return;
688     case 0x6004:		/* mov.b @Rm+,Rn */
689 	tcg_gen_qemu_ld8s(REG(B11_8), REG(B7_4), ctx->memidx);
690 	if ( B11_8 != B7_4 )
691 		tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 1);
692 	return;
693     case 0x6005:		/* mov.w @Rm+,Rn */
694 	tcg_gen_qemu_ld16s(REG(B11_8), REG(B7_4), ctx->memidx);
695 	if ( B11_8 != B7_4 )
696 		tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
697 	return;
698     case 0x6006:		/* mov.l @Rm+,Rn */
699 	tcg_gen_qemu_ld32s(REG(B11_8), REG(B7_4), ctx->memidx);
700 	if ( B11_8 != B7_4 )
701 		tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
702 	return;
703     case 0x0004:		/* mov.b Rm,@(R0,Rn) */
704 	{
705 	    TCGv addr = tcg_temp_new();
706 	    tcg_gen_add_i32(addr, REG(B11_8), REG(0));
707 	    tcg_gen_qemu_st8(REG(B7_4), addr, ctx->memidx);
708 	    tcg_temp_free(addr);
709 	}
710 	return;
711     case 0x0005:		/* mov.w Rm,@(R0,Rn) */
712 	{
713 	    TCGv addr = tcg_temp_new();
714 	    tcg_gen_add_i32(addr, REG(B11_8), REG(0));
715 	    tcg_gen_qemu_st16(REG(B7_4), addr, ctx->memidx);
716 	    tcg_temp_free(addr);
717 	}
718 	return;
719     case 0x0006:		/* mov.l Rm,@(R0,Rn) */
720 	{
721 	    TCGv addr = tcg_temp_new();
722 	    tcg_gen_add_i32(addr, REG(B11_8), REG(0));
723 	    tcg_gen_qemu_st32(REG(B7_4), addr, ctx->memidx);
724 	    tcg_temp_free(addr);
725 	}
726 	return;
727     case 0x000c:		/* mov.b @(R0,Rm),Rn */
728 	{
729 	    TCGv addr = tcg_temp_new();
730 	    tcg_gen_add_i32(addr, REG(B7_4), REG(0));
731 	    tcg_gen_qemu_ld8s(REG(B11_8), addr, ctx->memidx);
732 	    tcg_temp_free(addr);
733 	}
734 	return;
735     case 0x000d:		/* mov.w @(R0,Rm),Rn */
736 	{
737 	    TCGv addr = tcg_temp_new();
738 	    tcg_gen_add_i32(addr, REG(B7_4), REG(0));
739 	    tcg_gen_qemu_ld16s(REG(B11_8), addr, ctx->memidx);
740 	    tcg_temp_free(addr);
741 	}
742 	return;
743     case 0x000e:		/* mov.l @(R0,Rm),Rn */
744 	{
745 	    TCGv addr = tcg_temp_new();
746 	    tcg_gen_add_i32(addr, REG(B7_4), REG(0));
747 	    tcg_gen_qemu_ld32s(REG(B11_8), addr, ctx->memidx);
748 	    tcg_temp_free(addr);
749 	}
750 	return;
751     case 0x6008:		/* swap.b Rm,Rn */
752 	{
753 	    TCGv high, low;
754 	    high = tcg_temp_new();
755 	    tcg_gen_andi_i32(high, REG(B7_4), 0xffff0000);
756 	    low = tcg_temp_new();
757 	    tcg_gen_ext16u_i32(low, REG(B7_4));
758 	    tcg_gen_bswap16_i32(low, low);
759 	    tcg_gen_or_i32(REG(B11_8), high, low);
760 	    tcg_temp_free(low);
761 	    tcg_temp_free(high);
762 	}
763 	return;
764     case 0x6009:		/* swap.w Rm,Rn */
765 	{
766 	    TCGv high, low;
767 	    high = tcg_temp_new();
768 	    tcg_gen_shli_i32(high, REG(B7_4), 16);
769 	    low = tcg_temp_new();
770 	    tcg_gen_shri_i32(low, REG(B7_4), 16);
771 	    tcg_gen_ext16u_i32(low, low);
772 	    tcg_gen_or_i32(REG(B11_8), high, low);
773 	    tcg_temp_free(low);
774 	    tcg_temp_free(high);
775 	}
776 	return;
777     case 0x200d:		/* xtrct Rm,Rn */
778 	{
779 	    TCGv high, low;
780 	    high = tcg_temp_new();
781 	    tcg_gen_shli_i32(high, REG(B7_4), 16);
782 	    low = tcg_temp_new();
783 	    tcg_gen_shri_i32(low, REG(B11_8), 16);
784 	    tcg_gen_ext16u_i32(low, low);
785 	    tcg_gen_or_i32(REG(B11_8), high, low);
786 	    tcg_temp_free(low);
787 	    tcg_temp_free(high);
788 	}
789 	return;
790     case 0x300c:		/* add Rm,Rn */
791 	tcg_gen_add_i32(REG(B11_8), REG(B11_8), REG(B7_4));
792 	return;
793     case 0x300e:		/* addc Rm,Rn */
794 	gen_helper_addc(REG(B11_8), REG(B7_4), REG(B11_8));
795 	return;
796     case 0x300f:		/* addv Rm,Rn */
797 	gen_helper_addv(REG(B11_8), REG(B7_4), REG(B11_8));
798 	return;
799     case 0x2009:		/* and Rm,Rn */
800 	tcg_gen_and_i32(REG(B11_8), REG(B11_8), REG(B7_4));
801 	return;
802     case 0x3000:		/* cmp/eq Rm,Rn */
803 	gen_cmp(TCG_COND_EQ, REG(B7_4), REG(B11_8));
804 	return;
805     case 0x3003:		/* cmp/ge Rm,Rn */
806 	gen_cmp(TCG_COND_GE, REG(B7_4), REG(B11_8));
807 	return;
808     case 0x3007:		/* cmp/gt Rm,Rn */
809 	gen_cmp(TCG_COND_GT, REG(B7_4), REG(B11_8));
810 	return;
811     case 0x3006:		/* cmp/hi Rm,Rn */
812 	gen_cmp(TCG_COND_GTU, REG(B7_4), REG(B11_8));
813 	return;
814     case 0x3002:		/* cmp/hs Rm,Rn */
815 	gen_cmp(TCG_COND_GEU, REG(B7_4), REG(B11_8));
816 	return;
817     case 0x200c:		/* cmp/str Rm,Rn */
818 	{
819 	    TCGv cmp1 = tcg_temp_new();
820 	    TCGv cmp2 = tcg_temp_new();
821 	    tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
822 	    tcg_gen_xor_i32(cmp1, REG(B7_4), REG(B11_8));
823 	    tcg_gen_andi_i32(cmp2, cmp1, 0xff000000);
824 	    tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
825 	    tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
826 	    tcg_gen_andi_i32(cmp2, cmp1, 0x00ff0000);
827 	    tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
828 	    tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
829 	    tcg_gen_andi_i32(cmp2, cmp1, 0x0000ff00);
830 	    tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
831 	    tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
832 	    tcg_gen_andi_i32(cmp2, cmp1, 0x000000ff);
833 	    tcg_gen_setcondi_i32(TCG_COND_EQ, cmp2, cmp2, 0);
834 	    tcg_gen_or_i32(cpu_sr, cpu_sr, cmp2);
835 	    tcg_temp_free(cmp2);
836 	    tcg_temp_free(cmp1);
837 	}
838 	return;
839     case 0x2007:		/* div0s Rm,Rn */
840 	{
841 	    gen_copy_bit_i32(cpu_sr, 8, REG(B11_8), 31);	/* SR_Q */
842 	    gen_copy_bit_i32(cpu_sr, 9, REG(B7_4), 31);		/* SR_M */
843 	    TCGv val = tcg_temp_new();
844 	    tcg_gen_xor_i32(val, REG(B7_4), REG(B11_8));
845 	    gen_copy_bit_i32(cpu_sr, 0, val, 31);		/* SR_T */
846 	    tcg_temp_free(val);
847 	}
848 	return;
849     case 0x3004:		/* div1 Rm,Rn */
850 	gen_helper_div1(REG(B11_8), REG(B7_4), REG(B11_8));
851 	return;
852     case 0x300d:		/* dmuls.l Rm,Rn */
853 	{
854 	    TCGv_i64 tmp1 = tcg_temp_new_i64();
855 	    TCGv_i64 tmp2 = tcg_temp_new_i64();
856 
857 	    tcg_gen_ext_i32_i64(tmp1, REG(B7_4));
858 	    tcg_gen_ext_i32_i64(tmp2, REG(B11_8));
859 	    tcg_gen_mul_i64(tmp1, tmp1, tmp2);
860 	    tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
861 	    tcg_gen_shri_i64(tmp1, tmp1, 32);
862 	    tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
863 
864 	    tcg_temp_free_i64(tmp2);
865 	    tcg_temp_free_i64(tmp1);
866 	}
867 	return;
868     case 0x3005:		/* dmulu.l Rm,Rn */
869 	{
870 	    TCGv_i64 tmp1 = tcg_temp_new_i64();
871 	    TCGv_i64 tmp2 = tcg_temp_new_i64();
872 
873 	    tcg_gen_extu_i32_i64(tmp1, REG(B7_4));
874 	    tcg_gen_extu_i32_i64(tmp2, REG(B11_8));
875 	    tcg_gen_mul_i64(tmp1, tmp1, tmp2);
876 	    tcg_gen_trunc_i64_i32(cpu_macl, tmp1);
877 	    tcg_gen_shri_i64(tmp1, tmp1, 32);
878 	    tcg_gen_trunc_i64_i32(cpu_mach, tmp1);
879 
880 	    tcg_temp_free_i64(tmp2);
881 	    tcg_temp_free_i64(tmp1);
882 	}
883 	return;
884     case 0x600e:		/* exts.b Rm,Rn */
885 	tcg_gen_ext8s_i32(REG(B11_8), REG(B7_4));
886 	return;
887     case 0x600f:		/* exts.w Rm,Rn */
888 	tcg_gen_ext16s_i32(REG(B11_8), REG(B7_4));
889 	return;
890     case 0x600c:		/* extu.b Rm,Rn */
891 	tcg_gen_ext8u_i32(REG(B11_8), REG(B7_4));
892 	return;
893     case 0x600d:		/* extu.w Rm,Rn */
894 	tcg_gen_ext16u_i32(REG(B11_8), REG(B7_4));
895 	return;
896     case 0x000f:		/* mac.l @Rm+,@Rn+ */
897 	{
898 	    TCGv arg0, arg1;
899 	    arg0 = tcg_temp_new();
900 	    tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
901 	    arg1 = tcg_temp_new();
902 	    tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
903 	    gen_helper_macl(arg0, arg1);
904 	    tcg_temp_free(arg1);
905 	    tcg_temp_free(arg0);
906 	    tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
907 	    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
908 	}
909 	return;
910     case 0x400f:		/* mac.w @Rm+,@Rn+ */
911 	{
912 	    TCGv arg0, arg1;
913 	    arg0 = tcg_temp_new();
914 	    tcg_gen_qemu_ld32s(arg0, REG(B7_4), ctx->memidx);
915 	    arg1 = tcg_temp_new();
916 	    tcg_gen_qemu_ld32s(arg1, REG(B11_8), ctx->memidx);
917 	    gen_helper_macw(arg0, arg1);
918 	    tcg_temp_free(arg1);
919 	    tcg_temp_free(arg0);
920 	    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 2);
921 	    tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 2);
922 	}
923 	return;
924     case 0x0007:		/* mul.l Rm,Rn */
925 	tcg_gen_mul_i32(cpu_macl, REG(B7_4), REG(B11_8));
926 	return;
927     case 0x200f:		/* muls.w Rm,Rn */
928 	{
929 	    TCGv arg0, arg1;
930 	    arg0 = tcg_temp_new();
931 	    tcg_gen_ext16s_i32(arg0, REG(B7_4));
932 	    arg1 = tcg_temp_new();
933 	    tcg_gen_ext16s_i32(arg1, REG(B11_8));
934 	    tcg_gen_mul_i32(cpu_macl, arg0, arg1);
935 	    tcg_temp_free(arg1);
936 	    tcg_temp_free(arg0);
937 	}
938 	return;
939     case 0x200e:		/* mulu.w Rm,Rn */
940 	{
941 	    TCGv arg0, arg1;
942 	    arg0 = tcg_temp_new();
943 	    tcg_gen_ext16u_i32(arg0, REG(B7_4));
944 	    arg1 = tcg_temp_new();
945 	    tcg_gen_ext16u_i32(arg1, REG(B11_8));
946 	    tcg_gen_mul_i32(cpu_macl, arg0, arg1);
947 	    tcg_temp_free(arg1);
948 	    tcg_temp_free(arg0);
949 	}
950 	return;
951     case 0x600b:		/* neg Rm,Rn */
952 	tcg_gen_neg_i32(REG(B11_8), REG(B7_4));
953 	return;
954     case 0x600a:		/* negc Rm,Rn */
955         {
956 	    TCGv t0, t1;
957             t0 = tcg_temp_new();
958             tcg_gen_neg_i32(t0, REG(B7_4));
959             t1 = tcg_temp_new();
960             tcg_gen_andi_i32(t1, cpu_sr, SR_T);
961             tcg_gen_sub_i32(REG(B11_8), t0, t1);
962             tcg_gen_andi_i32(cpu_sr, cpu_sr, ~SR_T);
963             tcg_gen_setcondi_i32(TCG_COND_GTU, t1, t0, 0);
964             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
965             tcg_gen_setcond_i32(TCG_COND_GTU, t1, REG(B11_8), t0);
966             tcg_gen_or_i32(cpu_sr, cpu_sr, t1);
967             tcg_temp_free(t0);
968             tcg_temp_free(t1);
969         }
970 	return;
971     case 0x6007:		/* not Rm,Rn */
972 	tcg_gen_not_i32(REG(B11_8), REG(B7_4));
973 	return;
974     case 0x200b:		/* or Rm,Rn */
975 	tcg_gen_or_i32(REG(B11_8), REG(B11_8), REG(B7_4));
976 	return;
977     case 0x400c:		/* shad Rm,Rn */
978 	{
979 	    int label1 = gen_new_label();
980 	    int label2 = gen_new_label();
981 	    int label3 = gen_new_label();
982 	    int label4 = gen_new_label();
983 	    TCGv shift;
984 	    tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
985 	    /* Rm positive, shift to the left */
986             shift = tcg_temp_new();
987 	    tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
988 	    tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
989 	    tcg_temp_free(shift);
990 	    tcg_gen_br(label4);
991 	    /* Rm negative, shift to the right */
992 	    gen_set_label(label1);
993             shift = tcg_temp_new();
994 	    tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
995 	    tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
996 	    tcg_gen_not_i32(shift, REG(B7_4));
997 	    tcg_gen_andi_i32(shift, shift, 0x1f);
998 	    tcg_gen_addi_i32(shift, shift, 1);
999 	    tcg_gen_sar_i32(REG(B11_8), REG(B11_8), shift);
1000 	    tcg_temp_free(shift);
1001 	    tcg_gen_br(label4);
1002 	    /* Rm = -32 */
1003 	    gen_set_label(label2);
1004 	    tcg_gen_brcondi_i32(TCG_COND_LT, REG(B11_8), 0, label3);
1005 	    tcg_gen_movi_i32(REG(B11_8), 0);
1006 	    tcg_gen_br(label4);
1007 	    gen_set_label(label3);
1008 	    tcg_gen_movi_i32(REG(B11_8), 0xffffffff);
1009 	    gen_set_label(label4);
1010 	}
1011 	return;
1012     case 0x400d:		/* shld Rm,Rn */
1013 	{
1014 	    int label1 = gen_new_label();
1015 	    int label2 = gen_new_label();
1016 	    int label3 = gen_new_label();
1017 	    TCGv shift;
1018 	    tcg_gen_brcondi_i32(TCG_COND_LT, REG(B7_4), 0, label1);
1019 	    /* Rm positive, shift to the left */
1020             shift = tcg_temp_new();
1021 	    tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1022 	    tcg_gen_shl_i32(REG(B11_8), REG(B11_8), shift);
1023 	    tcg_temp_free(shift);
1024 	    tcg_gen_br(label3);
1025 	    /* Rm negative, shift to the right */
1026 	    gen_set_label(label1);
1027             shift = tcg_temp_new();
1028 	    tcg_gen_andi_i32(shift, REG(B7_4), 0x1f);
1029 	    tcg_gen_brcondi_i32(TCG_COND_EQ, shift, 0, label2);
1030 	    tcg_gen_not_i32(shift, REG(B7_4));
1031 	    tcg_gen_andi_i32(shift, shift, 0x1f);
1032 	    tcg_gen_addi_i32(shift, shift, 1);
1033 	    tcg_gen_shr_i32(REG(B11_8), REG(B11_8), shift);
1034 	    tcg_temp_free(shift);
1035 	    tcg_gen_br(label3);
1036 	    /* Rm = -32 */
1037 	    gen_set_label(label2);
1038 	    tcg_gen_movi_i32(REG(B11_8), 0);
1039 	    gen_set_label(label3);
1040 	}
1041 	return;
1042     case 0x3008:		/* sub Rm,Rn */
1043 	tcg_gen_sub_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1044 	return;
1045     case 0x300a:		/* subc Rm,Rn */
1046 	gen_helper_subc(REG(B11_8), REG(B7_4), REG(B11_8));
1047 	return;
1048     case 0x300b:		/* subv Rm,Rn */
1049 	gen_helper_subv(REG(B11_8), REG(B7_4), REG(B11_8));
1050 	return;
1051     case 0x2008:		/* tst Rm,Rn */
1052 	{
1053 	    TCGv val = tcg_temp_new();
1054 	    tcg_gen_and_i32(val, REG(B7_4), REG(B11_8));
1055 	    gen_cmp_imm(TCG_COND_EQ, val, 0);
1056 	    tcg_temp_free(val);
1057 	}
1058 	return;
1059     case 0x200a:		/* xor Rm,Rn */
1060 	tcg_gen_xor_i32(REG(B11_8), REG(B11_8), REG(B7_4));
1061 	return;
1062     case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1063 	CHECK_FPU_ENABLED
1064 	if (ctx->fpscr & FPSCR_SZ) {
1065 	    TCGv_i64 fp = tcg_temp_new_i64();
1066 	    gen_load_fpr64(fp, XREG(B7_4));
1067 	    gen_store_fpr64(fp, XREG(B11_8));
1068 	    tcg_temp_free_i64(fp);
1069 	} else {
1070 	    tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1071 	}
1072 	return;
1073     case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1074 	CHECK_FPU_ENABLED
1075 	if (ctx->fpscr & FPSCR_SZ) {
1076 	    TCGv addr_hi = tcg_temp_new();
1077 	    int fr = XREG(B7_4);
1078 	    tcg_gen_addi_i32(addr_hi, REG(B11_8), 4);
1079 	    tcg_gen_qemu_st32(cpu_fregs[fr  ], REG(B11_8), ctx->memidx);
1080 	    tcg_gen_qemu_st32(cpu_fregs[fr+1], addr_hi,	   ctx->memidx);
1081 	    tcg_temp_free(addr_hi);
1082 	} else {
1083 	    tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], REG(B11_8), ctx->memidx);
1084 	}
1085 	return;
1086     case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1087 	CHECK_FPU_ENABLED
1088 	if (ctx->fpscr & FPSCR_SZ) {
1089 	    TCGv addr_hi = tcg_temp_new();
1090 	    int fr = XREG(B11_8);
1091 	    tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1092 	    tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1093 	    tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1094 	    tcg_temp_free(addr_hi);
1095 	} else {
1096 	    tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1097 	}
1098 	return;
1099     case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1100 	CHECK_FPU_ENABLED
1101 	if (ctx->fpscr & FPSCR_SZ) {
1102 	    TCGv addr_hi = tcg_temp_new();
1103 	    int fr = XREG(B11_8);
1104 	    tcg_gen_addi_i32(addr_hi, REG(B7_4), 4);
1105 	    tcg_gen_qemu_ld32u(cpu_fregs[fr  ], REG(B7_4), ctx->memidx);
1106 	    tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr_hi,   ctx->memidx);
1107 	    tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 8);
1108 	    tcg_temp_free(addr_hi);
1109 	} else {
1110 	    tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], REG(B7_4), ctx->memidx);
1111 	    tcg_gen_addi_i32(REG(B7_4), REG(B7_4), 4);
1112 	}
1113 	return;
1114     case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1115 	CHECK_FPU_ENABLED
1116 	if (ctx->fpscr & FPSCR_SZ) {
1117 	    TCGv addr = tcg_temp_new_i32();
1118 	    int fr = XREG(B7_4);
1119 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
1120 	    tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1121 	    tcg_gen_subi_i32(addr, addr, 4);
1122 	    tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1123 	    tcg_gen_mov_i32(REG(B11_8), addr);
1124 	    tcg_temp_free(addr);
1125 	} else {
1126 	    TCGv addr;
1127 	    addr = tcg_temp_new_i32();
1128 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
1129 	    tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1130 	    tcg_gen_mov_i32(REG(B11_8), addr);
1131 	    tcg_temp_free(addr);
1132 	}
1133 	return;
1134     case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rn - FPSCR: Nothing */
1135 	CHECK_FPU_ENABLED
1136 	{
1137 	    TCGv addr = tcg_temp_new_i32();
1138 	    tcg_gen_add_i32(addr, REG(B7_4), REG(0));
1139 	    if (ctx->fpscr & FPSCR_SZ) {
1140 		int fr = XREG(B11_8);
1141 		tcg_gen_qemu_ld32u(cpu_fregs[fr	 ], addr, ctx->memidx);
1142 		tcg_gen_addi_i32(addr, addr, 4);
1143 		tcg_gen_qemu_ld32u(cpu_fregs[fr+1], addr, ctx->memidx);
1144 	    } else {
1145 		tcg_gen_qemu_ld32u(cpu_fregs[FREG(B11_8)], addr, ctx->memidx);
1146 	    }
1147 	    tcg_temp_free(addr);
1148 	}
1149 	return;
1150     case 0xf007: /* fmov {F,D,X}Rm,@(R0,Rn) - FPSCR: Nothing */
1151 	CHECK_FPU_ENABLED
1152 	{
1153 	    TCGv addr = tcg_temp_new();
1154 	    tcg_gen_add_i32(addr, REG(B11_8), REG(0));
1155 	    if (ctx->fpscr & FPSCR_SZ) {
1156 		int fr = XREG(B7_4);
1157 		tcg_gen_qemu_st32(cpu_fregs[fr  ], addr, ctx->memidx);
1158 		tcg_gen_addi_i32(addr, addr, 4);
1159 		tcg_gen_qemu_st32(cpu_fregs[fr+1], addr, ctx->memidx);
1160 	    } else {
1161 		tcg_gen_qemu_st32(cpu_fregs[FREG(B7_4)], addr, ctx->memidx);
1162 	    }
1163 	    tcg_temp_free(addr);
1164 	}
1165 	return;
1166     case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1167     case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1168     case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1169     case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1170     case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1171     case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1172 	{
1173 	    CHECK_FPU_ENABLED
1174 	    if (ctx->fpscr & FPSCR_PR) {
1175                 TCGv_i64 fp0, fp1;
1176 
1177 		if (ctx->opcode & 0x0110)
1178 		    break; /* illegal instruction */
1179 		fp0 = tcg_temp_new_i64();
1180 		fp1 = tcg_temp_new_i64();
1181 		gen_load_fpr64(fp0, DREG(B11_8));
1182 		gen_load_fpr64(fp1, DREG(B7_4));
1183                 switch (ctx->opcode & 0xf00f) {
1184                 case 0xf000:		/* fadd Rm,Rn */
1185                     gen_helper_fadd_DT(fp0, fp0, fp1);
1186                     break;
1187                 case 0xf001:		/* fsub Rm,Rn */
1188                     gen_helper_fsub_DT(fp0, fp0, fp1);
1189                     break;
1190                 case 0xf002:		/* fmul Rm,Rn */
1191                     gen_helper_fmul_DT(fp0, fp0, fp1);
1192                     break;
1193                 case 0xf003:		/* fdiv Rm,Rn */
1194                     gen_helper_fdiv_DT(fp0, fp0, fp1);
1195                     break;
1196                 case 0xf004:		/* fcmp/eq Rm,Rn */
1197                     gen_helper_fcmp_eq_DT(fp0, fp1);
1198                     return;
1199                 case 0xf005:		/* fcmp/gt Rm,Rn */
1200                     gen_helper_fcmp_gt_DT(fp0, fp1);
1201                     return;
1202                 }
1203 		gen_store_fpr64(fp0, DREG(B11_8));
1204                 tcg_temp_free_i64(fp0);
1205                 tcg_temp_free_i64(fp1);
1206 	    } else {
1207                 switch (ctx->opcode & 0xf00f) {
1208                 case 0xf000:		/* fadd Rm,Rn */
1209                     gen_helper_fadd_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1210                     break;
1211                 case 0xf001:		/* fsub Rm,Rn */
1212                     gen_helper_fsub_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1213                     break;
1214                 case 0xf002:		/* fmul Rm,Rn */
1215                     gen_helper_fmul_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1216                     break;
1217                 case 0xf003:		/* fdiv Rm,Rn */
1218                     gen_helper_fdiv_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1219                     break;
1220                 case 0xf004:		/* fcmp/eq Rm,Rn */
1221                     gen_helper_fcmp_eq_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1222                     return;
1223                 case 0xf005:		/* fcmp/gt Rm,Rn */
1224                     gen_helper_fcmp_gt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B7_4)]);
1225                     return;
1226                 }
1227 	    }
1228 	}
1229 	return;
1230     case 0xf00e: /* fmac FR0,RM,Rn */
1231         {
1232             CHECK_FPU_ENABLED
1233             if (ctx->fpscr & FPSCR_PR) {
1234                 break; /* illegal instruction */
1235             } else {
1236                 gen_helper_fmac_FT(cpu_fregs[FREG(B11_8)],
1237                                    cpu_fregs[FREG(0)], cpu_fregs[FREG(B7_4)], cpu_fregs[FREG(B11_8)]);
1238                 return;
1239             }
1240         }
1241     }
1242 
1243     switch (ctx->opcode & 0xff00) {
1244     case 0xc900:		/* and #imm,R0 */
1245 	tcg_gen_andi_i32(REG(0), REG(0), B7_0);
1246 	return;
1247     case 0xcd00:		/* and.b #imm,@(R0,GBR) */
1248 	{
1249 	    TCGv addr, val;
1250 	    addr = tcg_temp_new();
1251 	    tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1252 	    val = tcg_temp_new();
1253 	    tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1254 	    tcg_gen_andi_i32(val, val, B7_0);
1255 	    tcg_gen_qemu_st8(val, addr, ctx->memidx);
1256 	    tcg_temp_free(val);
1257 	    tcg_temp_free(addr);
1258 	}
1259 	return;
1260     case 0x8b00:		/* bf label */
1261 	CHECK_NOT_DELAY_SLOT
1262 	    gen_conditional_jump(ctx, ctx->pc + 2,
1263 				 ctx->pc + 4 + B7_0s * 2);
1264 	ctx->bstate = BS_BRANCH;
1265 	return;
1266     case 0x8f00:		/* bf/s label */
1267 	CHECK_NOT_DELAY_SLOT
1268 	gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 0);
1269 	ctx->flags |= DELAY_SLOT_CONDITIONAL;
1270 	return;
1271     case 0x8900:		/* bt label */
1272 	CHECK_NOT_DELAY_SLOT
1273 	    gen_conditional_jump(ctx, ctx->pc + 4 + B7_0s * 2,
1274 				 ctx->pc + 2);
1275 	ctx->bstate = BS_BRANCH;
1276 	return;
1277     case 0x8d00:		/* bt/s label */
1278 	CHECK_NOT_DELAY_SLOT
1279 	gen_branch_slot(ctx->delayed_pc = ctx->pc + 4 + B7_0s * 2, 1);
1280 	ctx->flags |= DELAY_SLOT_CONDITIONAL;
1281 	return;
1282     case 0x8800:		/* cmp/eq #imm,R0 */
1283 	gen_cmp_imm(TCG_COND_EQ, REG(0), B7_0s);
1284 	return;
1285     case 0xc400:		/* mov.b @(disp,GBR),R0 */
1286 	{
1287 	    TCGv addr = tcg_temp_new();
1288 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1289 	    tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1290 	    tcg_temp_free(addr);
1291 	}
1292 	return;
1293     case 0xc500:		/* mov.w @(disp,GBR),R0 */
1294 	{
1295 	    TCGv addr = tcg_temp_new();
1296 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1297 	    tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1298 	    tcg_temp_free(addr);
1299 	}
1300 	return;
1301     case 0xc600:		/* mov.l @(disp,GBR),R0 */
1302 	{
1303 	    TCGv addr = tcg_temp_new();
1304 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1305 	    tcg_gen_qemu_ld32s(REG(0), addr, ctx->memidx);
1306 	    tcg_temp_free(addr);
1307 	}
1308 	return;
1309     case 0xc000:		/* mov.b R0,@(disp,GBR) */
1310 	{
1311 	    TCGv addr = tcg_temp_new();
1312 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0);
1313 	    tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1314 	    tcg_temp_free(addr);
1315 	}
1316 	return;
1317     case 0xc100:		/* mov.w R0,@(disp,GBR) */
1318 	{
1319 	    TCGv addr = tcg_temp_new();
1320 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 2);
1321 	    tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1322 	    tcg_temp_free(addr);
1323 	}
1324 	return;
1325     case 0xc200:		/* mov.l R0,@(disp,GBR) */
1326 	{
1327 	    TCGv addr = tcg_temp_new();
1328 	    tcg_gen_addi_i32(addr, cpu_gbr, B7_0 * 4);
1329 	    tcg_gen_qemu_st32(REG(0), addr, ctx->memidx);
1330 	    tcg_temp_free(addr);
1331 	}
1332 	return;
1333     case 0x8000:		/* mov.b R0,@(disp,Rn) */
1334 	{
1335 	    TCGv addr = tcg_temp_new();
1336 	    tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1337 	    tcg_gen_qemu_st8(REG(0), addr, ctx->memidx);
1338 	    tcg_temp_free(addr);
1339 	}
1340 	return;
1341     case 0x8100:		/* mov.w R0,@(disp,Rn) */
1342 	{
1343 	    TCGv addr = tcg_temp_new();
1344 	    tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1345 	    tcg_gen_qemu_st16(REG(0), addr, ctx->memidx);
1346 	    tcg_temp_free(addr);
1347 	}
1348 	return;
1349     case 0x8400:		/* mov.b @(disp,Rn),R0 */
1350 	{
1351 	    TCGv addr = tcg_temp_new();
1352 	    tcg_gen_addi_i32(addr, REG(B7_4), B3_0);
1353 	    tcg_gen_qemu_ld8s(REG(0), addr, ctx->memidx);
1354 	    tcg_temp_free(addr);
1355 	}
1356 	return;
1357     case 0x8500:		/* mov.w @(disp,Rn),R0 */
1358 	{
1359 	    TCGv addr = tcg_temp_new();
1360 	    tcg_gen_addi_i32(addr, REG(B7_4), B3_0 * 2);
1361 	    tcg_gen_qemu_ld16s(REG(0), addr, ctx->memidx);
1362 	    tcg_temp_free(addr);
1363 	}
1364 	return;
1365     case 0xc700:		/* mova @(disp,PC),R0 */
1366 	tcg_gen_movi_i32(REG(0), ((ctx->pc & 0xfffffffc) + 4 + B7_0 * 4) & ~3);
1367 	return;
1368     case 0xcb00:		/* or #imm,R0 */
1369 	tcg_gen_ori_i32(REG(0), REG(0), B7_0);
1370 	return;
1371     case 0xcf00:		/* or.b #imm,@(R0,GBR) */
1372 	{
1373 	    TCGv addr, val;
1374 	    addr = tcg_temp_new();
1375 	    tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1376 	    val = tcg_temp_new();
1377 	    tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1378 	    tcg_gen_ori_i32(val, val, B7_0);
1379 	    tcg_gen_qemu_st8(val, addr, ctx->memidx);
1380 	    tcg_temp_free(val);
1381 	    tcg_temp_free(addr);
1382 	}
1383 	return;
1384     case 0xc300:		/* trapa #imm */
1385 	{
1386 	    TCGv imm;
1387 	    CHECK_NOT_DELAY_SLOT
1388 	    imm = tcg_const_i32(B7_0);
1389 	    gen_helper_trapa(imm);
1390 	    tcg_temp_free(imm);
1391 	    ctx->bstate = BS_BRANCH;
1392 	}
1393 	return;
1394     case 0xc800:		/* tst #imm,R0 */
1395 	{
1396 	    TCGv val = tcg_temp_new();
1397 	    tcg_gen_andi_i32(val, REG(0), B7_0);
1398 	    gen_cmp_imm(TCG_COND_EQ, val, 0);
1399 	    tcg_temp_free(val);
1400 	}
1401 	return;
1402     case 0xcc00:		/* tst.b #imm,@(R0,GBR) */
1403 	{
1404 	    TCGv val = tcg_temp_new();
1405 	    tcg_gen_add_i32(val, REG(0), cpu_gbr);
1406 	    tcg_gen_qemu_ld8u(val, val, ctx->memidx);
1407 	    tcg_gen_andi_i32(val, val, B7_0);
1408 	    gen_cmp_imm(TCG_COND_EQ, val, 0);
1409 	    tcg_temp_free(val);
1410 	}
1411 	return;
1412     case 0xca00:		/* xor #imm,R0 */
1413 	tcg_gen_xori_i32(REG(0), REG(0), B7_0);
1414 	return;
1415     case 0xce00:		/* xor.b #imm,@(R0,GBR) */
1416 	{
1417 	    TCGv addr, val;
1418 	    addr = tcg_temp_new();
1419 	    tcg_gen_add_i32(addr, REG(0), cpu_gbr);
1420 	    val = tcg_temp_new();
1421 	    tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1422 	    tcg_gen_xori_i32(val, val, B7_0);
1423 	    tcg_gen_qemu_st8(val, addr, ctx->memidx);
1424 	    tcg_temp_free(val);
1425 	    tcg_temp_free(addr);
1426 	}
1427 	return;
1428     }
1429 
1430     switch (ctx->opcode & 0xf08f) {
1431     case 0x408e:		/* ldc Rm,Rn_BANK */
1432 	CHECK_PRIVILEGED
1433 	tcg_gen_mov_i32(ALTREG(B6_4), REG(B11_8));
1434 	return;
1435     case 0x4087:		/* ldc.l @Rm+,Rn_BANK */
1436 	CHECK_PRIVILEGED
1437 	tcg_gen_qemu_ld32s(ALTREG(B6_4), REG(B11_8), ctx->memidx);
1438 	tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1439 	return;
1440     case 0x0082:		/* stc Rm_BANK,Rn */
1441 	CHECK_PRIVILEGED
1442 	tcg_gen_mov_i32(REG(B11_8), ALTREG(B6_4));
1443 	return;
1444     case 0x4083:		/* stc.l Rm_BANK,@-Rn */
1445 	CHECK_PRIVILEGED
1446 	{
1447 	    TCGv addr = tcg_temp_new();
1448 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
1449 	    tcg_gen_qemu_st32(ALTREG(B6_4), addr, ctx->memidx);
1450 	    tcg_gen_mov_i32(REG(B11_8), addr);
1451 	    tcg_temp_free(addr);
1452 	}
1453 	return;
1454     }
1455 
1456     switch (ctx->opcode & 0xf0ff) {
1457     case 0x0023:		/* braf Rn */
1458 	CHECK_NOT_DELAY_SLOT
1459 	tcg_gen_addi_i32(cpu_delayed_pc, REG(B11_8), ctx->pc + 4);
1460 	ctx->flags |= DELAY_SLOT;
1461 	ctx->delayed_pc = (uint32_t) - 1;
1462 	return;
1463     case 0x0003:		/* bsrf Rn */
1464 	CHECK_NOT_DELAY_SLOT
1465 	tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1466 	tcg_gen_add_i32(cpu_delayed_pc, REG(B11_8), cpu_pr);
1467 	ctx->flags |= DELAY_SLOT;
1468 	ctx->delayed_pc = (uint32_t) - 1;
1469 	return;
1470     case 0x4015:		/* cmp/pl Rn */
1471 	gen_cmp_imm(TCG_COND_GT, REG(B11_8), 0);
1472 	return;
1473     case 0x4011:		/* cmp/pz Rn */
1474 	gen_cmp_imm(TCG_COND_GE, REG(B11_8), 0);
1475 	return;
1476     case 0x4010:		/* dt Rn */
1477 	tcg_gen_subi_i32(REG(B11_8), REG(B11_8), 1);
1478 	gen_cmp_imm(TCG_COND_EQ, REG(B11_8), 0);
1479 	return;
1480     case 0x402b:		/* jmp @Rn */
1481 	CHECK_NOT_DELAY_SLOT
1482 	tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1483 	ctx->flags |= DELAY_SLOT;
1484 	ctx->delayed_pc = (uint32_t) - 1;
1485 	return;
1486     case 0x400b:		/* jsr @Rn */
1487 	CHECK_NOT_DELAY_SLOT
1488 	tcg_gen_movi_i32(cpu_pr, ctx->pc + 4);
1489 	tcg_gen_mov_i32(cpu_delayed_pc, REG(B11_8));
1490 	ctx->flags |= DELAY_SLOT;
1491 	ctx->delayed_pc = (uint32_t) - 1;
1492 	return;
1493     case 0x400e:		/* ldc Rm,SR */
1494 	CHECK_PRIVILEGED
1495 	tcg_gen_andi_i32(cpu_sr, REG(B11_8), 0x700083f3);
1496 	ctx->bstate = BS_STOP;
1497 	return;
1498     case 0x4007:		/* ldc.l @Rm+,SR */
1499 	CHECK_PRIVILEGED
1500 	{
1501 	    TCGv val = tcg_temp_new();
1502 	    tcg_gen_qemu_ld32s(val, REG(B11_8), ctx->memidx);
1503 	    tcg_gen_andi_i32(cpu_sr, val, 0x700083f3);
1504 	    tcg_temp_free(val);
1505 	    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1506 	    ctx->bstate = BS_STOP;
1507 	}
1508 	return;
1509     case 0x0002:		/* stc SR,Rn */
1510 	CHECK_PRIVILEGED
1511 	tcg_gen_mov_i32(REG(B11_8), cpu_sr);
1512 	return;
1513     case 0x4003:		/* stc SR,@-Rn */
1514 	CHECK_PRIVILEGED
1515 	{
1516 	    TCGv addr = tcg_temp_new();
1517 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
1518 	    tcg_gen_qemu_st32(cpu_sr, addr, ctx->memidx);
1519 	    tcg_gen_mov_i32(REG(B11_8), addr);
1520 	    tcg_temp_free(addr);
1521 	}
1522 	return;
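/* The LD/ST/LDST macros below expand into the case labels for moving a
 * control or system register to/from a general register: ldc/lds Rm,<reg>,
 * ldc.l/lds.l @Rm+,<reg>, stc/sts <reg>,Rn and stc.l/sts.l <reg>,@-Rn,
 * with an optional privilege or FPU-enable check supplied as 'prechk'.
 */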
1523 #define LD(reg,ldnum,ldpnum,prechk)		\
1524   case ldnum:							\
1525     prechk    							\
1526     tcg_gen_mov_i32 (cpu_##reg, REG(B11_8));			\
1527     return;							\
1528   case ldpnum:							\
1529     prechk    							\
1530     tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx);	\
1531     tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);		\
1532     return;
1533 #define ST(reg,stnum,stpnum,prechk)		\
1534   case stnum:							\
1535     prechk    							\
1536     tcg_gen_mov_i32 (REG(B11_8), cpu_##reg);			\
1537     return;							\
1538   case stpnum:							\
1539     prechk    							\
1540     {								\
1541 	TCGv addr = tcg_temp_new();				\
1542 	tcg_gen_subi_i32(addr, REG(B11_8), 4);			\
1543 	tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx);	\
1544 	tcg_gen_mov_i32(REG(B11_8), addr);			\
1545 	tcg_temp_free(addr);					\
1546     }								\
1547     return;
1548 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk)		\
1549 	LD(reg,ldnum,ldpnum,prechk)				\
1550 	ST(reg,stnum,stpnum,prechk)
1551 	LDST(gbr,  0x401e, 0x4017, 0x0012, 0x4013, {})
1552 	LDST(vbr,  0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED)
1553 	LDST(ssr,  0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED)
1554 	LDST(spc,  0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED)
1555 	ST(sgr,  0x003a, 0x4032, CHECK_PRIVILEGED)
1556 	LD(sgr,  0x403a, 0x4036, CHECK_PRIVILEGED if (!(ctx->features & SH_FEATURE_SH4A)) break;)
1557 	LDST(dbr,  0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED)
1558 	LDST(mach, 0x400a, 0x4006, 0x000a, 0x4002, {})
1559 	LDST(macl, 0x401a, 0x4016, 0x001a, 0x4012, {})
1560 	LDST(pr,   0x402a, 0x4026, 0x002a, 0x4022, {})
1561 	LDST(fpul, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED})
1562     case 0x406a:		/* lds Rm,FPSCR */
1563 	CHECK_FPU_ENABLED
1564 	gen_helper_ld_fpscr(REG(B11_8));
1565 	ctx->bstate = BS_STOP;
1566 	return;
1567     case 0x4066:		/* lds.l @Rm+,FPSCR */
1568 	CHECK_FPU_ENABLED
1569 	{
1570 	    TCGv addr = tcg_temp_new();
1571 	    tcg_gen_qemu_ld32s(addr, REG(B11_8), ctx->memidx);
1572 	    tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1573 	    gen_helper_ld_fpscr(addr);
1574 	    tcg_temp_free(addr);
1575 	    ctx->bstate = BS_STOP;
1576 	}
1577 	return;
1578     case 0x006a:		/* sts FPSCR,Rn */
1579 	CHECK_FPU_ENABLED
1580 	tcg_gen_andi_i32(REG(B11_8), cpu_fpscr, 0x003fffff);
1581 	return;
1582     case 0x4062:		/* sts FPSCR,@-Rn */
1583 	CHECK_FPU_ENABLED
1584 	{
1585 	    TCGv addr, val;
1586 	    val = tcg_temp_new();
1587 	    tcg_gen_andi_i32(val, cpu_fpscr, 0x003fffff);
1588 	    addr = tcg_temp_new();
1589 	    tcg_gen_subi_i32(addr, REG(B11_8), 4);
1590 	    tcg_gen_qemu_st32(val, addr, ctx->memidx);
1591 	    tcg_gen_mov_i32(REG(B11_8), addr);
1592 	    tcg_temp_free(addr);
1593 	    tcg_temp_free(val);
1594 	}
1595 	return;
1596     case 0x00c3:		/* movca.l R0,@Rn */
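        /* The old longword at @Rn is read and passed to gen_helper_movcal
           before R0 is stored; the helper appears to remember the address
           and original value so that a later ocbi can undo the store,
           approximating MOVCA.L's allocate-without-fetch cache behaviour. */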
1597         {
1598             TCGv val = tcg_temp_new();
1599             tcg_gen_qemu_ld32u(val, REG(B11_8), ctx->memidx);
1600             gen_helper_movcal (REG(B11_8), val);
1601             tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1602         }
1603         ctx->has_movcal = 1;
1604 	return;
1605     case 0x40a9:
1606 	/* MOVUA.L @Rm,R0 (Rm) -> R0
1607 	   Load non-boundary-aligned data */
1608 	tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1609 	return;
1610     case 0x40e9:
1611 	/* MOVUA.L @Rm+,R0   (Rm) -> R0, Rm + 4 -> Rm
1612 	   Load non-boundary-aligned data */
1613 	tcg_gen_qemu_ld32u(REG(0), REG(B11_8), ctx->memidx);
1614 	tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4);
1615 	return;
1616     case 0x0029:		/* movt Rn */
1617 	tcg_gen_andi_i32(REG(B11_8), cpu_sr, SR_T);
1618 	return;
1619     case 0x0073:
1620         /* MOVCO.L
1621 	       LDST -> T
1622                If (T == 1) R0 -> (Rn)
1623                0 -> LDST
1624         */
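        /* Emulated as: clear T, OR the LDST flag into SR (making T = LDST),
           skip the store when LDST == 0, then clear LDST.  Together with
           MOVLI.L below this models the SH4A load-linked/store-conditional
           pair. */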
1625         if (ctx->features & SH_FEATURE_SH4A) {
1626 	    int label = gen_new_label();
1627 	    gen_clr_t();
1628 	    tcg_gen_or_i32(cpu_sr, cpu_sr, cpu_ldst);
1629 	    tcg_gen_brcondi_i32(TCG_COND_EQ, cpu_ldst, 0, label);
1630 	    tcg_gen_qemu_st32(REG(0), REG(B11_8), ctx->memidx);
1631 	    gen_set_label(label);
1632 	    tcg_gen_movi_i32(cpu_ldst, 0);
1633 	    return;
1634 	} else
1635 	    break;
1636     case 0x0063:
1637         /* MOVLI.L @Rm,R0
1638                1 -> LDST
1639                (Rm) -> R0
1640                When an interrupt/exception
1641                occurs, 0 -> LDST
1642         */
1643 	if (ctx->features & SH_FEATURE_SH4A) {
1644 	    tcg_gen_movi_i32(cpu_ldst, 0);
1645 	    tcg_gen_qemu_ld32s(REG(0), REG(B11_8), ctx->memidx);
1646 	    tcg_gen_movi_i32(cpu_ldst, 1);
1647 	    return;
1648 	} else
1649 	    break;
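    /* The cache-control instructions below are largely no-ops because the
       caches are not modelled: ocbi goes through a helper (so that a pending
       movca.l can be cancelled), while ocbp/ocbwb just perform a dummy load,
       presumably so that any address error or TLB miss is still raised. */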
1650     case 0x0093:		/* ocbi @Rn */
1651 	{
1652 	    gen_helper_ocbi (REG(B11_8));
1653 	}
1654 	return;
1655     case 0x00a3:		/* ocbp @Rn */
1656 	{
1657 	    TCGv dummy = tcg_temp_new();
1658 	    tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1659 	    tcg_temp_free(dummy);
1660 	}
1661 	return;
1662     case 0x00b3:		/* ocbwb @Rn */
1663 	{
1664 	    TCGv dummy = tcg_temp_new();
1665 	    tcg_gen_qemu_ld32s(dummy, REG(B11_8), ctx->memidx);
1666 	    tcg_temp_free(dummy);
1667 	}
1668 	return;
1669     case 0x0083:		/* pref @Rn */
1670 	return;
1671     case 0x00d3:		/* prefi @Rn */
1672 	if (ctx->features & SH_FEATURE_SH4A)
1673 	    return;
1674 	else
1675 	    break;
1676     case 0x00e3:		/* icbi @Rn */
1677 	if (ctx->features & SH_FEATURE_SH4A)
1678 	    return;
1679 	else
1680 	    break;
1681     case 0x00ab:		/* synco */
1682 	if (ctx->features & SH_FEATURE_SH4A)
1683 	    return;
1684 	else
1685 	    break;
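    /* rotcl/rotcr rotate through the T bit: the old T is saved, the bit
       shifted out of Rn becomes the new T, and the saved T is copied into
       the vacated bit (gen_copy_bit_i32 copies a single bit between TCG
       values). */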
1686     case 0x4024:		/* rotcl Rn */
1687 	{
1688 	    TCGv tmp = tcg_temp_new();
1689 	    tcg_gen_mov_i32(tmp, cpu_sr);
1690 	    gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1691 	    tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1692 	    gen_copy_bit_i32(REG(B11_8), 0, tmp, 0);
1693 	    tcg_temp_free(tmp);
1694 	}
1695 	return;
1696     case 0x4025:		/* rotcr Rn */
1697 	{
1698 	    TCGv tmp = tcg_temp_new();
1699 	    tcg_gen_mov_i32(tmp, cpu_sr);
1700 	    gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1701 	    tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1702 	    gen_copy_bit_i32(REG(B11_8), 31, tmp, 0);
1703 	    tcg_temp_free(tmp);
1704 	}
1705 	return;
1706     case 0x4004:		/* rotl Rn */
1707 	tcg_gen_rotli_i32(REG(B11_8), REG(B11_8), 1);
1708 	gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1709 	return;
1710     case 0x4005:		/* rotr Rn */
1711 	gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1712 	tcg_gen_rotri_i32(REG(B11_8), REG(B11_8), 1);
1713 	return;
1714     case 0x4000:		/* shll Rn */
1715     case 0x4020:		/* shal Rn */
1716 	gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 31);
1717 	tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 1);
1718 	return;
1719     case 0x4021:		/* shar Rn */
1720 	gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1721 	tcg_gen_sari_i32(REG(B11_8), REG(B11_8), 1);
1722 	return;
1723     case 0x4001:		/* shlr Rn */
1724 	gen_copy_bit_i32(cpu_sr, 0, REG(B11_8), 0);
1725 	tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 1);
1726 	return;
1727     case 0x4008:		/* shll2 Rn */
1728 	tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 2);
1729 	return;
1730     case 0x4018:		/* shll8 Rn */
1731 	tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 8);
1732 	return;
1733     case 0x4028:		/* shll16 Rn */
1734 	tcg_gen_shli_i32(REG(B11_8), REG(B11_8), 16);
1735 	return;
1736     case 0x4009:		/* shlr2 Rn */
1737 	tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 2);
1738 	return;
1739     case 0x4019:		/* shlr8 Rn */
1740 	tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 8);
1741 	return;
1742     case 0x4029:		/* shlr16 Rn */
1743 	tcg_gen_shri_i32(REG(B11_8), REG(B11_8), 16);
1744 	return;
1745     case 0x401b:		/* tas.b @Rn */
1746 	{
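	    /* T is set if the byte at @Rn was zero, then bit 7 of that byte
	       is set and it is written back (not atomic under emulation).
	       Local temps are used, presumably so the values survive any
	       branches emitted by gen_cmp_imm. */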
1747 	    TCGv addr, val;
1748 	    addr = tcg_temp_local_new();
1749 	    tcg_gen_mov_i32(addr, REG(B11_8));
1750 	    val = tcg_temp_local_new();
1751 	    tcg_gen_qemu_ld8u(val, addr, ctx->memidx);
1752 	    gen_cmp_imm(TCG_COND_EQ, val, 0);
1753 	    tcg_gen_ori_i32(val, val, 0x80);
1754 	    tcg_gen_qemu_st8(val, addr, ctx->memidx);
1755 	    tcg_temp_free(val);
1756 	    tcg_temp_free(addr);
1757 	}
1758 	return;
1759     case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1760 	CHECK_FPU_ENABLED
1761 	tcg_gen_mov_i32(cpu_fregs[FREG(B11_8)], cpu_fpul);
1762 	return;
1763     case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1764 	CHECK_FPU_ENABLED
1765 	tcg_gen_mov_i32(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1766 	return;
1767     case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1768 	CHECK_FPU_ENABLED
1769 	if (ctx->fpscr & FPSCR_PR) {
1770 	    TCGv_i64 fp;
1771 	    if (ctx->opcode & 0x0100)
1772 		break; /* illegal instruction */
1773 	    fp = tcg_temp_new_i64();
1774 	    gen_helper_float_DT(fp, cpu_fpul);
1775 	    gen_store_fpr64(fp, DREG(B11_8));
1776 	    tcg_temp_free_i64(fp);
1777 	}
1778 	else {
1779 	    gen_helper_float_FT(cpu_fregs[FREG(B11_8)], cpu_fpul);
1780 	}
1781 	return;
1782     case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1783 	CHECK_FPU_ENABLED
1784 	if (ctx->fpscr & FPSCR_PR) {
1785 	    TCGv_i64 fp;
1786 	    if (ctx->opcode & 0x0100)
1787 		break; /* illegal instruction */
1788 	    fp = tcg_temp_new_i64();
1789 	    gen_load_fpr64(fp, DREG(B11_8));
1790 	    gen_helper_ftrc_DT(cpu_fpul, fp);
1791 	    tcg_temp_free_i64(fp);
1792 	}
1793 	else {
1794 	    gen_helper_ftrc_FT(cpu_fpul, cpu_fregs[FREG(B11_8)]);
1795 	}
1796 	return;
1797     case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1798 	CHECK_FPU_ENABLED
1799 	{
1800 	    gen_helper_fneg_T(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1801 	}
1802 	return;
1803     case 0xf05d: /* fabs FRn/DRn */
1804 	CHECK_FPU_ENABLED
1805 	if (ctx->fpscr & FPSCR_PR) {
1806 	    if (ctx->opcode & 0x0100)
1807 		break; /* illegal instruction */
1808 	    TCGv_i64 fp = tcg_temp_new_i64();
1809 	    gen_load_fpr64(fp, DREG(B11_8));
1810 	    gen_helper_fabs_DT(fp, fp);
1811 	    gen_store_fpr64(fp, DREG(B11_8));
1812 	    tcg_temp_free_i64(fp);
1813 	} else {
1814 	    gen_helper_fabs_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1815 	}
1816 	return;
1817     case 0xf06d: /* fsqrt FRn/DRn */
1818 	CHECK_FPU_ENABLED
1819 	if (ctx->fpscr & FPSCR_PR) {
1820 	    if (ctx->opcode & 0x0100)
1821 		break; /* illegal instruction */
1822 	    TCGv_i64 fp = tcg_temp_new_i64();
1823 	    gen_load_fpr64(fp, DREG(B11_8));
1824 	    gen_helper_fsqrt_DT(fp, fp);
1825 	    gen_store_fpr64(fp, DREG(B11_8));
1826 	    tcg_temp_free_i64(fp);
1827 	} else {
1828 	    gen_helper_fsqrt_FT(cpu_fregs[FREG(B11_8)], cpu_fregs[FREG(B11_8)]);
1829 	}
1830 	return;
1831     case 0xf07d: /* fsrra FRn */
1832 	CHECK_FPU_ENABLED
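	/* fsrra is not implemented; breaking out of the switch reaches the
	   illegal-instruction code at the end of this function. */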
1833 	break;
1834     case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1835 	CHECK_FPU_ENABLED
1836 	if (!(ctx->fpscr & FPSCR_PR)) {
1837 	    tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0);
1838 	}
1839 	return;
1840     case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1841 	CHECK_FPU_ENABLED
1842 	if (!(ctx->fpscr & FPSCR_PR)) {
1843 	    tcg_gen_movi_i32(cpu_fregs[FREG(B11_8)], 0x3f800000);
1844 	}
1845 	return;
1846     case 0xf0ad: /* fcnvsd FPUL,DRn */
1847 	CHECK_FPU_ENABLED
1848 	{
1849 	    TCGv_i64 fp = tcg_temp_new_i64();
1850 	    gen_helper_fcnvsd_FT_DT(fp, cpu_fpul);
1851 	    gen_store_fpr64(fp, DREG(B11_8));
1852 	    tcg_temp_free_i64(fp);
1853 	}
1854 	return;
1855     case 0xf0bd: /* fcnvds DRn,FPUL */
1856 	CHECK_FPU_ENABLED
1857 	{
1858 	    TCGv_i64 fp = tcg_temp_new_i64();
1859 	    gen_load_fpr64(fp, DREG(B11_8));
1860 	    gen_helper_fcnvds_DT_FT(cpu_fpul, fp);
1861 	    tcg_temp_free_i64(fp);
1862 	}
1863 	return;
1864     case 0xf0ed: /* fipr FVm,FVn */
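        /* Note: ctx->opcode is a uint16_t, so the ">> 16"/">> 18" shifts
           below (and ">> 18" in the ftrv case) always extract 0; in the
           instruction encoding the FVm/FVn numbers actually occupy bits
           9:8 and 11:10. */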
1865         CHECK_FPU_ENABLED
1866         if ((ctx->fpscr & FPSCR_PR) == 0) {
1867             TCGv m, n;
1868             m = tcg_const_i32((ctx->opcode >> 16) & 3);
1869             n = tcg_const_i32((ctx->opcode >> 18) & 3);
1870             gen_helper_fipr(m, n);
1871             tcg_temp_free(m);
1872             tcg_temp_free(n);
1873             return;
1874         }
1875         break;
1876     case 0xf0fd: /* ftrv XMTRX,FVn */
1877         CHECK_FPU_ENABLED
1878         if ((ctx->opcode & 0x0300) == 0x0100 &&
1879             (ctx->fpscr & FPSCR_PR) == 0) {
1880             TCGv n;
1881             n = tcg_const_i32((ctx->opcode >> 18) & 3);
1882             gen_helper_ftrv(n);
1883             tcg_temp_free(n);
1884             return;
1885         }
1886         break;
1887     }
1888 #if 0
1889     fprintf(stderr, "unknown instruction 0x%04x at pc 0x%08x\n",
1890 	    ctx->opcode, ctx->pc);
1891     fflush(stderr);
1892 #endif
1893     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1894        gen_helper_raise_slot_illegal_instruction();
1895     } else {
1896        gen_helper_raise_illegal_instruction();
1897     }
1898     ctx->bstate = BS_EXCP;
1899 }
1900 
1901 static void decode_opc(DisasContext * ctx)
1902 {
1903     uint32_t old_flags = ctx->flags;
1904 
1905     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
1906         tcg_gen_debug_insn_start(ctx->pc);
1907     }
1908 
1909     _decode_opc(ctx);
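    /* If the previous instruction set DELAY_SLOT or DELAY_SLOT_CONDITIONAL,
       the instruction just translated was its delay slot: clear the flags
       (or store the ones requested via DELAY_SLOT_CLEARME), mark the block
       as ending in a branch, and emit the pending (conditional) jump. */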
1910 
1911     if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
1912         if (ctx->flags & DELAY_SLOT_CLEARME) {
1913             gen_store_flags(0);
1914         } else {
1915 	    /* go out of the delay slot */
1916 	    uint32_t new_flags = ctx->flags;
1917 	    new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
1918 	    gen_store_flags(new_flags);
1919         }
1920         ctx->flags = 0;
1921         ctx->bstate = BS_BRANCH;
1922         if (old_flags & DELAY_SLOT_CONDITIONAL) {
1923 	    gen_delayed_conditional_jump(ctx);
1924         } else if (old_flags & DELAY_SLOT) {
1925             gen_jump(ctx);
1926 	}
1927 
1928     }
1929 
1930     /* go into a delay slot */
1931     if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
1932         gen_store_flags(ctx->flags);
1933 }
1934 
1935 static inline void
1936 gen_intermediate_code_internal(CPUState * env, TranslationBlock * tb,
1937                                int search_pc)
1938 {
1939     DisasContext ctx;
1940     target_ulong pc_start;
1941     static uint16_t *gen_opc_end;
1942     CPUBreakpoint *bp;
1943     int i, ii;
1944     int num_insns;
1945     int max_insns;
1946 
1947     pc_start = tb->pc;
1948     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1949     ctx.pc = pc_start;
1950     ctx.flags = (uint32_t)tb->flags;
1951     ctx.bstate = BS_NONE;
1952     ctx.sr = env->sr;
1953     ctx.fpscr = env->fpscr;
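    /* memidx selects the MMU index used for this block's loads and stores:
       1 for user mode (SR.MD clear), 0 for privileged mode. */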
1954     ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
1955     /* We don't know if the delayed pc came from a dynamic or static branch,
1956        so assume it is a dynamic branch.  */
1957     ctx.delayed_pc = -1; /* use delayed pc from env pointer */
1958     ctx.tb = tb;
1959     ctx.singlestep_enabled = env->singlestep_enabled;
1960     ctx.features = env->features;
1961     ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);
1962 
1963     ii = -1;
1964     num_insns = 0;
1965     max_insns = tb->cflags & CF_COUNT_MASK;
1966     if (max_insns == 0)
1967         max_insns = CF_COUNT_MASK;
1968     gen_icount_start();
1969     while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
1970         if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
1971             QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
1972                 if (ctx.pc == bp->pc) {
1973 		    /* We have hit a breakpoint - make sure PC is up-to-date */
1974 		    tcg_gen_movi_i32(cpu_pc, ctx.pc);
1975 		    gen_helper_debug();
1976 		    ctx.bstate = BS_EXCP;
1977 		    break;
1978 		}
1979 	    }
1980 	}
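        /* In search_pc mode, record the guest pc, translation flags and
           instruction count at the current TCG op index so that
           gen_pc_load() can later map a retranslation position back to
           guest state. */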
1981         if (search_pc) {
1982             i = gen_opc_ptr - gen_opc_buf;
1983             if (ii < i) {
1984                 ii++;
1985                 while (ii < i)
1986                     gen_opc_instr_start[ii++] = 0;
1987             }
1988             gen_opc_pc[ii] = ctx.pc;
1989             gen_opc_hflags[ii] = ctx.flags;
1990             gen_opc_instr_start[ii] = 1;
1991             gen_opc_icount[ii] = num_insns;
1992         }
1993         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1994             gen_io_start();
1995 #if 0
1996 	fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
1997 	fflush(stderr);
1998 #endif
1999 	ctx.opcode = lduw_code(ctx.pc);
2000 	decode_opc(&ctx);
2001         num_insns++;
2002 	ctx.pc += 2;
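	/* Stop translating at a page boundary (a TB never crosses a page),
	   when single-stepping, or once max_insns instructions have been
	   emitted. */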
2003 	if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
2004 	    break;
2005 	if (env->singlestep_enabled)
2006 	    break;
2007         if (num_insns >= max_insns)
2008             break;
2009         if (singlestep)
2010             break;
2011     }
2012     if (tb->cflags & CF_LAST_IO)
2013         gen_io_end();
2014     if (env->singlestep_enabled) {
2015         tcg_gen_movi_i32(cpu_pc, ctx.pc);
2016         gen_helper_debug();
2017     } else {
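	/* Close the block according to how decoding ended: BS_NONE/BS_STOP
	   store any pending flags and chain to the next pc via gen_goto_tb();
	   BS_EXCP simply exits the TB; BS_BRANCH has already emitted its own
	   exit. */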
2018 	switch (ctx.bstate) {
2019         case BS_STOP:
2020             /* gen_op_interrupt_restart(); */
2021             /* fall through */
2022         case BS_NONE:
2023             if (ctx.flags) {
2024                 gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
2025 	    }
2026             gen_goto_tb(&ctx, 0, ctx.pc);
2027             break;
2028         case BS_EXCP:
2029             /* gen_op_interrupt_restart(); */
2030             tcg_gen_exit_tb(0);
2031             break;
2032         case BS_BRANCH:
2033         default:
2034             break;
2035 	}
2036     }
2037 
2038     gen_icount_end(tb, num_insns);
2039     *gen_opc_ptr = INDEX_op_end;
2040     if (search_pc) {
2041         i = gen_opc_ptr - gen_opc_buf;
2042         ii++;
2043         while (ii <= i)
2044             gen_opc_instr_start[ii++] = 0;
2045     } else {
2046         tb->size = ctx.pc - pc_start;
2047         tb->icount = num_insns;
2048     }
2049 
2050 #ifdef DEBUG_DISAS
2051 #ifdef SH4_DEBUG_DISAS
2052     qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
2053 #endif
2054     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
2055 	qemu_log("IN:\n");	/* , lookup_symbol(pc_start)); */
2056 	log_target_disas(pc_start, ctx.pc - pc_start, 0);
2057 	qemu_log("\n");
2058     }
2059 #endif
2060 }
2061 
2062 void gen_intermediate_code(CPUState * env, struct TranslationBlock *tb)
2063 {
2064     gen_intermediate_code_internal(env, tb, 0);
2065 }
2066 
2067 void gen_intermediate_code_pc(CPUState * env, struct TranslationBlock *tb)
2068 {
2069     gen_intermediate_code_internal(env, tb, 1);
2070 }
2071 
2072 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2073                 unsigned long searched_pc, int pc_pos, void *puc)
2074 {
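    /* Restore the guest pc and translation flags that were recorded for
       op index pc_pos during the search_pc retranslation pass. */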
2075     env->pc = gen_opc_pc[pc_pos];
2076     env->flags = gen_opc_hflags[pc_pos];
2077 }
2078