--- /dev/null
+#include <stdlib.h>
+#include "cpu_65c02.h"
+
// gcc specific
// NOTE: always_inline only takes effect on functions that are also
// declared inline (see GCC attribute docs); static keeps these
// file-local helpers out of the global namespace.
#define INLINE static inline __attribute__((always_inline))

// convenient register access (each expects a local `self` in scope)
#define A (self->regs[CPU_65C02_REG_A])
#define X (self->regs[CPU_65C02_REG_X])
#define Y (self->regs[CPU_65C02_REG_Y])
#define S (self->regs[CPU_65C02_REG_S])
#define P (self->regs[CPU_65C02_REG_P])
#define PC (self->pc)

// convenient flags bit access
#define FLAG_C (1 << CPU_65C02_BIT_C)
#define FLAG_Z (1 << CPU_65C02_BIT_Z)
#define FLAG_I (1 << CPU_65C02_BIT_I)
#define FLAG_D (1 << CPU_65C02_BIT_D)
#define FLAG_V (1 << CPU_65C02_BIT_V)
#define FLAG_N (1 << CPU_65C02_BIT_N)

// read, write, fetch, push, pop byte/word for instruction decode/execute
// these routines automatically apply cycle count of 1 (byte) or 2 (word)
// (FB/FW/PLB/PLW ignore their argument; it exists so call sites read
// uniformly with the addressed forms)
#define RB(addr) cpu_65c02_rb(self, (addr))
#define RW(addr) cpu_65c02_rw(self, (addr))
#define RW_ZPG(addr) cpu_65c02_rw_zpg(self, (addr))
#define WB(addr, data) cpu_65c02_wb(self, (addr), (data))
#define WW(addr, data) cpu_65c02_ww(self, (addr), (data))
#define FB(addr) cpu_65c02_fb(self)
#define FW(addr) cpu_65c02_fw(self)
#define PHB(data) cpu_65c02_phb(self, (data))
#define PHW(data) cpu_65c02_phw(self, (data))
#define PLB(addr) cpu_65c02_plb(self)
#define PLW(addr) cpu_65c02_plw(self)
+
// routines that take an effective address can also operate on a register
// by means of special memory locations that are inaccessible by software
// (these are small negative values; cpu_65c02_rb/wb map them onto regs[])
#define EA_A (CPU_65C02_REG_A - CPU_65C02_N_REGS)
#define EA_X (CPU_65C02_REG_X - CPU_65C02_N_REGS)
#define EA_Y (CPU_65C02_REG_Y - CPU_65C02_N_REGS)
#define EA_S (CPU_65C02_REG_S - CPU_65C02_N_REGS)
#define EA_P (CPU_65C02_REG_P - CPU_65C02_N_REGS)

// lvalue modes
// effective address calculations that involve indexing require their
// own callable routines (which are INLINE) to check for penalty cycles
// those without indexing have their own routine just for readability
#define EA_ABS() cpu_65c02_ea_abs(self)
#define EA_ABS_IDX(rvalue) cpu_65c02_ea_abs_idx(self, (rvalue))
#define EA_ABS_IDX_IND(rvalue) cpu_65c02_ea_abs_idx_ind(self, (rvalue))
#define EA_ABS_IND() cpu_65c02_ea_abs_ind(self)
#define EA_REL() cpu_65c02_ea_rel(self)
#define EA_ZPG() cpu_65c02_ea_zpg(self)
#define EA_ZPG_IDX(rvalue) cpu_65c02_ea_zpg_idx(self, (rvalue))
#define EA_ZPG_IDX_IND(rvalue) cpu_65c02_ea_zpg_idx_ind(self, (rvalue))
#define EA_ZPG_IND() cpu_65c02_ea_zpg_ind(self)
#define EA_ZPG_IND_IDX(rvalue) cpu_65c02_ea_zpg_ind_idx(self, (rvalue))

// rvalue modes: each wraps the matching lvalue mode in a byte read
// (IMM() fetches the operand byte directly from the instruction stream)
#define IMM() FB()
#define ABS() RB(EA_ABS())
#define ABS_IDX(rvalue) RB(EA_ABS_IDX(rvalue))
#define ABS_IDX_IND(rvalue) RB(EA_ABS_IDX_IND(rvalue))
#define ABS_IND() RB(EA_ABS_IND())
#define REL() RB(EA_REL())
#define ZPG() RB(EA_ZPG())
#define ZPG_IDX(rvalue) RB(EA_ZPG_IDX(rvalue))
#define ZPG_IDX_IND(rvalue) RB(EA_ZPG_IDX_IND(rvalue))
#define ZPG_IND() RB(EA_ZPG_IND())
#define ZPG_IND_IDX(rvalue) RB(EA_ZPG_IND_IDX(rvalue))
+
// decompose the conventional opcode here using special EA values, etc
// write and read-modify-write instructions take an lvalue (EA) and do
// their own memory accesses; read instructions take an rvalue, so we
// either wrap an EA in RB(), or use FB() to fetch an immediate rvalue
#define ADC(rvalue) cpu_65c02_adc(self, (rvalue))
#define AND(rvalue) cpu_65c02_and(self, (rvalue))
#define ASL(lvalue) cpu_65c02_asl(self, (lvalue))
// branches: bc() takes the branch when the flag is clear, bs() when set
#define BCC(lvalue) cpu_65c02_bc(self, FLAG_C, (lvalue))
#define BCS(lvalue) cpu_65c02_bs(self, FLAG_C, (lvalue))
#define BEQ(lvalue) cpu_65c02_bs(self, FLAG_Z, (lvalue))
#define BIT(rvalue) cpu_65c02_bit(self, (rvalue))
#define BMI(lvalue) cpu_65c02_bs(self, FLAG_N, (lvalue))
#define BNE(lvalue) cpu_65c02_bc(self, FLAG_Z, (lvalue))
#define BPL(lvalue) cpu_65c02_bc(self, FLAG_N, (lvalue))
#define BRA(lvalue) cpu_65c02_jmp(self, (lvalue))
#define BRK() cpu_65c02_brk(self)
#define BVC(lvalue) cpu_65c02_bc(self, FLAG_V, (lvalue))
#define BVS(lvalue) cpu_65c02_bs(self, FLAG_V, (lvalue))
#define CLC() cpu_65c02_cl(self, FLAG_C)
#define CLD() cpu_65c02_cl(self, FLAG_D)
#define CLI() cpu_65c02_cl(self, FLAG_I)
#define CLV() cpu_65c02_cl(self, FLAG_V)
#define CMP(rvalue) cpu_65c02_cmp(self, A, (rvalue))
#define CPX(rvalue) cpu_65c02_cmp(self, X, (rvalue))
#define CPY(rvalue) cpu_65c02_cmp(self, Y, (rvalue))
#define DEC(lvalue) cpu_65c02_dec(self, (lvalue))
#define DEX() cpu_65c02_dec(self, EA_X)
#define DEY() cpu_65c02_dec(self, EA_Y)
#define EOR(rvalue) cpu_65c02_eor(self, (rvalue))
#define ILL() cpu_65c02_ill(self)
#define INC(lvalue) cpu_65c02_inc(self, (lvalue))
#define INX() cpu_65c02_inc(self, EA_X)
#define INY() cpu_65c02_inc(self, EA_Y)
#define JMP(lvalue) cpu_65c02_jmp(self, (lvalue))
#define JSR(lvalue) cpu_65c02_jsr(self, (lvalue))
#define LDA(rvalue) cpu_65c02_t(self, (rvalue), EA_A)
#define LDX(rvalue) cpu_65c02_t(self, (rvalue), EA_X)
#define LDY(rvalue) cpu_65c02_t(self, (rvalue), EA_Y)
#define LSR(lvalue) cpu_65c02_lsr(self, (lvalue))
#define NOP() cpu_65c02_nop(self)
#define ORA(rvalue) cpu_65c02_ora(self, (rvalue))
#define PHA() cpu_65c02_ph(self, A)
#define PHP() cpu_65c02_ph(self, P)
#define PHX() cpu_65c02_ph(self, X)
#define PHY() cpu_65c02_ph(self, Y)
#define PLA() cpu_65c02_pl(self, EA_A)
#define PLP() cpu_65c02_pl(self, EA_P)
#define PLX() cpu_65c02_pl(self, EA_X)
#define PLY() cpu_65c02_pl(self, EA_Y)
// RMBn/SMBn reset/set bit n of a zero-page byte (Rockwell extensions)
#define RMB0(lvalue) cpu_65c02_rmb(self, 1, (lvalue))
#define RMB1(lvalue) cpu_65c02_rmb(self, 2, (lvalue))
#define RMB2(lvalue) cpu_65c02_rmb(self, 4, (lvalue))
#define RMB3(lvalue) cpu_65c02_rmb(self, 8, (lvalue))
#define RMB4(lvalue) cpu_65c02_rmb(self, 0x10, (lvalue))
#define RMB5(lvalue) cpu_65c02_rmb(self, 0x20, (lvalue))
#define RMB6(lvalue) cpu_65c02_rmb(self, 0x40, (lvalue))
#define RMB7(lvalue) cpu_65c02_rmb(self, 0x80, (lvalue))
#define ROL(lvalue) cpu_65c02_rol(self, (lvalue))
#define ROR(lvalue) cpu_65c02_ror(self, (lvalue))
#define RTI() cpu_65c02_rti(self)
#define RTS() cpu_65c02_rts(self)
// binary-mode SBC is ADC of the one's complement (decimal mode aborts in adc)
#define SBC(rvalue) cpu_65c02_adc(self, (rvalue) ^ 0xff)
#define SEC() cpu_65c02_se(self, FLAG_C)
#define SED() cpu_65c02_se(self, FLAG_D)
#define SEI() cpu_65c02_se(self, FLAG_I)
#define SMB0(lvalue) cpu_65c02_smb(self, 1, (lvalue))
#define SMB1(lvalue) cpu_65c02_smb(self, 2, (lvalue))
#define SMB2(lvalue) cpu_65c02_smb(self, 4, (lvalue))
#define SMB3(lvalue) cpu_65c02_smb(self, 8, (lvalue))
#define SMB4(lvalue) cpu_65c02_smb(self, 0x10, (lvalue))
#define SMB5(lvalue) cpu_65c02_smb(self, 0x20, (lvalue))
#define SMB6(lvalue) cpu_65c02_smb(self, 0x40, (lvalue))
#define SMB7(lvalue) cpu_65c02_smb(self, 0x80, (lvalue))
// note: cpu_65c02_st() is like cpu_65c02_t() but no flags
#define STA(lvalue) cpu_65c02_st(self, A, (lvalue))
#define STX(lvalue) cpu_65c02_st(self, X, (lvalue))
#define STY(lvalue) cpu_65c02_st(self, Y, (lvalue))
#define STZ(lvalue) cpu_65c02_st(self, 0, (lvalue))
#define TAX() cpu_65c02_t(self, A, EA_X)
#define TAY() cpu_65c02_t(self, A, EA_Y)
#define TRB(lvalue) cpu_65c02_trb(self, (lvalue))
#define TSB(lvalue) cpu_65c02_tsb(self, (lvalue))
#define TSX() cpu_65c02_t(self, S, EA_X)
#define TXA() cpu_65c02_t(self, X, EA_A)
// TXS must not affect N/Z (unlike TSX), so it uses the flag-free store
#define TXS() cpu_65c02_st(self, X, EA_S)
#define TYA() cpu_65c02_t(self, Y, EA_A)
#define WAI() cpu_65c02_wai(self)
+
+// memory (or internal register memory) access
// Read one byte. Negative addresses select an internal register (the
// EA_* pseudo-locations) and cost no bus cycle; real addresses cost one
// cycle and go through the host-supplied read_byte callback.
INLINE int cpu_65c02_rb(struct cpu_65c02 *self, int addr) {
  if (addr < 0)
    return self->regs[CPU_65C02_N_REGS + addr];
  self->cycles += 1;
  return self->read_byte(addr);
}
+
// Read a little-endian word; the high-byte address wraps at 64K.
// The separate statement pins the low-before-high read order.
INLINE int cpu_65c02_rw(struct cpu_65c02 *self, int addr) {
  int data = RB(addr);
  return data | (RB((addr + 1) & 0xffff) << 8);
}
+
// Read a little-endian word from the zero page; the high byte wraps
// within the page (addr is assumed to already be < 0x100).
INLINE int cpu_65c02_rw_zpg(struct cpu_65c02 *self, int addr) {
  int data = RB(addr);
  return data | (RB((addr + 1) & 0xff) << 8);
}
+
// Write one byte. Negative addresses select an internal register.
// NOTE(review): unlike cpu_65c02_rb(), the cycle is charged even for
// register targets — confirm this asymmetry is the intended timing model.
INLINE void cpu_65c02_wb(struct cpu_65c02 *self, int addr, int data) {
  self->cycles += 1;
  if (addr < 0)
    self->regs[CPU_65C02_N_REGS + addr] = data;
  else
    self->write_byte(addr, data);
}
+
// Write a little-endian word, low byte first; high-byte address wraps at 64K.
INLINE void cpu_65c02_ww(struct cpu_65c02 *self, int addr, int data) {
  WB(addr, data & 0xff);
  WB((addr + 1) & 0xffff, data >> 8);
}
+
// Fetch one byte from the instruction stream, advancing PC with 16-bit wrap.
INLINE int cpu_65c02_fb(struct cpu_65c02 *self) {
  int data = RB(PC);
  PC = (PC + 1) & 0xffff;
  return data;
}
+
// Fetch a little-endian word from the instruction stream; the separate
// statement pins the low-before-high fetch order.
INLINE int cpu_65c02_fw(struct cpu_65c02 *self) {
  int data = FB();
  return data | (FB() << 8);
}
+
// Push one byte onto the stack (page 0x0100); S post-decrements with 8-bit wrap.
INLINE void cpu_65c02_phb(struct cpu_65c02 *self, int data) {
  WB(S | 0x100, data);
  S = (S - 1) & 0xff;
}
+
// Push a word: high byte first, then low, so a later pull sees low first.
INLINE void cpu_65c02_phw(struct cpu_65c02 *self, int data) {
  PHB(data >> 8);
  PHB(data & 0xff);
}
+
// Pull one byte from the stack; S pre-increments with 8-bit wrap.
INLINE int cpu_65c02_plb(struct cpu_65c02 *self) {
  S = (S + 1) & 0xff;
  return RB(S | 0x100);
}
+
// Pull a word: low byte first, then high (mirrors cpu_65c02_phw()).
INLINE int cpu_65c02_plw(struct cpu_65c02 *self) {
  int data = PLB();
  return data | (PLB() << 8);
}
+
// effective address calculation
// abs: the 16-bit operand is the effective address.
INLINE int cpu_65c02_ea_abs(struct cpu_65c02 *self) {
  return FW();
}
+
+INLINE int cpu_65c02_ea_abs_idx(struct cpu_65c02 *self, int rvalue) {
+ int addr = EA_ABS();
+ self->cycles += ((addr & 0xff) + (rvalue & 0xff)) >> 8;
+ return addr + rvalue;
+}
+
// (abs,x): indexed-indirect, used by JMP (abs,X).
// NOTE(review): EA_ABS_IDX adds a page-cross penalty here even though
// JMP (abs,X) has a fixed cycle count on the 65C02 — confirm intended.
INLINE int cpu_65c02_ea_abs_idx_ind(struct cpu_65c02 *self, int rvalue) {
  return RW(EA_ABS_IDX(rvalue));
}
+
// (abs): indirect, used by JMP (abs). RW wraps the pointer read at 64K,
// matching the 65C02 (which fixed the NMOS 6502 page-wrap defect).
INLINE int cpu_65c02_ea_abs_ind(struct cpu_65c02 *self) {
  return RW(EA_ABS());
}
+
+INLINE int cpu_65c02_ea_rel(struct cpu_65c02 *self) {
+ int addr = PC;
+ int rvalue = FB();
+ rvalue -= (rvalue << 1) & 0x100; // sign extend
+ self->cycles += ((addr & 0xff) + (rvalue & 0xff)) >> 8;
+ return addr + rvalue;
+}
+
// zpg: the 8-bit operand is a zero-page address.
INLINE int cpu_65c02_ea_zpg(struct cpu_65c02 *self) {
  return FB();
}
+
// zpg,idx: zero-page operand plus index, wrapping within the zero page.
INLINE int cpu_65c02_ea_zpg_idx(struct cpu_65c02 *self, int rvalue) {
  return (EA_ZPG() + rvalue) & 0xff;
}
+
// (zpg,x): index the zero-page pointer address, then read the pointer.
INLINE int cpu_65c02_ea_zpg_idx_ind(struct cpu_65c02 *self, int rvalue) {
  return RW_ZPG(EA_ZPG_IDX(rvalue));
}
+
// (zpg): read a 16-bit pointer from the zero page (65C02 addition).
INLINE int cpu_65c02_ea_zpg_ind(struct cpu_65c02 *self) {
  return RW_ZPG(EA_ZPG());
}
+
+INLINE int cpu_65c02_ea_zpg_ind_idx(struct cpu_65c02 *self, int rvalue) {
+ int addr = RW_ZPG(EA_ZPG());
+ self->cycles += ((addr & 0xff) + (rvalue & 0xff)) >> 8;
+ return addr + rvalue;
+}
+
// instruction execute
// ADC (binary mode only; decimal mode is unimplemented and aborts).
// A += operand + C; sets C, Z, V, N. V is derived from the XOR of the
// carry into bit 7 (bit 7 of `partial`, shifted to the V position) and
// the carry out of bit 7 (bit 8 of `result`, shifted to the V position).
INLINE void cpu_65c02_adc(struct cpu_65c02 *self, int rvalue) {
  if (P & FLAG_D)
    abort();
  int partial = (A & 0x7f) + (rvalue & 0x7f) + (P & FLAG_C);
  int result = A + rvalue + (P & FLAG_C);
  A = result & 0xff;
  P =
    (P & ~(FLAG_C | FLAG_Z | FLAG_V | FLAG_N)) |
    (result >> 8) | // C
    (((result & 0xff) == 0) << 1) | // Z
    (((partial >> 1) ^ (result >> 2)) & FLAG_V) |
    (result & FLAG_N);
}
+
+INLINE void cpu_65c02_and(struct cpu_65c02 *self, int rvalue) {
+ int result = A & rvalue;
+ A = result;
+ P =
+ (P & ~(FLAG_Z | FLAG_N)) |
+ ((result == 0) << 1) | // Z
+ (result & FLAG_N);
+}
+
// ASL: shift left through carry at the given EA (memory or accumulator).
// The extra cycle models the read-modify-write internal cycle.
INLINE void cpu_65c02_asl(struct cpu_65c02 *self, int lvalue) {
  int result = RB(lvalue) << 1;
  ++self->cycles;
  WB(lvalue, result & 0xff);
  P =
    (P & ~(FLAG_C | FLAG_Z | FLAG_N)) |
    (result >> 8) | // C: bit shifted out
    (((result & 0xff) == 0) << 1) | // Z
    (result & FLAG_N);
}
+
+INLINE void cpu_65c02_bc(struct cpu_65c02 *self, int flag, int lvalue) {
+ if ((P & flag) == 0)
+ PC = lvalue;
+}
+
+INLINE void cpu_65c02_bs(struct cpu_65c02 *self, int flag, int lvalue) {
+ if (P & flag)
+ PC = lvalue;
+}
+
// BIT: Z from A & operand; N and V copied from operand bits 7 and 6.
// note: on the 65C02, BIT immediate leaves N and V untouched — callers
// using this routine for that mode must handle the difference.
INLINE void cpu_65c02_bit(struct cpu_65c02 *self, int rvalue) {
  int result = A & rvalue;
  P =
    (P & ~(FLAG_Z | FLAG_V | FLAG_N)) |
    ((result == 0) << 1) | // Z
    (rvalue & (FLAG_V | FLAG_N));
}
+
// BRK: software interrupt — not implemented, aborts the process.
INLINE void cpu_65c02_brk(struct cpu_65c02 *self) {
  abort();
}
+
+INLINE void cpu_65c02_cl(struct cpu_65c02 *self, int flag) {
+ P &= ~flag;
+}
+
+INLINE void cpu_65c02_cmp(struct cpu_65c02 *self, int rvalue0, int rvalue1) {
+ rvalue1 ^= 0xff;
+ int partial = (rvalue0 & 0x7f) + (rvalue1 & 0x7f);
+ int result = rvalue0 + rvalue1;
+ P =
+ (P & ~(FLAG_C | FLAG_Z | FLAG_V | FLAG_N)) |
+ (result >> 8) | // C
+ (((result & 0xff) == 0) << 1) | // Z
+ (((partial >> 1) ^ (result >> 2)) & FLAG_V) |
+ (result & FLAG_N);
+}
+
// DEC/DEX/DEY: decrement at the given EA; the extra cycle models the
// read-modify-write internal cycle. Sets Z and N.
INLINE void cpu_65c02_dec(struct cpu_65c02 *self, int lvalue) {
  int result = (RB(lvalue) - 1) & 0xff;
  ++self->cycles;
  WB(lvalue, result);
  P =
    (P & ~(FLAG_Z | FLAG_N)) |
    ((result == 0) << 1) | // Z
    (result & FLAG_N);
}
+
+INLINE void cpu_65c02_eor(struct cpu_65c02 *self, int rvalue) {
+ int result = A ^ rvalue;
+ A = result;
+ P =
+ (P & ~(FLAG_Z | FLAG_N)) |
+ ((result == 0) << 1) | // Z
+ (result & FLAG_N);
+}
+
// illegal/unimplemented opcode — aborts the process.
INLINE void cpu_65c02_ill(struct cpu_65c02 *self) {
  abort();
}
+
// INC/INX/INY: increment at the given EA; the extra cycle models the
// read-modify-write internal cycle. Sets Z and N.
INLINE void cpu_65c02_inc(struct cpu_65c02 *self, int lvalue) {
  int result = (RB(lvalue) + 1) & 0xff;
  ++self->cycles;
  WB(lvalue, result);
  P =
    (P & ~(FLAG_Z | FLAG_N)) |
    ((result == 0) << 1) | // Z
    (result & FLAG_N);
}
+
// JMP/BRA: unconditional transfer to the effective address.
INLINE void cpu_65c02_jmp(struct cpu_65c02 *self, int lvalue) {
  PC = lvalue;
}
+
// JSR: push PC-1 (6502 convention — RTS adds the 1 back), then jump.
INLINE void cpu_65c02_jsr(struct cpu_65c02 *self, int lvalue) {
  PHW((PC - 1) & 0xffff);
  PC = lvalue;
}
+
// LSR: shift right into carry; N is always cleared (never OR'd back in).
// The extra cycle models the read-modify-write internal cycle.
INLINE void cpu_65c02_lsr(struct cpu_65c02 *self, int lvalue) {
  int result = RB(lvalue);
  ++self->cycles;
  WB(lvalue, result >> 1);
  P =
    (P & ~(FLAG_C | FLAG_Z | FLAG_N)) |
    (result & FLAG_C) | // C: bit shifted out
    (((result & 0xfe) == 0) << 1); // Z: shifted value is zero
}
+
// NOP: no operation (opcode fetch already cost its cycle).
INLINE void cpu_65c02_nop(struct cpu_65c02 *self) {
}
+
+INLINE void cpu_65c02_ora(struct cpu_65c02 *self, int rvalue) {
+ int result = A | rvalue;
+ A = result;
+ P =
+ (P & ~(FLAG_Z | FLAG_N)) |
+ ((result == 0) << 1) | // Z
+ (result & FLAG_N);
+}
+
// PHA/PHP/PHX/PHY: push a register value; flags are unaffected.
INLINE void cpu_65c02_ph(struct cpu_65c02 *self, int rvalue) {
  PHB(rvalue);
}
+
+INLINE void cpu_65c02_pl(struct cpu_65c02 *self, int lvalue) {
+ WB(lvalue, PLB());
+}
+
// RMBn: reset memory bit (mask in n) — not implemented, aborts.
INLINE void cpu_65c02_rmb(struct cpu_65c02 *self, int n, int lvalue) {
  abort();
}
+
// ROL: rotate left through carry; carry-in enters bit 0, bit 7 exits
// into C. The extra cycle models the read-modify-write internal cycle.
INLINE void cpu_65c02_rol(struct cpu_65c02 *self, int lvalue) {
  int result = (RB(lvalue) << 1) | (P & FLAG_C);
  ++self->cycles;
  WB(lvalue, result & 0xff);
  P =
    (P & ~(FLAG_C | FLAG_Z | FLAG_N)) |
    (result >> 8) | // C
    (((result & 0xff) == 0) << 1) | // Z
    (result & FLAG_N);
}
+
// ROR: rotate right through carry. The old carry is staged in bit 8 so
// it lands in bit 7 after the shift; bit 0 exits into C.
INLINE void cpu_65c02_ror(struct cpu_65c02 *self, int lvalue) {
  int result = RB(lvalue) | ((P & FLAG_C) << 8);
  ++self->cycles;
  WB(lvalue, result >> 1);
  P =
    (P & ~(FLAG_C | FLAG_Z | FLAG_N)) |
    (result & FLAG_C) | // C: bit shifted out
    (((result & 0x1fe) == 0) << 1) | // Z: shifted value is zero
    ((result >> 1) & FLAG_N); // N: the old carry
}
+
// RTI: return from interrupt — not implemented, aborts.
INLINE void cpu_65c02_rti(struct cpu_65c02 *self) {
  abort();
}
+
// RTS: pull the return address and add 1 (JSR pushed PC-1).
INLINE void cpu_65c02_rts(struct cpu_65c02 *self) {
  PC = (PLW() + 1) & 0xffff;
}
+
+INLINE void cpu_65c02_se(struct cpu_65c02 *self, int flag) {
+ P |= flag;
+}
+
// SMBn: set memory bit (mask in n) — not implemented, aborts.
INLINE void cpu_65c02_smb(struct cpu_65c02 *self, int n, int lvalue) {
  abort();
}
+
// store without touching flags (STA/STX/STY/STZ).
INLINE void cpu_65c02_st(struct cpu_65c02 *self, int rvalue, int lvalue) {
  WB(lvalue, rvalue);
}
+
// transfer/load with flags: store rvalue at the EA and set Z and N from
// it (LDA/LDX/LDY and the flag-setting transfers).
INLINE void cpu_65c02_t(struct cpu_65c02 *self, int rvalue, int lvalue) {
  WB(lvalue, rvalue);
  P =
    (P & ~(FLAG_Z | FLAG_N)) |
    ((rvalue == 0) << 1) | // Z
    (rvalue & FLAG_N);
}
+
// TRB: test and reset bits — not implemented, aborts.
INLINE void cpu_65c02_trb(struct cpu_65c02 *self, int lvalue) {
  abort();
}
+
// TSB: test and set bits — not implemented, aborts.
INLINE void cpu_65c02_tsb(struct cpu_65c02 *self, int lvalue) {
  abort();
}
+
// WAI: wait for interrupt — not implemented, aborts.
INLINE void cpu_65c02_wai(struct cpu_65c02 *self) {
  abort();
}
+
+// instruction decode
+void cpu_65c02_execute(struct cpu_65c02 *self) {
+ switch (FB()) {
+ case 0x00:
+ BRK();
+ break;
+ case 0x01:
+ ORA(ZPG_IDX_IND(X));
+ break;
+ case 0x02:
+ ILL();
+ break;
+ case 0x03:
+ ILL();
+ break;
+ case 0x04:
+ TSB(EA_ZPG());
+ break;
+ case 0x05:
+ ORA(ZPG());
+ break;
+ case 0x06:
+ ASL(EA_ZPG());
+ break;
+ case 0x07:
+ RMB0(EA_ZPG());
+ break;
+ case 0x08:
+ PHP();
+ break;
+ case 0x09:
+ ORA(IMM());
+ break;
+ case 0x0a:
+ ASL(EA_A);
+ break;
+ case 0x0b:
+ ILL();
+ break;
+ case 0x0c:
+ TSB(EA_ABS());
+ break;
+ case 0x0d:
+ ORA(ABS());
+ break;
+ case 0x0e:
+ ASL(EA_ABS());
+ break;
+ case 0x0f:
+ ILL();
+ break;
+ case 0x10:
+ BPL(EA_REL());
+ break;
+ case 0x11:
+ ORA(ZPG_IND_IDX(Y));
+ break;
+ case 0x12:
+ ORA(ZPG_IND());
+ break;
+ case 0x13:
+ ILL();
+ break;
+ case 0x14:
+ TRB(EA_ZPG());
+ break;
+ case 0x15:
+ ORA(ZPG_IDX(X));
+ break;
+ case 0x16:
+ ASL(EA_ZPG_IDX(X));
+ break;
+ case 0x17:
+ RMB1(EA_ZPG());
+ break;
+ case 0x18:
+ CLC();
+ break;
+ case 0x19:
+ ORA(ABS_IDX(Y));
+ break;
+ case 0x1a:
+ INC(EA_A);
+ break;
+ case 0x1b:
+ ILL();
+ break;
+ case 0x1c:
+ TRB(EA_ABS());
+ break;
+ case 0x1d:
+ ORA(ABS_IDX(X));
+ break;
+ case 0x1e:
+ ASL(EA_ABS_IDX(X));
+ break;
+ case 0x1f:
+ ILL();
+ break;
+ case 0x20:
+ JSR(EA_ABS());
+ break;
+ case 0x21:
+ AND(ZPG_IDX_IND(X));
+ break;
+ case 0x22:
+ ILL();
+ break;
+ case 0x23:
+ ILL();
+ break;
+ case 0x24:
+ BIT(ZPG());
+ break;
+ case 0x25:
+ AND(ZPG());
+ break;
+ case 0x26:
+ ROL(EA_ZPG());
+ break;
+ case 0x27:
+ RMB2(EA_ZPG());
+ break;
+ case 0x28:
+ PLP();
+ break;
+ case 0x29:
+ AND(IMM());
+ break;
+ case 0x2a:
+ ROL(EA_A);
+ break;
+ case 0x2b:
+ ILL();
+ break;
+ case 0x2c:
+ BIT(ABS());
+ break;
+ case 0x2d:
+ AND(ABS());
+ break;
+ case 0x2e:
+ ROL(EA_ABS());
+ break;
+ case 0x2f:
+ ILL();
+ break;
+ case 0x30:
+ BMI(EA_REL());
+ break;
+ case 0x31:
+ AND(ZPG_IND_IDX(Y));
+ break;
+ case 0x32:
+ AND(ZPG_IND());
+ break;
+ case 0x33:
+ ILL();
+ break;
+ case 0x34:
+ BIT(ZPG_IDX(X));
+ break;
+ case 0x35:
+ AND(ZPG_IDX(X));
+ break;
+ case 0x36:
+ ROL(EA_ZPG_IDX(X));
+ break;
+ case 0x37:
+ RMB3(EA_ZPG());
+ break;
+ case 0x38:
+ SEC();
+ break;
+ case 0x39:
+ AND(ABS_IDX(Y));
+ break;
+ case 0x3a:
+ DEC(EA_A);
+ break;
+ case 0x3b:
+ ILL();
+ break;
+ case 0x3c:
+ BIT(ABS_IDX(X));
+ break;
+ case 0x3d:
+ AND(ABS_IDX(X));
+ break;
+ case 0x3e:
+ ROL(EA_ABS_IDX(X));
+ break;
+ case 0x3f:
+ ILL();
+ break;
+ case 0x40:
+ RTI();
+ break;
+ case 0x41:
+ EOR(ZPG_IDX_IND(X));
+ break;
+ case 0x42:
+ ILL();
+ break;
+ case 0x43:
+ ILL();
+ break;
+ case 0x44:
+ ILL();
+ break;
+ case 0x45:
+ EOR(ZPG());
+ break;
+ case 0x46:
+ LSR(EA_ZPG());
+ break;
+ case 0x47:
+ RMB4(EA_ZPG());
+ break;
+ case 0x48:
+ PHA();
+ break;
+ case 0x49:
+ EOR(IMM());
+ break;
+ case 0x4a:
+ LSR(EA_A);
+ break;
+ case 0x4b:
+ ILL();
+ break;
+ case 0x4c:
+ JMP(EA_ABS());
+ break;
+ case 0x4d:
+ EOR(ABS());
+ break;
+ case 0x4e:
+ LSR(EA_ABS());
+ break;
+ case 0x4f:
+ ILL();
+ break;
+ case 0x50:
+ BVC(EA_REL());
+ break;
+ case 0x51:
+ EOR(ZPG_IND_IDX(Y));
+ break;
+ case 0x52:
+ EOR(ZPG_IND());
+ break;
+ case 0x53:
+ ILL();
+ break;
+ case 0x54:
+ ILL();
+ break;
+ case 0x55:
+ EOR(ZPG_IDX(X));
+ break;
+ case 0x56:
+ LSR(EA_ZPG_IDX(X));
+ break;
+ case 0x57:
+ RMB5(EA_ZPG());
+ break;
+ case 0x58:
+ CLI();
+ break;
+ case 0x59:
+ EOR(ABS_IDX(Y));
+ break;
+ case 0x5a:
+ PHY();
+ break;
+ case 0x5b:
+ ILL();
+ break;
+ case 0x5c:
+ ILL();
+ break;
+ case 0x5d:
+ EOR(ABS_IDX(X));
+ break;
+ case 0x5e:
+ LSR(EA_ABS_IDX(X));
+ break;
+ case 0x5f:
+ ILL();
+ break;
+ case 0x60:
+ RTS();
+ break;
+ case 0x61:
+ ADC(ZPG_IDX_IND(X));
+ break;
+ case 0x62:
+ ILL();
+ break;
+ case 0x63:
+ ILL();
+ break;
+ case 0x64:
+ STZ(EA_ZPG());
+ break;
+ case 0x65:
+ ADC(ZPG());
+ break;
+ case 0x66:
+ ROR(EA_ZPG());
+ break;
+ case 0x67:
+ RMB6(EA_ZPG());
+ break;
+ case 0x68:
+ PLA();
+ break;
+ case 0x69:
+ ADC(IMM());
+ break;
+ case 0x6a:
+ ROR(EA_A);
+ break;
+ case 0x6b:
+ ILL();
+ break;
+ case 0x6c:
+ JMP(EA_ABS_IND());
+ break;
+ case 0x6d:
+ ADC(ABS());
+ break;
+ case 0x6e:
+ ROR(EA_ABS());
+ break;
+ case 0x6f:
+ ILL();
+ break;
+ case 0x70:
+ BVS(EA_REL());
+ break;
+ case 0x71:
+ ADC(ZPG_IND_IDX(Y));
+ break;
+ case 0x72:
+ ADC(ZPG_IND());
+ break;
+ case 0x73:
+ ILL();
+ break;
+ case 0x74:
+ STZ(EA_ZPG_IDX(X));
+ break;
+ case 0x75:
+ ADC(ZPG_IDX(X));
+ break;
+ case 0x76:
+ ROR(EA_ZPG_IDX(X));
+ break;
+ case 0x77:
+ RMB7(EA_ZPG());
+ break;
+ case 0x78:
+ SEI();
+ break;
+ case 0x79:
+ ADC(ABS_IDX(Y));
+ break;
+ case 0x7a:
+ PLY();
+ break;
+ case 0x7b:
+ ILL();
+ break;
+ case 0x7c:
+ JMP(EA_ABS_IDX_IND(X));
+ break;
+ case 0x7d:
+ ADC(ABS_IDX(X));
+ break;
+ case 0x7e:
+ ROR(EA_ABS_IDX(X));
+ break;
+ case 0x7f:
+ ILL();
+ break;
+ case 0x80:
+ BRA(EA_REL());
+ break;
+ case 0x81:
+ STA(EA_ZPG_IDX_IND(X));
+ break;
+ case 0x82:
+ ILL();
+ break;
+ case 0x83:
+ ILL();
+ break;
+ case 0x84:
+ STY(EA_ZPG());
+ break;
+ case 0x85:
+ STA(EA_ZPG());
+ break;
+ case 0x86:
+ STX(EA_ZPG());
+ break;
+ case 0x87:
+ SMB0(EA_ZPG());
+ break;
+ case 0x88:
+ DEY();
+ break;
+ case 0x89:
+ BIT(IMM());
+ break;
+ case 0x8a:
+ TXA();
+ break;
+ case 0x8b:
+ ILL();
+ break;
+ case 0x8c:
+ STY(EA_ABS());
+ break;
+ case 0x8d:
+ STA(EA_ABS());
+ break;
+ case 0x8e:
+ STX(EA_ABS());
+ break;
+ case 0x8f:
+ ILL();
+ break;
+ case 0x90:
+ BCC(EA_REL());
+ break;
+ case 0x91:
+ STA(EA_ZPG_IND_IDX(Y));
+ break;
+ case 0x92:
+ STA(EA_ZPG_IND());
+ break;
+ case 0x93:
+ ILL();
+ break;
+ case 0x94:
+ STY(EA_ZPG_IDX(X));
+ break;
+ case 0x95:
+ STA(EA_ZPG_IDX(X));
+ break;
+ case 0x96:
+ STX(EA_ZPG_IDX(Y));
+ break;
+ case 0x97:
+ SMB1(EA_ZPG());
+ break;
+ case 0x98:
+ TYA();
+ break;
+ case 0x99:
+ STA(EA_ABS_IDX(Y));
+ break;
+ case 0x9a:
+ TXS();
+ break;
+ case 0x9b:
+ ILL();
+ break;
+ case 0x9c:
+ STZ(EA_ABS());
+ break;
+ case 0x9d:
+ STA(EA_ABS_IDX(X));
+ break;
+ case 0x9e:
+ STZ(EA_ABS_IDX(X));
+ break;
+ case 0x9f:
+ ILL();
+ break;
+ case 0xa0:
+ LDY(IMM());
+ break;
+ case 0xa1:
+ LDA(ZPG_IDX_IND(X));
+ break;
+ case 0xa2:
+ LDX(IMM());
+ break;
+ case 0xa3:
+ ILL();
+ break;
+ case 0xa4:
+ LDY(ZPG());
+ break;
+ case 0xa5:
+ LDA(ZPG());
+ break;
+ case 0xa6:
+ LDX(ZPG());
+ break;
+ case 0xa7:
+ SMB2(EA_ZPG());
+ break;
+ case 0xa8:
+ TAY();
+ break;
+ case 0xa9:
+ LDA(IMM());
+ break;
+ case 0xaa:
+ TAX();
+ break;
+ case 0xab:
+ ILL();
+ break;
+ case 0xac:
+ LDY(ABS());
+ break;
+ case 0xad:
+ LDA(ABS());
+ break;
+ case 0xae:
+ LDX(ABS());
+ break;
+ case 0xaf:
+ ILL();
+ break;
+ case 0xb0:
+ BCS(EA_REL());
+ break;
+ case 0xb1:
+ LDA(ZPG_IND_IDX(Y));
+ break;
+ case 0xb2:
+ LDA(ZPG_IND());
+ break;
+ case 0xb3:
+ ILL();
+ break;
+ case 0xb4:
+ LDY(ZPG_IDX(X));
+ break;
+ case 0xb5:
+ LDA(ZPG_IDX(X));
+ break;
+ case 0xb6:
+ LDX(ZPG_IDX(Y));
+ break;
+ case 0xb7:
+ SMB3(EA_ZPG());
+ break;
+ case 0xb8:
+ CLV();
+ break;
+ case 0xb9:
+ LDA(ABS_IDX(Y));
+ break;
+ case 0xba:
+ TSX();
+ break;
+ case 0xbb:
+ ILL();
+ break;
+ case 0xbc:
+ LDY(ABS_IDX(X));
+ break;
+ case 0xbd:
+ LDA(ABS_IDX(X));
+ break;
+ case 0xbe:
+ LDX(ABS_IDX(Y));
+ break;
+ case 0xbf:
+ ILL();
+ break;
+ case 0xc0:
+ CPY(IMM());
+ break;
+ case 0xc1:
+ CMP(ZPG_IDX_IND(X));
+ break;
+ case 0xc2:
+ ILL();
+ break;
+ case 0xc3:
+ ILL();
+ break;
+ case 0xc4:
+ CPY(ZPG());
+ break;
+ case 0xc5:
+ CMP(ZPG());
+ break;
+ case 0xc6:
+ DEC(EA_ZPG());
+ break;
+ case 0xc7:
+ SMB4(EA_ZPG());
+ break;
+ case 0xc8:
+ INY();
+ break;
+ case 0xc9:
+ CMP(IMM());
+ break;
+ case 0xca:
+ DEX();
+ break;
+ case 0xcb:
+ WAI();
+ break;
+ case 0xcc:
+ CPY(ABS());
+ break;
+ case 0xcd:
+ CMP(ABS());
+ break;
+ case 0xce:
+ DEC(EA_ABS());
+ break;
+ case 0xcf:
+ ILL();
+ break;
+ case 0xd0:
+ BNE(EA_REL());
+ break;
+ case 0xd1:
+ CMP(ZPG_IND_IDX(Y));
+ break;
+ case 0xd2:
+ CMP(ZPG_IND());
+ break;
+ case 0xd3:
+ ILL();
+ break;
+ case 0xd4:
+ ILL();
+ break;
+ case 0xd5:
+ CMP(ZPG_IDX(X));
+ break;
+ case 0xd6:
+ DEC(EA_ZPG_IDX(X));
+ break;
+ case 0xd7:
+ SMB5(EA_ZPG());
+ break;
+ case 0xd8:
+ CLD();
+ break;
+ case 0xd9:
+ CMP(ABS_IDX(Y));
+ break;
+ case 0xda:
+ PHX();
+ break;
+ case 0xdb:
+ ILL();
+ break;
+ case 0xdc:
+ ILL();
+ break;
+ case 0xdd:
+ CMP(ABS_IDX(X));
+ break;
+ case 0xde:
+ DEC(EA_ABS_IDX(X));
+ break;
+ case 0xdf:
+ ILL();
+ break;
+ case 0xe0:
+ CPX(IMM());
+ break;
+ case 0xe1:
+ SBC(ZPG_IDX_IND(X));
+ break;
+ case 0xe2:
+ ILL();
+ break;
+ case 0xe3:
+ ILL();
+ break;
+ case 0xe4:
+ CPX(ZPG());
+ break;
+ case 0xe5:
+ SBC(ZPG());
+ break;
+ case 0xe6:
+ INC(EA_ZPG());
+ break;
+ case 0xe7:
+ SMB6(EA_ZPG());
+ break;
+ case 0xe8:
+ INX();
+ break;
+ case 0xe9:
+ SBC(IMM());
+ break;
+ case 0xea:
+ NOP();
+ break;
+ case 0xeb:
+ ILL();
+ break;
+ case 0xec:
+ CPX(ABS());
+ break;
+ case 0xed:
+ SBC(ABS());
+ break;
+ case 0xee:
+ INC(EA_ABS());
+ break;
+ case 0xef:
+ ILL();
+ break;
+ case 0xf0:
+ BEQ(EA_REL());
+ break;
+ case 0xf1:
+ SBC(ZPG_IND_IDX(Y));
+ break;
+ case 0xf2:
+ SBC(ZPG_IND());
+ break;
+ case 0xf3:
+ ILL();
+ break;
+ case 0xf4:
+ ILL();
+ break;
+ case 0xf5:
+ SBC(ZPG_IDX(X));
+ break;
+ case 0xf6:
+ INC(EA_ZPG_IDX(X));
+ break;
+ case 0xf7:
+ SMB7(EA_ZPG());
+ break;
+ case 0xf8:
+ SED();
+ break;
+ case 0xf9:
+ SBC(ABS_IDX(Y));
+ break;
+ case 0xfa:
+ PLX();
+ break;
+ case 0xfb:
+ ILL();
+ break;
+ case 0xfc:
+ ILL();
+ break;
+ case 0xfd:
+ SBC(ABS_IDX(X));
+ break;
+ case 0xfe:
+ INC(EA_ABS_IDX(X));
+ break;
+ case 0xff:
+ ILL();
+ break;
+ }
+}