--- np2/i386c/ia32/instructions/system_inst.c	2004/02/20 16:09:05	1.18
+++ np2/i386c/ia32/instructions/system_inst.c	2008/01/25 18:02:18	1.32
@@ -1,4 +1,4 @@
-/*	$Id: system_inst.c,v 1.18 2004/02/20 16:09:05 monaka Exp $	*/
+/*	$Id: system_inst.c,v 1.32 2008/01/25 18:02:18 monaka Exp $	*/
 
 /*
  * Copyright (c) 2003 NONAKA Kimihiro
@@ -12,8 +12,6 @@
  * 2. Redistributions in binary form must reproduce the above copyright
  *    notice, this list of conditions and the following disclaimer in the
  *    documentation and/or other materials provided with the distribution.
- * 3. The name of the author may not be used to endorse or promote products
- *    derived from this software without specific prior written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
@@ -90,7 +88,8 @@ SGDT_Ms(UINT32 op)
 void
 LLDT_Ew(UINT32 op)
 {
-	UINT32 src, madr;
+	UINT32 madr;
+	UINT16 src;
 
 	if (CPU_STAT_PM && !CPU_STAT_VM86) {
 		if (CPU_STAT_CPL == 0) {
@@ -102,7 +101,7 @@ LLDT_Ew(UINT32 op)
 				madr = calc_ea_dst(op);
 				src = cpu_vmemoryread_w(CPU_INST_SEGREG_INDEX, madr);
 			}
-			load_ldtr((UINT16)src, GP_EXCEPTION);
+			load_ldtr(src, GP_EXCEPTION);
 			return;
 		}
 		VERBOSE(("LLDT: CPL(%d) != 0", CPU_STAT_CPL));
@@ -141,7 +140,8 @@ SLDT_Ew(UINT32 op)
 void
 LTR_Ew(UINT32 op)
 {
-	UINT32 src, madr;
+	UINT32 madr;
+	UINT16 src;
 
 	if (CPU_STAT_PM && !CPU_STAT_VM86) {
 		if (CPU_STAT_CPL == 0) {
@@ -153,7 +153,7 @@ LTR_Ew(UINT32 op)
 				madr = calc_ea_dst(op);
 				src = cpu_vmemoryread_w(CPU_INST_SEGREG_INDEX, madr);
 			}
-			load_tr((UINT16)src);
+			load_tr(src);
 			return;
 		}
 		VERBOSE(("LTR: CPL(%d) != 0", CPU_STAT_CPL));
@@ -285,17 +285,18 @@ MOV_CdRd(void)
 			}
 
 			reg = CPU_CR0;
-			src &= 0xe005003f;
-#ifndef USE_FPU
-			src &= ~CPU_CR0_ET;	/* FPU not present */
-#else
+			src &= CPU_CR0_ALL;
+#if defined(USE_FPU)
 			src |= CPU_CR0_ET;	/* FPU present */
+#else
+			src |= CPU_CR0_EM | CPU_CR0_NE;
+			src &= ~(CPU_CR0_MP | CPU_CR0_ET);
 #endif
 			CPU_CR0 = src;
 			VERBOSE(("MOV_CdRd: %04x:%08x: cr0: 0x%08x <- 0x%08x(%s)", CPU_CS, CPU_PREV_EIP, reg, CPU_CR0, reg32_str[op & 7]));
 
 			if ((reg ^ CPU_CR0) & (CPU_CR0_PE|CPU_CR0_PG)) {
-				tlb_flush(FALSE);
+				tlb_flush(TRUE);
 			}
 			if ((reg ^ CPU_CR0) & CPU_CR0_PE) {
 				if (CPU_CR0 & CPU_CR0_PE) {
@@ -335,7 +336,6 @@ MOV_CdRd(void)
 			VERBOSE(("MOV_CdRd: %04x:%08x: cr3: 0x%08x <- 0x%08x(%s)", CPU_CS, CPU_PREV_EIP, reg, CPU_CR3, reg32_str[op & 7]));
 			break;
 
-#if CPU_FAMILY >= 5
 		case 4:	/* CR4 */
 			/*
 			 * 10 = OSXMMEXCPT (support non masking exception by OS)
@@ -350,7 +350,11 @@ MOV_CdRd(void)
 			 * 1 = PVI (protected mode virtual interrupt)
 			 * 0 = VME (VM8086 mode extention)
 			 */
-			reg = 0;	/* allow */
+			reg = 0		/* allow bit */
+#if (CPU_FEATURES & CPU_FEATURE_PGE) == CPU_FEATURE_PGE
+			    | CPU_CR4_PGE
+#endif
+			;
 			if (src & ~reg) {
 				if (src & 0xfffffc00) {
 					EXCEPTION(GP_EXCEPTION, 0);
@@ -363,10 +367,9 @@ MOV_CdRd(void)
 			VERBOSE(("MOV_CdRd: %04x:%08x: cr4: 0x%08x <- 0x%08x(%s)", CPU_CS, CPU_PREV_EIP, reg, CPU_CR4, reg32_str[op & 7]));
 
 			if ((reg ^ CPU_CR4) & (CPU_CR4_PSE|CPU_CR4_PGE|CPU_CR4_PAE)) {
-				tlb_flush(FALSE);
+				tlb_flush(TRUE);
 			}
 			break;
-#endif	/* CPU_FAMILY >= 5 */
 
 		default:
 			ia32_panic("MOV_CdRd: CR reg index (%d)", idx);
@@ -389,7 +392,7 @@ MOV_RdCd(void)
 	GET_PCBYTE(op);
 	if (op >= 0xc0) {
 		if (CPU_STAT_PM && (CPU_STAT_VM86 || CPU_STAT_CPL != 0)) {
-			VERBOSE(("MOV_CdRd: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
+			VERBOSE(("MOV_RdCd: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
 			EXCEPTION(GP_EXCEPTION, 0);
 		}
 
@@ -409,11 +412,9 @@ MOV_RdCd(void)
 			*out = CPU_CR3;
 			break;
 
-#if CPU_FAMILY >= 5
 		case 4:
 			*out = CPU_CR4;
 			break;
-#endif	/* CPU_FAMILY >= 5 */
 
 		default:
 			ia32_panic("MOV_RdCd: CR reg index (%d)", idx);
@@ -484,7 +485,8 @@ CLTS(void)
 void
 ARPL_EwGw(void)
 {
-	UINT32 op, src, dst, madr;
+	UINT32 op, madr;
+	UINT src, dst;
 
 	if (CPU_STAT_PM && !CPU_STAT_VM86) {
 		PREPART_EA_REG16(op, src);
@@ -840,15 +842,142 @@ VERW_Ew(UINT32 op)
 void
 MOV_DdRd(void)
 {
+	UINT32 src;
+	UINT op;
+	int idx;
+#if defined(IA32_SUPPORT_DEBUG_REGISTER)
+	int i;
+#endif
+
+	CPU_WORKCLOCK(11);
+	GET_PCBYTE(op);
+	if (op >= 0xc0) {
+		if (CPU_STAT_PM && (CPU_STAT_VM86 || CPU_STAT_CPL != 0)) {
+			VERBOSE(("MOV_DdRd: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
+			EXCEPTION(GP_EXCEPTION, 0);
+		}
+
+		if (CPU_DR7 & CPU_DR7_GD) {
+			CPU_DR6 |= CPU_DR6_BD;
+			CPU_DR7 &= ~CPU_DR7_GD;
+			EXCEPTION(DB_EXCEPTION, 0);
+		}
+
+		src = *(reg32_b20[op]);
+		idx = (op >> 3) & 7;
+
+		CPU_DR(idx) = src;
+		switch (idx) {
+		case 0:
+		case 1:
+		case 2:
+		case 3:
+			CPU_DR(idx) = src;
+			break;
 
-	ia32_panic("MOV_DdRd: not implemented yet!");
+#if CPU_FAMILY >= 5
+		case 4:
+			if (CPU_CR4 & CPU_CR4_DE) {
+				EXCEPTION(UD_EXCEPTION, 0);
+			}
+#endif
+		case 6:
+			CPU_DR6 = src;
+			break;
+
+#if CPU_FAMILY >= 5
+		case 5:
+			if (CPU_CR4 & CPU_CR4_DE) {
+				EXCEPTION(UD_EXCEPTION, 0);
+			}
+#endif
+		case 7:
+			CPU_DR7 = src;
+			CPU_STAT_BP = 0;
+#if defined(IA32_SUPPORT_DEBUG_REGISTER)
+			for (i = 0; i < CPU_DEBUG_REG_INDEX_NUM; i++) {
+				if (CPU_DR7 & (CPU_DR7_L(i)|CPU_DR7_G(i))) {
+					CPU_STAT_BP |= (1 << i);
+				}
+			}
+#endif	/* IA32_SUPPORT_DEBUG_REGISTER */
+			break;
+
+		default:
+			ia32_panic("MOV_DdRd: DR reg index (%d)", idx);
+			/*NOTREACHED*/
+			break;
+		}
+
+		return;
+	}
+	EXCEPTION(UD_EXCEPTION, 0);
 }
 
 void
 MOV_RdDd(void)
 {
+	UINT32 *out;
+	UINT op;
+	int idx;
+
+	CPU_WORKCLOCK(11);
+	GET_PCBYTE(op);
+	if (op >= 0xc0) {
+		if (CPU_STAT_PM && (CPU_STAT_VM86 || CPU_STAT_CPL != 0)) {
+			VERBOSE(("MOV_RdDd: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
+			EXCEPTION(GP_EXCEPTION, 0);
+		}
+
+		if (CPU_DR7 & CPU_DR7_GD) {
+			CPU_DR6 |= CPU_DR6_BD;
+			CPU_DR7 &= ~CPU_DR7_GD;
+			EXCEPTION(DB_EXCEPTION, 0);
+		}
+
+		out = reg32_b20[op];
+		idx = (op >> 3) & 7;
+
+		switch (idx) {
+		case 0:
+		case 1:
+		case 2:
+		case 3:
+			*out = CPU_DR(idx);
+			break;
+
+		case 4:
+#if CPU_FAMILY >= 5
+			if (CPU_CR4 & CPU_CR4_DE) {
+				EXCEPTION(UD_EXCEPTION, 0);
+			}
+#endif
+		case 6:
+#if CPU_FAMILY == 4
+			*out = (CPU_DR6 & 0x0000f00f) | 0xffff0ff0;
+#elif CPU_FAMILY >= 5
+			*out = (CPU_DR6 & 0x0000e00f) | 0xffff0ff0;
+#endif
+			break;
 
-	ia32_panic("MOV_DdRd: not implemented yet!");
+#if CPU_FAMILY >= 5
+		case 5:
+			if (CPU_CR4 & CPU_CR4_DE) {
+				EXCEPTION(UD_EXCEPTION, 0);
+			}
+#endif
+		case 7:
+			*out = CPU_DR7;
+			break;
+
+		default:
+			ia32_panic("MOV_RdDd: DR reg index (%d)", idx);
+			/*NOTREACHED*/
+			break;
+		}
+		return;
+	}
+	EXCEPTION(UD_EXCEPTION, 0);
 }
 
 void
@@ -860,7 +989,6 @@ INVD(void)
 		VERBOSE(("INVD: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
 		EXCEPTION(GP_EXCEPTION, 0);
 	}
-	/* nothing to do */
 }
 
 void
@@ -872,13 +1000,15 @@ WBINVD(void)
 		VERBOSE(("WBINVD: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
 		EXCEPTION(GP_EXCEPTION, 0);
 	}
-	/* nothing to do */
 }
 
 void
 INVLPG(UINT32 op)
 {
+	descriptor_t *sd;
 	UINT32 madr;
+	int idx;
+	int exc;
 
 	if (CPU_STAT_PM && (CPU_STAT_VM86 || CPU_STAT_CPL != 0)) {
 		VERBOSE(("INVLPG: VM86(%s) or CPL(%d) != 0", CPU_STAT_VM86 ? "true" : "false", CPU_STAT_CPL));
@@ -888,10 +1018,40 @@ INVLPG(UINT32 op)
 	if (op < 0xc0) {
 		CPU_WORKCLOCK(11);
 		madr = calc_ea_dst(op);
-		tlb_flush_page(madr);
+
+		idx = CPU_INST_SEGREG_INDEX;
+		sd = &CPU_STAT_SREG(idx);
+		if (!sd->valid) {
+			exc = GP_EXCEPTION;
+			goto err;
+		}
+		switch (sd->type) {
+		case 4: case 5: case 6: case 7:
+			if (madr <= sd->u.seg.limit) {
+				if (idx == CPU_SS_INDEX)
+					exc = SS_EXCEPTION;
+				else
+					exc = GP_EXCEPTION;
+				goto err;
+			}
+			break;
+
+		default:
+			if (madr > sd->u.seg.limit) {
+				if (idx == CPU_SS_INDEX)
+					exc = SS_EXCEPTION;
+				else
+					exc = GP_EXCEPTION;
+				goto err;
+			}
+			break;
+		}
+		tlb_flush_page(sd->u.seg.segbase + madr);
 		return;
 	}
-	EXCEPTION(UD_EXCEPTION, 0);
+	exc = UD_EXCEPTION;
+err:
+	EXCEPTION(exc, 0);
 }
 
 void
@@ -952,7 +1112,7 @@ WRMSR(void)
 	idx = CPU_ECX;
 	switch (idx) {
-	/* MTRR への書き込み時 tlb_flush(FALSE); */
+	/* MTRR への書き込み時 tlb_flush(TRUE); */
 	default:
 		EXCEPTION(GP_EXCEPTION, 0);