| version 1.23, 2011/01/15 17:17:23 | version 1.35, 2012/06/18 14:30:27 |
|---|---|
| Line 31 | Line 31 |
| /* | /* |
| * memory access check | * memory access check |
| */ | */ |
| static int MEMCALL check_limit_upstairs(descriptor_t *sdp, UINT32 offset, UINT len); | static int MEMCALL check_limit_upstairs(descriptor_t *sdp, UINT32 offset, UINT len, BOOL is32bit); |
| static void MEMCALL cpu_memoryread_check(descriptor_t *sdp, UINT32 offset, UINT len, int e); | static void MEMCALL cpu_memoryread_check(descriptor_t *sdp, UINT32 offset, UINT len, int e); |
| static void MEMCALL cpu_memorywrite_check(descriptor_t *sdp, UINT32 offset, UINT len, int e); | static void MEMCALL cpu_memorywrite_check(descriptor_t *sdp, UINT32 offset, UINT len, int e); |
| static int MEMCALL | static int MEMCALL |
| check_limit_upstairs(descriptor_t *sdp, UINT32 offset, UINT len) | check_limit_upstairs(descriptor_t *sdp, UINT32 offset, UINT len, BOOL is32bit) |
| { | { |
| UINT32 limit; | UINT32 limit; |
| UINT32 end; | UINT32 end; |
| Line 46 check_limit_upstairs(descriptor_t *sdp, | Line 46 check_limit_upstairs(descriptor_t *sdp, |
| len--; | len--; |
| end = offset + len; | end = offset + len; |
| limit = SEG_IS_32BIT(sdp) ? 0xffffffff : 0x0000ffff; | |
| if (SEG_IS_DATA(sdp) && SEG_IS_EXPANDDOWN_DATA(sdp)) { | if (SEG_IS_DATA(sdp) && SEG_IS_EXPANDDOWN_DATA(sdp)) { |
| /* expand-down data segment */ | /* expand-down data segment */ |
| limit = SEG_IS_32BIT(sdp) ? 0xffffffff : 0x0000ffff; | |
| if (sdp->u.seg.limit == 0) { | if (sdp->u.seg.limit == 0) { |
| /* | /* |
| * 32bit 16bit | * 32bit 16bit |
| Line 64 check_limit_upstairs(descriptor_t *sdp, | Line 64 check_limit_upstairs(descriptor_t *sdp, |
| if (!SEG_IS_32BIT(sdp)) { | if (!SEG_IS_32BIT(sdp)) { |
| if ((len > limit) /* len check */ | if ((len > limit) /* len check */ |
| || (end > limit)) { /* [1] */ | || (end > limit)) { /* [1] */ |
| return 0; | goto exc; |
| } | } |
| } else { | } else { |
| sdp->flag |= CPU_DESC_FLAG_WHOLEADR; | sdp->flag |= CPU_DESC_FLAG_WHOLEADR; |
| Line 88 check_limit_upstairs(descriptor_t *sdp, | Line 88 check_limit_upstairs(descriptor_t *sdp, |
| || (end < offset) /* wrap check */ | || (end < offset) /* wrap check */ |
| || (offset < sdp->u.seg.limit) /* [1] */ | || (offset < sdp->u.seg.limit) /* [1] */ |
| || (end > limit)) { /* [2] */ | || (end > limit)) { /* [2] */ |
| return 0; | goto exc; |
| } | } |
| } | } |
| } else { | } else { |
| /* expand-up data or code segment */ | /* expand-up data or code segment */ |
| if (sdp->u.seg.limit == limit) { | if (sdp->u.seg.limit == 0xffffffff) { |
| /* | /* |
| * 32bit 16bit | * 16/32bit |
| * +-------+ +-------+ FFFFFFFFh | * +-------+ FFFFFFFFh |
| * | | | | | * | | |
| * | | + [1] + 0000FFFFh | * | | |
| * | valid | | | | * | valid | |
| * | | +-------+ 0000FFFFh - len - 1 | * | | |
| * | | | valid | | * | | |
| * +-------+ +-------+ 00000000h | * +-------+ 00000000h |
| */ | */ |
| if (!SEG_IS_32BIT(sdp)) { | sdp->flag |= CPU_DESC_FLAG_WHOLEADR; |
| if ((len > limit) /* len check */ | |
| || (offset + len > limit)) { /* [1] */ | |
| return 0; | |
| } | |
| } else { | |
| sdp->flag |= CPU_DESC_FLAG_WHOLEADR; | |
| } | |
| } else { | } else { |
| /* | /* |
| * 32bit 16bit | * 16/32bit |
| * +-------+ +-------+ FFFFFFFFh | * +-------+ FFFFFFFFh |
| * | | | | | * | | |
| * | | +.......+ 0000FFFFh | * | | |
| * | [1] | | [1] | | * | [1] | |
| * +.......+ +.......+ seg.limit | * +.......+ seg.limit |
| * | | | | | * | | |
| * +-------+ +-------+ seg.limit - len - 1 | * +-------+ seg.limit - len - 1 |
| * | valid | | valid | | * | valid | |
| * +-------+ +-------+ 00000000h | * +-------+ 00000000h |
| */ | */ |
| if ((len > sdp->u.seg.limit) /* len check */ | if ((len > sdp->u.seg.limit) /* len check */ |
| || (end < offset) /* wrap check */ | || (end < offset) /* wrap check */ |
| || (end > sdp->u.seg.limit)) { /* [1] */ | || (end > sdp->u.seg.limit + 1)) { /* [1] */ |
| return 0; | goto exc; |
| } | } |
| } | } |
| } | } |
| return 1; /* Ok! */ | return 1; /* Ok! */ |
| | exc: |
| | VERBOSE(("check_limit_upstairs: check failure: offset = 0x%08x, len = %d", offset, len + 1)); |
| | #if defined(DEBUG) |
| | segdesc_dump(sdp); |
| | #endif |
| | return 0; |
| } | } |
| static void MEMCALL | static void MEMCALL |
| Line 159 cpu_memoryread_check(descriptor_t *sdp, | Line 159 cpu_memoryread_check(descriptor_t *sdp, |
| case 6: case 7: /* rw (expand down) */ | case 6: case 7: /* rw (expand down) */ |
| case 10: case 11: /* rx */ | case 10: case 11: /* rx */ |
| case 14: case 15: /* rxc */ | case 14: case 15: /* rxc */ |
| if (!check_limit_upstairs(sdp, offset, len)) | if (!check_limit_upstairs(sdp, offset, len, SEG_IS_32BIT(sdp))) |
| goto exc; | goto exc; |
| break; | break; |
| Line 170 cpu_memoryread_check(descriptor_t *sdp, | Line 170 cpu_memoryread_check(descriptor_t *sdp, |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_memoryread_check: check failure.")); | VERBOSE(("cpu_memoryread_check: check failure: offset = 0x%08x, len = %d", offset, len)); |
| VERBOSE(("offset = 0x%08x, len = %d", offset, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| Line 199 cpu_memorywrite_check(descriptor_t *sdp, | Line 198 cpu_memorywrite_check(descriptor_t *sdp, |
| switch (sdp->type) { | switch (sdp->type) { |
| case 2: case 3: /* rw */ | case 2: case 3: /* rw */ |
| case 6: case 7: /* rw (expand down) */ | case 6: case 7: /* rw (expand down) */ |
| if (!check_limit_upstairs(sdp, offset, len)) | if (!check_limit_upstairs(sdp, offset, len, SEG_IS_32BIT(sdp))) |
| goto exc; | goto exc; |
| break; | break; |
| Line 210 cpu_memorywrite_check(descriptor_t *sdp, | Line 209 cpu_memorywrite_check(descriptor_t *sdp, |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_memorywrite_check: check failure.")); | VERBOSE(("cpu_memorywrite_check: check failure: offset = 0x%08x, len = %d", offset, len)); |
| VERBOSE(("offset = 0x%08x, len = %d", offset, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(e, 0); | EXCEPTION(e, 0); |
| } | } |
| void | void MEMCALL |
| cpu_stack_push_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) | cpu_stack_push_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len, |
| | BOOL is32bit) |
| { | { |
| UINT32 limit; | UINT32 limit; |
| UINT32 start; | UINT32 start; |
| Line 227 cpu_stack_push_check(UINT16 s, descripto | Line 226 cpu_stack_push_check(UINT16 s, descripto |
| __ASSERT(sdp != NULL); | __ASSERT(sdp != NULL); |
| __ASSERT(len > 0); | __ASSERT(len > 0); |
| len--; | |
| if (!SEG_IS_VALID(sdp) | if (!SEG_IS_VALID(sdp) |
| || !SEG_IS_PRESENT(sdp) | || !SEG_IS_PRESENT(sdp) |
| || SEG_IS_SYSTEM(sdp) | || SEG_IS_SYSTEM(sdp) |
| Line 235 cpu_stack_push_check(UINT16 s, descripto | Line 236 cpu_stack_push_check(UINT16 s, descripto |
| goto exc; | goto exc; |
| } | } |
| | len--; |
| start = sp - len; | start = sp - len; |
| limit = SEG_IS_32BIT(sdp) ? 0xffffffff : 0x0000ffff; | limit = is32bit ? 0xffffffff : 0x0000ffff; |
| if (SEG_IS_EXPANDDOWN_DATA(sdp)) { | if (SEG_IS_EXPANDDOWN_DATA(sdp)) { |
| /* expand-down stack */ | /* expand-down stack */ |
| Line 320 cpu_stack_push_check(UINT16 s, descripto | Line 320 cpu_stack_push_check(UINT16 s, descripto |
| */ | */ |
| if ((len > sdp->u.seg.limit) /* len check */ | if ((len > sdp->u.seg.limit) /* len check */ |
| || (start > sp) /* wrap check */ | || (start > sp) /* wrap check */ |
| || (sp > sdp->u.seg.limit)) { /* [1] */ | || (sp > sdp->u.seg.limit + 1)) { /* [1] */ |
| goto exc; | goto exc; |
| } | } |
| } | } |
| Line 328 cpu_stack_push_check(UINT16 s, descripto | Line 328 cpu_stack_push_check(UINT16 s, descripto |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_stack_push_check: check failure.")); | VERBOSE(("cpu_stack_push_check: check failure: selector = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); |
| VERBOSE(("s = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(SS_EXCEPTION, s & 0xfffc); | EXCEPTION(SS_EXCEPTION, s & 0xfffc); |
| } | } |
| void | void MEMCALL |
| cpu_stack_pop_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) | cpu_stack_pop_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len, |
| | BOOL is32bit) |
| { | { |
| __ASSERT(sdp != NULL); | __ASSERT(sdp != NULL); |
| Line 351 cpu_stack_pop_check(UINT16 s, descriptor | Line 351 cpu_stack_pop_check(UINT16 s, descriptor |
| goto exc; | goto exc; |
| } | } |
| if (!check_limit_upstairs(sdp, sp, len)) | if (!check_limit_upstairs(sdp, sp, len, is32bit)) |
| goto exc; | goto exc; |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_stack_pop_check: check failure.")); | VERBOSE(("cpu_stack_pop_check: check failure: selector = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); |
| VERBOSE(("s = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(SS_EXCEPTION, s & 0xfffc); | EXCEPTION(SS_EXCEPTION, s & 0xfffc); |
| } | } |
| #if defined(IA32_SUPPORT_DEBUG_REGISTER) | |
| static INLINE void | |
| check_memory_break_point(UINT32 address, UINT length, UINT rw) | |
| { | |
| int i; | |
| if (CPU_STAT_BP && !(CPU_EFLAG & RF_FLAG)) { | |
| for (i = 0; i < CPU_DEBUG_REG_INDEX_NUM; i++) { | |
| if ((CPU_STAT_BP & (1 << i)) | |
| && (CPU_DR7_GET_RW(i) & rw) | |
| && ((address <= CPU_DR(i) && address + length > CPU_DR(i)) | |
| || (address > CPU_DR(i) && address < CPU_DR(i) + CPU_DR7_GET_LEN(i)))) { | |
| CPU_STAT_BP_EVENT |= CPU_STAT_BP_EVENT_B(i); | |
| } | |
| } | |
| } | |
| } | |
| #else | |
| #define check_memory_break_point(address, length, rw) | |
| #endif | |
| /* | /* |
| * code fetch | * code fetch |
| */ | */ |
| | #define ucrw (CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE) |
| UINT8 MEMCALL | UINT8 MEMCALL |
| cpu_codefetch(UINT32 offset) | cpu_codefetch(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| #if defined(IA32_SUPPORT_TLB) | |
| TLB_ENTRY_T *ep; | |
| #endif | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread(addr); | return cpu_memoryread(addr); |
| #if defined(IA32_SUPPORT_TLB) | if (offset <= sdp->u.seg.limit) |
| ep = tlb_lookup(addr, ucrw); | return cpu_lmemoryread(addr, ucrw); |
| if (ep != NULL && ep->memp != NULL) { | |
| return ep->memp[addr & 0xfff]; | |
| } | |
| #endif | |
| return cpu_linear_memory_read_b(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| Line 421 cpu_codefetch(UINT32 offset) | Line 389 cpu_codefetch(UINT32 offset) |
| UINT16 MEMCALL | UINT16 MEMCALL |
| cpu_codefetch_w(UINT32 offset) | cpu_codefetch_w(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| #if defined(IA32_SUPPORT_TLB) | |
| TLB_ENTRY_T *ep; | |
| UINT16 value; | |
| #endif | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit - 1) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread_w(addr); | return cpu_memoryread_w(addr); |
| #if defined(IA32_SUPPORT_TLB) | if (offset <= sdp->u.seg.limit - 1) |
| ep = tlb_lookup(addr, ucrw); | return cpu_lmemoryread_w(addr, ucrw); |
| if (ep != NULL && ep->memp != NULL) { | |
| if ((addr + 1) & 0x00000fff) { | |
| return LOADINTELWORD(ep->memp + (addr & 0xfff)); | |
| } | |
| value = ep->memp[0xfff]; | |
| ep = tlb_lookup(addr + 1, ucrw); | |
| if (ep != NULL && ep->memp != NULL) { | |
| value += (UINT16)ep->memp[0] << 8; | |
| return value; | |
| } | |
| } | |
| #endif | |
| return cpu_linear_memory_read_w(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| Line 456 cpu_codefetch_w(UINT32 offset) | Line 408 cpu_codefetch_w(UINT32 offset) |
| UINT32 MEMCALL | UINT32 MEMCALL |
| cpu_codefetch_d(UINT32 offset) | cpu_codefetch_d(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| #if defined(IA32_SUPPORT_TLB) | |
| TLB_ENTRY_T *ep[2]; | |
| UINT32 value; | |
| UINT remain; | |
| #endif | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit - 3) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread_d(addr); | return cpu_memoryread_d(addr); |
| #if defined(IA32_SUPPORT_TLB) | |
| ep[0] = tlb_lookup(addr, ucrw); | if (offset <= sdp->u.seg.limit - 3) |
| if (ep[0] != NULL && ep[0]->memp != NULL) { | return cpu_lmemoryread_d(addr, ucrw); |
| remain = 0x1000 - (addr & 0xfff); | |
| if (remain >= 4) { | |
| return LOADINTELDWORD(ep[0]->memp + (addr & 0xfff)); | |
| } | |
| ep[1] = tlb_lookup(addr + remain, ucrw); | |
| if (ep[1] != NULL && ep[1]->memp != NULL) { | |
| switch (remain) { | |
| case 3: | |
| value = ep[0]->memp[0xffd]; | |
| value += (UINT32)LOADINTELWORD(ep[0]->memp + 0xffe) << 8; | |
| value += (UINT32)ep[1]->memp[0] << 24; | |
| break; | |
| case 2: | |
| value = LOADINTELWORD(ep[0]->memp + 0xffe); | |
| value += (UINT32)LOADINTELWORD(ep[1]->memp + 0) << 16; | |
| break; | |
| case 1: | |
| value = ep[0]->memp[0xfff]; | |
| value += (UINT32)LOADINTELWORD(ep[1]->memp + 0) << 8; | |
| value += (UINT32)ep[1]->memp[2] << 24; | |
| break; | |
| default: | |
| ia32_panic("cpu_codefetch_d(): out of range. (remain = %d)\n", remain); | |
| return (UINT32)-1; | |
| } | |
| return value; | |
| } | |
| } | |
| #endif | |
| return cpu_linear_memory_read_d(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| | #undef ucrw |
| /* | /* |
| * additional physical address memory access functions | * additional physical address memory access functions |
| */ | */ |
| Line 559 cpu_memorywrite_f(UINT32 paddr, const RE | Line 472 cpu_memorywrite_f(UINT32 paddr, const RE |
| /* | /* |
| * virtual address memory access functions | * virtual address memory access functions |
| */ | */ |
| #include "cpu_mem.mcr" | #define CHOOSE_EXCEPTION(sreg) \ |
| | (((sreg) == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION) |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(b, UINT8, 1) | |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(w, UINT16, 2) | |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(d, UINT32, 4) | |
| UINT64 MEMCALL | |
| cpu_vmemoryread_q(int idx, UINT32 offset) | |
| { | |
| descriptor_t *sdp; | |
| UINT32 addr; | |
| int exc; | |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | |
| sdp = &CPU_STAT_SREG(idx); | |
| if (!SEG_IS_VALID(sdp)) { | |
| exc = GP_EXCEPTION; | |
| goto err; | |
| } | |
| if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { | #include "cpu_mem.mcr" |
| cpu_memoryread_check(sdp, offset, 8, | |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | |
| if (!check_limit_upstairs(sdp, offset, 8)) | |
| goto range_failure; | |
| } | |
| addr = sdp->u.seg.segbase + offset; | |
| check_memory_break_point(addr, 8, CPU_DR7_RW_RO); | |
| if (!CPU_STAT_PAGING) | |
| return cpu_memoryread_q(addr); | |
| return cpu_linear_memory_read_q(addr, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| range_failure: | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(b, UINT8, 1) |
| VERBOSE(("cpu_vmemoryread_q: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(b, UINT8, 1) |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(w, UINT16, 2) |
| err: | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(w, UINT16, 2) |
| EXCEPTION(exc, 0); | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(d, UINT32, 4) |
| return 0; /* compiler happy */ | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(d, UINT32, 4) |
| } | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(q, UINT64, 8) |
| void MEMCALL | REG80 MEMCALL |
| cpu_vmemorywrite_q(int idx, UINT32 offset, UINT64 value) | cpu_vmemoryread_f(int idx, UINT32 offset) |
| { | { |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| Line 611 cpu_vmemorywrite_q(int idx, UINT32 offse | Line 495 cpu_vmemorywrite_q(int idx, UINT32 offse |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); |
| sdp = &CPU_STAT_SREG(idx); | sdp = &CPU_STAT_SREG(idx); |
| if (!SEG_IS_VALID(sdp)) { | |
| exc = GP_EXCEPTION; | |
| goto err; | |
| } | |
| if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { | |
| cpu_memorywrite_check(sdp, offset, 8, | |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | |
| if (!check_limit_upstairs(sdp, offset, 8)) | |
| goto range_failure; | |
| } | |
| addr = sdp->u.seg.segbase + offset; | addr = sdp->u.seg.segbase + offset; |
| check_memory_break_point(addr, 8, CPU_DR7_RW_RW); | |
| if (!CPU_STAT_PAGING) { | |
| cpu_memorywrite_q(addr, value); | |
| } else { | |
| cpu_linear_memory_write_q(addr, value, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| } | |
| return; | |
| range_failure: | |
| VERBOSE(("cpu_vmemorywrite_q: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | |
| err: | |
| EXCEPTION(exc, 0); | |
| } | |
| REG80 MEMCALL | |
| cpu_vmemoryread_f(int idx, UINT32 offset) | |
| { | |
| descriptor_t *sdp; | |
| UINT32 addr; | |
| int exc; | |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | if (!CPU_STAT_PM) |
| | return cpu_memoryread_f(addr); |
| sdp = &CPU_STAT_SREG(idx); | |
| if (!SEG_IS_VALID(sdp)) { | if (!SEG_IS_VALID(sdp)) { |
| exc = GP_EXCEPTION; | exc = GP_EXCEPTION; |
| goto err; | goto err; |
| } | } |
| if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { | if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { |
| cpu_memoryread_check(sdp, offset, 10, | cpu_memoryread_check(sdp, offset, 10, CHOOSE_EXCEPTION(idx)); |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { |
| if (!check_limit_upstairs(sdp, offset, 10)) | if (!check_limit_upstairs(sdp, offset, 10, SEG_IS_32BIT(sdp))) |
| goto range_failure; | goto range_failure; |
| } | } |
| addr = sdp->u.seg.segbase + offset; | return cpu_lmemoryread_f(addr, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); |
| check_memory_break_point(addr, 10, CPU_DR7_RW_RO); | |
| if (!CPU_STAT_PAGING) | |
| return cpu_memoryread_f(addr); | |
| return cpu_linear_memory_read_f(addr, CPU_PAGE_READ_DATA | CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| range_failure: | range_failure: |
| VERBOSE(("cpu_vmemoryread_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | VERBOSE(("cpu_vmemoryread_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | exc = CHOOSE_EXCEPTION(idx); |
| err: | err: |
| EXCEPTION(exc, 0); | EXCEPTION(exc, 0); |
| { | { |
| Line 689 cpu_vmemorywrite_f(int idx, UINT32 offse | Line 534 cpu_vmemorywrite_f(int idx, UINT32 offse |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); |
| sdp = &CPU_STAT_SREG(idx); | sdp = &CPU_STAT_SREG(idx); |
| | addr = sdp->u.seg.segbase + offset; |
| | if (!CPU_STAT_PM) { |
| | cpu_memorywrite_f(addr, value); |
| | return; |
| | } |
| if (!SEG_IS_VALID(sdp)) { | if (!SEG_IS_VALID(sdp)) { |
| exc = GP_EXCEPTION; | exc = GP_EXCEPTION; |
| goto err; | goto err; |
| } | } |
| if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { | if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { |
| cpu_memorywrite_check(sdp, offset, 10, | cpu_memorywrite_check(sdp, offset, 10, CHOOSE_EXCEPTION(idx)); |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { |
| if (!check_limit_upstairs(sdp, offset, 10)) | if (!check_limit_upstairs(sdp, offset, 10, SEG_IS_32BIT(sdp))) |
| goto range_failure; | goto range_failure; |
| } | } |
| addr = sdp->u.seg.segbase + offset; | cpu_lmemorywrite_f(addr, value, CPU_PAGE_WRITE_DATA | CPU_STAT_USER_MODE); |
| check_memory_break_point(addr, 10, CPU_DR7_RW_RW); | |
| if (!CPU_STAT_PAGING) { | |
| cpu_memorywrite_f(addr, value); | |
| } else { | |
| cpu_linear_memory_write_f(addr, value, CPU_PAGE_WRITE_DATA | CPU_STAT_USER_MODE); | |
| } | |
| return; | return; |
| range_failure: | range_failure: |
| VERBOSE(("cpu_vmemorywrite_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | VERBOSE(("cpu_vmemorywrite_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | exc = CHOOSE_EXCEPTION(idx); |
| err: | err: |
| EXCEPTION(exc, 0); | EXCEPTION(exc, 0); |
| } | } |
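
For orientation, the hunks above center on `check_limit_upstairs()`, which validates that an `offset`/`len` access stays inside a segment before the linear address is formed. The stand-alone sketch below is not taken from the source being diffed; it is a minimal model, assuming the usual IA-32 rule that an expand-up segment is valid over offsets `0 .. limit` while an expand-down segment is valid over `limit+1 .. 0xFFFF` (or `0xFFFFFFFF`), with the ceiling chosen by the address-size attribute the way the new explicit `is32bit` argument does. The helper name `seg_limit_check_ok` and the `main()` driver are hypothetical, and the off-by-one details may not match the emulator's exact comparisons.

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Minimal model of the segment limit check (not the emulator's code).
 * seg_limit   : byte-granular limit taken from the descriptor
 * expand_down : non-zero for an expand-down data segment
 * is32bit     : address-size attribute; selects the 0xFFFFFFFF or 0xFFFF ceiling
 * offset, len : first offset and byte count of the access (len > 0)
 * Returns 1 when every byte of the access lies inside the segment.
 */
static int
seg_limit_check_ok(uint32_t seg_limit, int expand_down, int is32bit,
    uint32_t offset, uint32_t len)
{
	uint32_t ceiling = is32bit ? 0xffffffffU : 0x0000ffffU;
	uint32_t last = offset + (len - 1);	/* offset of the last byte */

	if (expand_down) {
		/* valid offsets are seg_limit+1 .. ceiling */
		if (last < offset)		/* 32-bit wrap */
			return 0;
		if (offset <= seg_limit)	/* starts in the invalid hole */
			return 0;
		if (last > ceiling)		/* runs past the ceiling */
			return 0;
		return 1;
	}
	/* expand-up data or code segment: valid offsets are 0 .. seg_limit */
	if (last < offset)			/* 32-bit wrap */
		return 0;
	if (last > seg_limit)			/* runs past the limit */
		return 0;
	return 1;
}

int
main(void)
{
	/* 16-bit expand-up segment, limit 0xFFFF */
	printf("%d\n", seg_limit_check_ok(0x0000ffff, 0, 0, 0xfffe, 2)); /* 1: bytes FFFE..FFFF */
	printf("%d\n", seg_limit_check_ok(0x0000ffff, 0, 0, 0xffff, 2)); /* 0: crosses the limit */
	/* expand-down stack segment, limit 0x0FFF, 32-bit ceiling */
	printf("%d\n", seg_limit_check_ok(0x00000fff, 1, 1, 0x1000, 4)); /* 1: just above the hole */
	printf("%d\n", seg_limit_check_ok(0x00000fff, 1, 1, 0x0ffe, 4)); /* 0: dips into the hole */
	return 0;
}
```

Built with a plain C compiler, the driver should print 1, 0, 1, 0 for the four probes, illustrating the boundary cases the revised checks are concerned with.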