| version 1.24, 2011/12/16 09:05:42 | version 1.33, 2012/01/08 19:09:40 |
|---|---|
| Line 64 check_limit_upstairs(descriptor_t *sdp, | Line 64 check_limit_upstairs(descriptor_t *sdp, |
| if (!SEG_IS_32BIT(sdp)) { | if (!SEG_IS_32BIT(sdp)) { |
| if ((len > limit) /* len check */ | if ((len > limit) /* len check */ |
| || (end > limit)) { /* [1] */ | || (end > limit)) { /* [1] */ |
| return 0; | goto exc; |
| } | } |
| } else { | } else { |
| sdp->flag |= CPU_DESC_FLAG_WHOLEADR; | sdp->flag |= CPU_DESC_FLAG_WHOLEADR; |
| Line 88 check_limit_upstairs(descriptor_t *sdp, | Line 88 check_limit_upstairs(descriptor_t *sdp, |
| || (end < offset) /* wrap check */ | || (end < offset) /* wrap check */ |
| || (offset < sdp->u.seg.limit) /* [1] */ | || (offset < sdp->u.seg.limit) /* [1] */ |
| || (end > limit)) { /* [2] */ | || (end > limit)) { /* [2] */ |
| return 0; | goto exc; |
| } | } |
| } | } |
| } else { | } else { |
| Line 107 check_limit_upstairs(descriptor_t *sdp, | Line 107 check_limit_upstairs(descriptor_t *sdp, |
| if (!SEG_IS_32BIT(sdp)) { | if (!SEG_IS_32BIT(sdp)) { |
| if ((len > limit) /* len check */ | if ((len > limit) /* len check */ |
| || (offset + len > limit)) { /* [1] */ | || (offset + len > limit)) { /* [1] */ |
| return 0; | goto exc; |
| } | } |
| } else { | } else { |
| sdp->flag |= CPU_DESC_FLAG_WHOLEADR; | sdp->flag |= CPU_DESC_FLAG_WHOLEADR; |
| Line 127 check_limit_upstairs(descriptor_t *sdp, | Line 127 check_limit_upstairs(descriptor_t *sdp, |
| */ | */ |
| if ((len > sdp->u.seg.limit) /* len check */ | if ((len > sdp->u.seg.limit) /* len check */ |
| || (end < offset) /* wrap check */ | || (end < offset) /* wrap check */ |
| || (end > sdp->u.seg.limit)) { /* [1] */ | || (end > sdp->u.seg.limit + 1)) { /* [1] */ |
| return 0; | goto exc; |
| } | } |
| } | } |
| } | } |
| return 1; /* Ok! */ | return 1; /* Ok! */ |
| | exc: |
| | VERBOSE(("check_limit_upstairs: check failure: offset = 0x%08x, len = %d", offset, len + 1)); |
| | #if defined(DEBUG) |
| | segdesc_dump(sdp); |
| | #endif |
| | return 0; |
| } | } |
| static void MEMCALL | static void MEMCALL |
| Line 170 cpu_memoryread_check(descriptor_t *sdp, | Line 177 cpu_memoryread_check(descriptor_t *sdp, |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_memoryread_check: check failure.")); | VERBOSE(("cpu_memoryread_check: check failure: offset = 0x%08x, len = %d", offset, len)); |
| VERBOSE(("offset = 0x%08x, len = %d", offset, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| Line 210 cpu_memorywrite_check(descriptor_t *sdp, | Line 216 cpu_memorywrite_check(descriptor_t *sdp, |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_memorywrite_check: check failure.")); | VERBOSE(("cpu_memorywrite_check: check failure: offset = 0x%08x, len = %d", offset, len)); |
| VERBOSE(("offset = 0x%08x, len = %d", offset, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(e, 0); | EXCEPTION(e, 0); |
| } | } |
| void | void MEMCALL |
| cpu_stack_push_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) | cpu_stack_push_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) |
| { | { |
| UINT32 limit; | UINT32 limit; |
| Line 227 cpu_stack_push_check(UINT16 s, descripto | Line 232 cpu_stack_push_check(UINT16 s, descripto |
| __ASSERT(sdp != NULL); | __ASSERT(sdp != NULL); |
| __ASSERT(len > 0); | __ASSERT(len > 0); |
| len--; | |
| if (!SEG_IS_VALID(sdp) | if (!SEG_IS_VALID(sdp) |
| || !SEG_IS_PRESENT(sdp) | || !SEG_IS_PRESENT(sdp) |
| || SEG_IS_SYSTEM(sdp) | || SEG_IS_SYSTEM(sdp) |
| Line 235 cpu_stack_push_check(UINT16 s, descripto | Line 242 cpu_stack_push_check(UINT16 s, descripto |
| goto exc; | goto exc; |
| } | } |
| | len--; |
| start = sp - len; | start = sp - len; |
| limit = SEG_IS_32BIT(sdp) ? 0xffffffff : 0x0000ffff; | limit = SEG_IS_32BIT(sdp) ? 0xffffffff : 0x0000ffff; |
| Line 320 cpu_stack_push_check(UINT16 s, descripto | Line 326 cpu_stack_push_check(UINT16 s, descripto |
| */ | */ |
| if ((len > sdp->u.seg.limit) /* len check */ | if ((len > sdp->u.seg.limit) /* len check */ |
| || (start > sp) /* wrap check */ | || (start > sp) /* wrap check */ |
| || (sp > sdp->u.seg.limit)) { /* [1] */ | || (sp > sdp->u.seg.limit + 1)) { /* [1] */ |
| goto exc; | goto exc; |
| } | } |
| } | } |
| Line 328 cpu_stack_push_check(UINT16 s, descripto | Line 334 cpu_stack_push_check(UINT16 s, descripto |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_stack_push_check: check failure.")); | VERBOSE(("cpu_stack_push_check: check failure: selector = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); |
| VERBOSE(("s = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(SS_EXCEPTION, s & 0xfffc); | EXCEPTION(SS_EXCEPTION, s & 0xfffc); |
| } | } |
| void | void MEMCALL |
| cpu_stack_pop_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) | cpu_stack_pop_check(UINT16 s, descriptor_t *sdp, UINT32 sp, UINT len) |
| { | { |
| Line 356 cpu_stack_pop_check(UINT16 s, descriptor | Line 361 cpu_stack_pop_check(UINT16 s, descriptor |
| return; | return; |
| exc: | exc: |
| VERBOSE(("cpu_stack_pop_check: check failure.")); | VERBOSE(("cpu_stack_pop_check: check failure: selector = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); |
| VERBOSE(("s = 0x%04x, sp = 0x%08x, len = %d", s, sp, len)); | |
| #if defined(DEBUG) | #if defined(DEBUG) |
| segdesc_dump(sdp); | segdesc_dump(sdp); |
| #endif | #endif |
| EXCEPTION(SS_EXCEPTION, s & 0xfffc); | EXCEPTION(SS_EXCEPTION, s & 0xfffc); |
| } | } |
| #if defined(IA32_SUPPORT_DEBUG_REGISTER) | |
| static INLINE void | |
| check_memory_break_point(UINT32 address, UINT length, UINT rw) | |
| { | |
| int i; | |
| if (CPU_STAT_BP && !(CPU_EFLAG & RF_FLAG)) { | |
| for (i = 0; i < CPU_DEBUG_REG_INDEX_NUM; i++) { | |
| if ((CPU_STAT_BP & (1 << i)) | |
| && (CPU_DR7_GET_RW(i) & rw) | |
| && ((address <= CPU_DR(i) && address + length > CPU_DR(i)) | |
| || (address > CPU_DR(i) && address < CPU_DR(i) + CPU_DR7_GET_LEN(i)))) { | |
| CPU_STAT_BP_EVENT |= CPU_STAT_BP_EVENT_B(i); | |
| } | |
| } | |
| } | |
| } | |
| #else | |
| #define check_memory_break_point(address, length, rw) | |
| #endif | |
| /* | /* |
| * code fetch | * code fetch |
| */ | */ |
| | #define ucrw (CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE) |
| UINT8 MEMCALL | UINT8 MEMCALL |
| cpu_codefetch(UINT32 offset) | cpu_codefetch(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| TLB_ENTRY_T *ep; | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread(addr); | return cpu_memoryread(addr); |
| ep = tlb_lookup(addr, ucrw); | if (offset <= sdp->u.seg.limit) |
| if (ep != NULL && ep->memp != NULL) { | return cpu_lmemoryread(addr, ucrw); |
| return ep->memp[addr & 0xfff]; | |
| } | |
| return cpu_linear_memory_read_b(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| Line 417 cpu_codefetch(UINT32 offset) | Line 394 cpu_codefetch(UINT32 offset) |
| UINT16 MEMCALL | UINT16 MEMCALL |
| cpu_codefetch_w(UINT32 offset) | cpu_codefetch_w(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| TLB_ENTRY_T *ep; | |
| UINT16 value; | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit - 1) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread_w(addr); | return cpu_memoryread_w(addr); |
| ep = tlb_lookup(addr, ucrw); | if (offset <= sdp->u.seg.limit - 1) |
| if (ep != NULL && ep->memp != NULL) { | return cpu_lmemoryread_w(addr, ucrw); |
| if ((addr + 1) & 0x00000fff) { | |
| return LOADINTELWORD(ep->memp + (addr & 0xfff)); | |
| } | |
| value = ep->memp[0xfff]; | |
| ep = tlb_lookup(addr + 1, ucrw); | |
| if (ep != NULL && ep->memp != NULL) { | |
| value += (UINT16)ep->memp[0] << 8; | |
| return value; | |
| } | |
| } | |
| return cpu_linear_memory_read_w(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| Line 448 cpu_codefetch_w(UINT32 offset) | Line 413 cpu_codefetch_w(UINT32 offset) |
| UINT32 MEMCALL | UINT32 MEMCALL |
| cpu_codefetch_d(UINT32 offset) | cpu_codefetch_d(UINT32 offset) |
| { | { |
| const int ucrw = CPU_PAGE_READ_CODE | CPU_STAT_USER_MODE; | |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| TLB_ENTRY_T *ep[2]; | |
| UINT32 value; | |
| UINT remain; | |
| sdp = &CPU_CS_DESC; | sdp = &CPU_CS_DESC; |
| if (offset <= sdp->u.seg.limit - 3) { | addr = sdp->u.seg.segbase + offset; |
| addr = sdp->u.seg.segbase + offset; | |
| if (!CPU_STAT_PAGING) | if (!CPU_STAT_PM) |
| return cpu_memoryread_d(addr); | return cpu_memoryread_d(addr); |
| ep[0] = tlb_lookup(addr, ucrw); | |
| if (ep[0] != NULL && ep[0]->memp != NULL) { | if (offset <= sdp->u.seg.limit - 3) |
| remain = 0x1000 - (addr & 0xfff); | return cpu_lmemoryread_d(addr, ucrw); |
| if (remain >= 4) { | |
| return LOADINTELDWORD(ep[0]->memp + (addr & 0xfff)); | |
| } | |
| ep[1] = tlb_lookup(addr + remain, ucrw); | |
| if (ep[1] != NULL && ep[1]->memp != NULL) { | |
| switch (remain) { | |
| case 3: | |
| value = ep[0]->memp[0xffd]; | |
| value += (UINT32)LOADINTELWORD(ep[0]->memp + 0xffe) << 8; | |
| value += (UINT32)ep[1]->memp[0] << 24; | |
| break; | |
| case 2: | |
| value = LOADINTELWORD(ep[0]->memp + 0xffe); | |
| value += (UINT32)LOADINTELWORD(ep[1]->memp + 0) << 16; | |
| break; | |
| case 1: | |
| value = ep[0]->memp[0xfff]; | |
| value += (UINT32)LOADINTELWORD(ep[1]->memp + 0) << 8; | |
| value += (UINT32)ep[1]->memp[2] << 24; | |
| break; | |
| default: | |
| ia32_panic("cpu_codefetch_d(): out of range. (remain = %d)\n", remain); | |
| return (UINT32)-1; | |
| } | |
| return value; | |
| } | |
| } | |
| return cpu_linear_memory_read_d(addr, ucrw); | |
| } | |
| EXCEPTION(GP_EXCEPTION, 0); | EXCEPTION(GP_EXCEPTION, 0); |
| return 0; /* compiler happy */ | return 0; /* compiler happy */ |
| } | } |
| | #undef ucrw |
| /* | /* |
| * additional physical address memory access functions | * additional physical address memory access functions |
| */ | */ |
| Line 547 cpu_memorywrite_f(UINT32 paddr, const RE | Line 477 cpu_memorywrite_f(UINT32 paddr, const RE |
| /* | /* |
| * virtual address memory access functions | * virtual address memory access functions |
| */ | */ |
| #include "cpu_mem.mcr" | #define CHOOSE_EXCEPTION(sreg) \ |
| | (((sreg) == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION) |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(b, UINT8, 1) | |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(w, UINT16, 2) | |
| VIRTUAL_ADDRESS_MEMORY_ACCESS_FUNCTION(d, UINT32, 4) | |
| UINT64 MEMCALL | |
| cpu_vmemoryread_q(int idx, UINT32 offset) | |
| { | |
| descriptor_t *sdp; | |
| UINT32 addr; | |
| int exc; | |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | |
| sdp = &CPU_STAT_SREG(idx); | |
| if (!SEG_IS_VALID(sdp)) { | |
| exc = GP_EXCEPTION; | |
| goto err; | |
| } | |
| if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { | #include "cpu_mem.mcr" |
| cpu_memoryread_check(sdp, offset, 8, | |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | |
| if (!check_limit_upstairs(sdp, offset, 8)) | |
| goto range_failure; | |
| } | |
| addr = sdp->u.seg.segbase + offset; | |
| check_memory_break_point(addr, 8, CPU_DR7_RW_RO); | |
| if (!CPU_STAT_PAGING) | |
| return cpu_memoryread_q(addr); | |
| return cpu_linear_memory_read_q(addr, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| range_failure: | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(b, UINT8, 1) |
| VERBOSE(("cpu_vmemoryread_q: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(b, UINT8, 1) |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(w, UINT16, 2) |
| err: | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(w, UINT16, 2) |
| EXCEPTION(exc, 0); | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(d, UINT32, 4) |
| return 0; /* compiler happy */ | DECLARE_VIRTUAL_ADDRESS_MEMORY_RMW_FUNCTIONS(d, UINT32, 4) |
| } | DECLARE_VIRTUAL_ADDRESS_MEMORY_RW_FUNCTIONS(q, UINT64, 8) |
| void MEMCALL | REG80 MEMCALL |
| cpu_vmemorywrite_q(int idx, UINT32 offset, UINT64 value) | cpu_vmemoryread_f(int idx, UINT32 offset) |
| { | { |
| descriptor_t *sdp; | descriptor_t *sdp; |
| UINT32 addr; | UINT32 addr; |
| Line 599 cpu_vmemorywrite_q(int idx, UINT32 offse | Line 500 cpu_vmemorywrite_q(int idx, UINT32 offse |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); |
| sdp = &CPU_STAT_SREG(idx); | sdp = &CPU_STAT_SREG(idx); |
| if (!SEG_IS_VALID(sdp)) { | |
| exc = GP_EXCEPTION; | |
| goto err; | |
| } | |
| if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { | |
| cpu_memorywrite_check(sdp, offset, 8, | |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | |
| if (!check_limit_upstairs(sdp, offset, 8)) | |
| goto range_failure; | |
| } | |
| addr = sdp->u.seg.segbase + offset; | addr = sdp->u.seg.segbase + offset; |
| check_memory_break_point(addr, 8, CPU_DR7_RW_RW); | |
| if (!CPU_STAT_PAGING) { | |
| cpu_memorywrite_q(addr, value); | |
| } else { | |
| cpu_linear_memory_write_q(addr, value, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| } | |
| return; | |
| range_failure: | |
| VERBOSE(("cpu_vmemorywrite_q: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | |
| err: | |
| EXCEPTION(exc, 0); | |
| } | |
| REG80 MEMCALL | if (!CPU_STAT_PM) |
| cpu_vmemoryread_f(int idx, UINT32 offset) | return cpu_memoryread_f(addr); |
| { | |
| descriptor_t *sdp; | |
| UINT32 addr; | |
| int exc; | |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | |
| sdp = &CPU_STAT_SREG(idx); | |
| if (!SEG_IS_VALID(sdp)) { | if (!SEG_IS_VALID(sdp)) { |
| exc = GP_EXCEPTION; | exc = GP_EXCEPTION; |
| goto err; | goto err; |
| } | } |
| if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { | if (!(sdp->flag & CPU_DESC_FLAG_READABLE)) { |
| cpu_memoryread_check(sdp, offset, 10, | cpu_memoryread_check(sdp, offset, 10, CHOOSE_EXCEPTION(idx)); |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { |
| if (!check_limit_upstairs(sdp, offset, 10)) | if (!check_limit_upstairs(sdp, offset, 10)) |
| goto range_failure; | goto range_failure; |
| } | } |
| addr = sdp->u.seg.segbase + offset; | return cpu_lmemoryread_f(addr, CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); |
| check_memory_break_point(addr, 10, CPU_DR7_RW_RO); | |
| if (!CPU_STAT_PAGING) | |
| return cpu_memoryread_f(addr); | |
| return cpu_linear_memory_read_f(addr, CPU_PAGE_READ_DATA | CPU_PAGE_READ_DATA | CPU_STAT_USER_MODE); | |
| range_failure: | range_failure: |
| VERBOSE(("cpu_vmemoryread_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | VERBOSE(("cpu_vmemoryread_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | exc = CHOOSE_EXCEPTION(idx); |
| err: | err: |
| EXCEPTION(exc, 0); | EXCEPTION(exc, 0); |
| { | { |
| Line 677 cpu_vmemorywrite_f(int idx, UINT32 offse | Line 539 cpu_vmemorywrite_f(int idx, UINT32 offse |
| __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); | __ASSERT((unsigned int)idx < CPU_SEGREG_NUM); |
| sdp = &CPU_STAT_SREG(idx); | sdp = &CPU_STAT_SREG(idx); |
| | addr = sdp->u.seg.segbase + offset; |
| | if (!CPU_STAT_PM) { |
| | cpu_memorywrite_f(addr, value); |
| | return; |
| | } |
| if (!SEG_IS_VALID(sdp)) { | if (!SEG_IS_VALID(sdp)) { |
| exc = GP_EXCEPTION; | exc = GP_EXCEPTION; |
| goto err; | goto err; |
| } | } |
| if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { | if (!(sdp->flag & CPU_DESC_FLAG_WRITABLE)) { |
| cpu_memorywrite_check(sdp, offset, 10, | cpu_memorywrite_check(sdp, offset, 10, CHOOSE_EXCEPTION(idx)); |
| (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION); | |
| } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { | } else if (!(sdp->flag & CPU_DESC_FLAG_WHOLEADR)) { |
| if (!check_limit_upstairs(sdp, offset, 10)) | if (!check_limit_upstairs(sdp, offset, 10)) |
| goto range_failure; | goto range_failure; |
| } | } |
| addr = sdp->u.seg.segbase + offset; | cpu_lmemorywrite_f(addr, value, CPU_PAGE_WRITE_DATA | CPU_STAT_USER_MODE); |
| check_memory_break_point(addr, 10, CPU_DR7_RW_RW); | |
| if (!CPU_STAT_PAGING) { | |
| cpu_memorywrite_f(addr, value); | |
| } else { | |
| cpu_linear_memory_write_f(addr, value, CPU_PAGE_WRITE_DATA | CPU_STAT_USER_MODE); | |
| } | |
| return; | return; |
| range_failure: | range_failure: |
| VERBOSE(("cpu_vmemorywrite_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); | VERBOSE(("cpu_vmemorywrite_f: type = %d, offset = %08x, limit = %08x", sdp->type, offset, sdp->u.seg.limit)); |
| exc = (idx == CPU_SS_INDEX) ? SS_EXCEPTION : GP_EXCEPTION; | exc = CHOOSE_EXCEPTION(idx); |
| err: | err: |
| EXCEPTION(exc, 0); | EXCEPTION(exc, 0); |
| } | } |
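
The revised `check_limit_upstairs` above routes every failed bounds test through a single `exc:` label, and the new `CHOOSE_EXCEPTION` macro picks `SS_EXCEPTION` for the stack segment and `GP_EXCEPTION` otherwise. As a rough illustration of the expand-up case of that check (a standalone sketch, not code from the np2 sources; `seg_desc_t`, `check_expand_up`, and the `is_stack` flag are made-up names), the last byte touched by an access must stay at or below the segment limit and the offset arithmetic must not wrap:

```c
#include <stdint.h>
#include <stdio.h>

/*
 * Standalone sketch -- not np2 code.  seg_desc_t, check_expand_up and
 * the is_stack flag are hypothetical names used only for illustration.
 */
typedef struct {
	uint32_t limit;    /* highest valid offset within the segment */
	int      is_stack; /* nonzero: a failure raises #SS instead of #GP */
} seg_desc_t;

enum seg_fault { FAULT_NONE, FAULT_GP, FAULT_SS };

/* Check that len bytes starting at offset fit in an expand-up segment. */
static enum seg_fault
check_expand_up(const seg_desc_t *sdp, uint32_t offset, uint32_t len)
{
	uint32_t end;

	if (len == 0)
		return FAULT_NONE;
	end = offset + len - 1;           /* last byte touched */
	if (end < offset                  /* 32-bit wrap-around */
	    || end > sdp->limit)          /* past the segment limit */
		return sdp->is_stack ? FAULT_SS : FAULT_GP;
	return FAULT_NONE;
}

int
main(void)
{
	seg_desc_t ds = { 0x0000ffff, 0 };

	printf("%d\n", check_expand_up(&ds, 0xfffc, 4)); /* 0: fits exactly */
	printf("%d\n", check_expand_up(&ds, 0xfffd, 4)); /* 1: crosses the limit -> #GP */
	return 0;
}
```

The 16-bit and expand-down branches visible in the diff layer additional guards (the `len > limit` and `offset < sdp->u.seg.limit` tests) onto this same basic shape.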