| version 1.15, 2004/03/24 14:03:52 | version 1.24, 2011/01/15 18:36:12 |
|---|---|
| Line 1 | Line 1 |
| /* $Id$ */ | |
| /* | /* |
| * Copyright (c) 2003 NONAKA Kimihiro | * Copyright (c) 2003 NONAKA Kimihiro |
| * All rights reserved. | * All rights reserved. |
| Line 12 | Line 10 |
| * 2. Redistributions in binary form must reproduce the above copyright | * 2. Redistributions in binary form must reproduce the above copyright |
| * notice, this list of conditions and the following disclaimer in the | * notice, this list of conditions and the following disclaimer in the |
| * documentation and/or other materials provided with the distribution. | * documentation and/or other materials provided with the distribution. |
| * 3. The name of the author may not be used to endorse or promote products | |
| * derived from this software without specific prior written permission. | |
| * | * |
| * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR | * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR |
| * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES | * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES |
| Line 128 extern "C" { | Line 124 extern "C" { |
| /* | /* |
| * linear address memory access function | * linear address memory access function |
| */ | */ |
| #if defined(IA32_PAGING_EACHSIZE) | void MEMCALL cpu_memory_access_la_region(UINT32 address, UINT length, const int ucrw, UINT8 *data); |
| UINT8 MEMCALL cpu_memory_access_la_RMW_b(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); | UINT32 MEMCALL laddr2paddr(const UINT32 laddr, const int ucrw); |
| UINT16 MEMCALL cpu_memory_access_la_RMW_w(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); | #define laddr_to_paddr(laddr, ucrw) \ |
| UINT32 MEMCALL cpu_memory_access_la_RMW_d(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); | (!CPU_STAT_PAGING) ? (laddr) : (laddr2paddr((laddr), (ucrw))) |
| UINT8 MEMCALL cpu_linear_memory_read_b(UINT32 laddr, const int crw, const int user_mode); | |
| UINT16 MEMCALL cpu_linear_memory_read_w(UINT32 laddr, const int crw, const int user_mode); | |
| UINT32 MEMCALL cpu_linear_memory_read_d(UINT32 laddr, const int crw, const int user_mode); | |
| void MEMCALL cpu_linear_memory_write_b(UINT32 laddr, UINT8 value, const int user_mode); | |
| void MEMCALL cpu_linear_memory_write_w(UINT32 laddr, UINT16 value, const int user_mode); | |
| void MEMCALL cpu_linear_memory_write_d(UINT32 laddr, UINT32 value, const int user_mode); | |
| #else /* !IA32_PAGING_EACHSIZE */ | |
| UINT32 MEMCALL cpu_memory_access_la_RMW(UINT32 laddr, UINT length, const int user_mode, UINT32 (*func)(UINT32, void *), void *arg); | |
| UINT32 MEMCALL cpu_linear_memory_read(UINT32 address, UINT length, const int crw, const int user_mode); | |
| void MEMCALL cpu_linear_memory_write(UINT32 address, UINT32 value, UINT length, const int user_mode); | |
| #endif /* IA32_PAGING_EACHSIZE */ | |
| void MEMCALL cpu_memory_access_la_region(UINT32 address, UINT length, const int crw, const int user_mode, BYTE *data); | |
| void MEMCALL paging_check(UINT32 laddr, UINT length, const int crw, const int user_mode); | |
| /* crw */ | /* ucrw */ |
| #define CPU_PAGE_READ (0 << 0) | |
| #define CPU_PAGE_WRITE (1 << 0) | #define CPU_PAGE_WRITE (1 << 0) |
| #define CPU_PAGE_CODE (1 << 1) | #define CPU_PAGE_CODE (1 << 1) |
| #define CPU_PAGE_DATA (1 << 2) | #define CPU_PAGE_DATA (1 << 2) |
| #define CPU_PAGE_READ_CODE (CPU_PAGE_READ|CPU_PAGE_CODE) | #define CPU_PAGE_USER_MODE (1 << 3) /* == CPU_MODE_USER */ |
| #define CPU_PAGE_READ_DATA (CPU_PAGE_READ|CPU_PAGE_DATA) | #define CPU_PAGE_READ_CODE (CPU_PAGE_CODE) |
| | #define CPU_PAGE_READ_DATA (CPU_PAGE_DATA) |
| #define CPU_PAGE_WRITE_DATA (CPU_PAGE_WRITE|CPU_PAGE_DATA) | #define CPU_PAGE_WRITE_DATA (CPU_PAGE_WRITE|CPU_PAGE_DATA) |
| #if defined(IA32_PAGING_EACHSIZE) | UINT8 MEMCALL cpu_memory_access_la_RMW_b(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); |
| | UINT16 MEMCALL cpu_memory_access_la_RMW_w(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); |
| #define cpu_lmemoryread(a,pl) \ | UINT32 MEMCALL cpu_memory_access_la_RMW_d(UINT32 laddr, UINT32 (*func)(UINT32, void *), void *arg); |
| (!CPU_STAT_PAGING) ? \ | UINT8 MEMCALL cpu_linear_memory_read_b(UINT32 laddr, const int ucrw); |
| cpu_memoryread(a) : cpu_linear_memory_read_b(a,CPU_PAGE_READ_DATA,pl) | UINT16 MEMCALL cpu_linear_memory_read_w(UINT32 laddr, const int ucrw); |
| #define cpu_lmemoryread_b(a,pl) cpu_lmemoryread(a,pl) | UINT32 MEMCALL cpu_linear_memory_read_d(UINT32 laddr, const int ucrw); |
| #define cpu_lmemoryread_w(a,pl) \ | UINT64 MEMCALL cpu_linear_memory_read_q(UINT32 laddr, const int ucrw); |
| (!CPU_STAT_PAGING) ? \ | REG80 MEMCALL cpu_linear_memory_read_f(UINT32 laddr, const int ucrw); |
| cpu_memoryread_w(a) : cpu_linear_memory_read_w(a,CPU_PAGE_READ_DATA,pl) | void MEMCALL cpu_linear_memory_write_b(UINT32 laddr, UINT8 value, const int user_mode); |
| #define cpu_lmemoryread_d(a,pl) \ | void MEMCALL cpu_linear_memory_write_w(UINT32 laddr, UINT16 value, const int user_mode); |
| (!CPU_STAT_PAGING) ? \ | void MEMCALL cpu_linear_memory_write_d(UINT32 laddr, UINT32 value, const int user_mode); |
| cpu_memoryread_d(a) : cpu_linear_memory_read_d(a,CPU_PAGE_READ_DATA,pl) | void MEMCALL cpu_linear_memory_write_q(UINT32 laddr, UINT64 value, const int user_mode); |
| | void MEMCALL cpu_linear_memory_write_f(UINT32 laddr, const REG80 *value, const int user_mode); |
| #define cpu_lmemorywrite(a,v,pl) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memorywrite(a,v) : cpu_linear_memory_write_b(a,v,pl) | |
| #define cpu_lmemorywrite_b(a,v,pl) cpu_lmemorywrite(a,v,pl) | |
| #define cpu_lmemorywrite_w(a,v,pl) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memorywrite_w(a,v) : cpu_linear_memory_write_w(a,v,pl) | |
| #define cpu_lmemorywrite_d(a,v,pl) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memorywrite_d(a,v) : cpu_linear_memory_write_d(a,v,pl) | |
| /* code */ | |
| #define cpu_lcmemoryread(a) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memoryread(a) : \ | |
| cpu_linear_memory_read_b(a,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #define cpu_lcmemoryread_w(a) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memoryread_w(a) : \ | |
| cpu_linear_memory_read_w(a,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #define cpu_lcmemoryread_d(a) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memoryread_d(a) : \ | |
| cpu_linear_memory_read_d(a,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #else /* !IA32_PAGING_EACHSIZE */ | |
| #define cpu_memory_access_la_RMW_b(l,f,a) \ | |
| cpu_memory_access_la_RMW(l,1,f,a) | |
| #define cpu_memory_access_la_RMW_w(l,f,a) \ | |
| cpu_memory_access_la_RMW(l,2,f,a) | |
| #define cpu_memory_access_la_RMW_d(l,f,a) \ | |
| cpu_memory_access_la_RMW(l,4,f,a) | |
| #define cpu_lmemoryread(a,pl) \ | #define cpu_lmemoryread(a,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memoryread(a) : \ | cpu_memoryread(a) : \ |
| (UINT8)cpu_linear_memory_read(a,1,CPU_PAGE_READ_DATA,pl) | cpu_linear_memory_read_b(a,CPU_PAGE_READ_DATA | (pl)) |
| #define cpu_lmemoryread_b(a,pl) cpu_lmemoryread(a,pl) | #define cpu_lmemoryread_b(a,pl) cpu_lmemoryread(a,pl) |
| #define cpu_lmemoryread_w(a,pl) \ | #define cpu_lmemoryread_w(a,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memoryread_w(a) : \ | cpu_memoryread_w(a) : \ |
| (UINT16)cpu_linear_memory_read(a,2,CPU_PAGE_READ_DATA,pl) | cpu_linear_memory_read_w(a,CPU_PAGE_READ_DATA | (pl)) |
| #define cpu_lmemoryread_d(a,pl) \ | #define cpu_lmemoryread_d(a,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memoryread_d(a) : \ | cpu_memoryread_d(a) : \ |
| cpu_linear_memory_read(a,4,CPU_PAGE_READ_DATA,pl) | cpu_linear_memory_read_d(a,CPU_PAGE_READ_DATA | (pl)) |
| | #define cpu_lmemoryread_q(a,pl) \ |
| | (!CPU_STAT_PAGING) ? \ |
| | cpu_memoryread_q(a) : \ |
| | cpu_linear_memory_read_q(a,CPU_PAGE_READ_DATA | (pl)) |
| #define cpu_lmemorywrite(a,v,pl) \ | #define cpu_lmemorywrite(a,v,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memorywrite(a,v) : \ | cpu_memorywrite(a,v) : cpu_linear_memory_write_b(a,v,pl) |
| cpu_linear_memory_write(a,v,1,pl) | |
| #define cpu_lmemorywrite_b(a,v,pl) cpu_lmemorywrite(a,v,pl) | #define cpu_lmemorywrite_b(a,v,pl) cpu_lmemorywrite(a,v,pl) |
| #define cpu_lmemorywrite_w(a,v,pl) \ | #define cpu_lmemorywrite_w(a,v,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memorywrite_w(a,v) : \ | cpu_memorywrite_w(a,v) : cpu_linear_memory_write_w(a,v,pl) |
| cpu_linear_memory_write(a,v,2,pl) | |
| #define cpu_lmemorywrite_d(a,v,pl) \ | #define cpu_lmemorywrite_d(a,v,pl) \ |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memorywrite_d(a,v) : \ | cpu_memorywrite_d(a,v) : cpu_linear_memory_write_d(a,v,pl) |
| cpu_linear_memory_write(a,v,4,pl) | #define cpu_lmemorywrite_q(a,v,pl) \ |
| /* code */ | |
| #define cpu_lcmemoryread(a) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memoryread(a) : \ | |
| (UINT8)cpu_linear_memory_read(a,1,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #define cpu_lcmemoryread_w(a) \ | |
| (!CPU_STAT_PAGING) ? \ | |
| cpu_memoryread_w(a) : \ | |
| (UINT16)cpu_linear_memory_read(a,2,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #define cpu_lcmemoryread_d(a) \ | |
| (!CPU_STAT_PAGING) ? \ | (!CPU_STAT_PAGING) ? \ |
| cpu_memoryread_d(a) : \ | cpu_memorywrite_q(a,v) : cpu_linear_memory_write_q(a,v,pl) |
| cpu_linear_memory_read(a,4,CPU_PAGE_READ_CODE,CPU_STAT_USER_MODE) | |
| #endif /* IA32_PAGING_EACHSIZE */ | |
| /* | /* |
| * linear address memory access with superviser mode | * linear address memory access with superviser mode |
| Line 258 void MEMCALL paging_check(UINT32 laddr, | Line 197 void MEMCALL paging_check(UINT32 laddr, |
| /* | /* |
| * CR3 (Page Directory Entry base physical address) | * TLB function |
| */ | */ |
| #define set_CR3(cr3) \ | typedef struct { |
| do { \ | UINT32 tag; /* linear address */ |
| VERBOSE(("set_CR3: old = %08x, new = 0x%08x", CPU_CR3, (cr3) & CPU_CR3_MASK)); \ | #define TLB_ENTRY_TAG_VALID (1 << 0) |
| CPU_CR3 = (cr3) & CPU_CR3_MASK; \ | /* pde & pte & CPU_PTE_WRITABLE (1 << 1) */ |
| CPU_STAT_PDE_BASE = CPU_CR3 & CPU_CR3_PD_MASK; \ | /* pde & pte & CPU_PTE_USER_MODE (1 << 2) */ |
| tlb_flush(FALSE); \ | #define TLB_ENTRY_TAG_DIRTY CPU_PTE_DIRTY /* (1 << 6) */ |
| } while (/*CONSTCOND*/ 0) | #define TLB_ENTRY_TAG_GLOBAL CPU_PTE_GLOBAL_PAGE /* (1 << 8) */ |
| | #define TLB_ENTRY_TAG_MAX_SHIFT 12 |
| | UINT32 paddr; /* physical address */ |
| | UINT8 *memp; /* shortcut for pre-fetch queue */ |
| | } TLB_ENTRY_T; |
| /* | |
| * TLB function | |
| */ | |
| #if defined(IA32_SUPPORT_TLB) | #if defined(IA32_SUPPORT_TLB) |
| void tlb_init(void); | void tlb_init(void); |
| void tlb_flush(BOOL allflush); | void MEMCALL tlb_flush(BOOL allflush); |
| void tlb_flush_page(UINT32 laddr); | void MEMCALL tlb_flush_page(UINT32 laddr); |
| | TLB_ENTRY_T* MEMCALL tlb_lookup(const UINT32 laddr, const int ucrw); |
| #else | #else |
| #define tlb_init() | #define tlb_init() |
| #define tlb_flush(allflush) (void)(allflush) | #define tlb_flush(allflush) |
| #define tlb_flush_page(laddr) (void)(laddr) | #define tlb_flush_page(la) |
| | #define tlb_lookup(la, ucrw) NULL |
| #endif | #endif |
| #ifdef __cplusplus | #ifdef __cplusplus |