| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: mipsX_subr.S,v 1.26.36.1.2.51 2011/12/13 07:14:51 matt Exp $ */ | | 1 | /* $NetBSD: mipsX_subr.S,v 1.26.36.1.2.52 2011/12/23 23:40:00 matt Exp $ */ |
2 | | | 2 | |
3 | /* | | 3 | /* |
4 | * Copyright 2002 Wasabi Systems, Inc. | | 4 | * Copyright 2002 Wasabi Systems, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * Written by Simon Burge for Wasabi Systems, Inc. | | 7 | * Written by Simon Burge for Wasabi Systems, Inc. |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -1528,57 +1528,64 @@ END(MIPSX(systemcall)) | | | @@ -1528,57 +1528,64 @@ END(MIPSX(systemcall)) |
1528 | * from some types of errors but it is tricky. | | 1528 | * from some types of errors but it is tricky. |
1529 | */ | | 1529 | */ |
1530 | .p2align 5 | | 1530 | .p2align 5 |
1531 | NESTED_NOPROFILE(MIPSX(cache_exception), KERNFRAME_SIZ, ra) | | 1531 | NESTED_NOPROFILE(MIPSX(cache_exception), KERNFRAME_SIZ, ra) |
1532 | .set noat | | 1532 | .set noat |
1533 | .mask 0x80000000, -4 | | 1533 | .mask 0x80000000, -4 |
1534 | #ifdef sbmips /* XXX! SB-1 needs a real cache error handler */ | | 1534 | #ifdef sbmips /* XXX! SB-1 needs a real cache error handler */ |
1535 | eret | | 1535 | eret |
1536 | nop | | 1536 | nop |
1537 | #endif | | 1537 | #endif |
1538 | PTR_LA k0, panic # return to panic | | 1538 | PTR_LA k0, panic # return to panic |
1539 | PTR_LA a0, 9f # panicstr | | 1539 | PTR_LA a0, 9f # panicstr |
1540 | _MFC0 a1, MIPS_COP_0_ERROR_PC | | 1540 | _MFC0 a1, MIPS_COP_0_ERROR_PC |
1541 | #if (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1541 | #if (MIPS64_RMIXL) > 0 |
1542 | .set push | | | |
1543 | .set arch=xlr | | | |
1544 | li k1, 0x309 /* L1D_CACHE_ERROR_LOG */ | | 1542 | li k1, 0x309 /* L1D_CACHE_ERROR_LOG */ |
1545 | mfcr a2, k1 | | 1543 | mfcr a2, k1 |
1546 | li k1, 0x30b /* L1D_CACHE_INTERRUPT */ | | 1544 | li k1, 0x30b /* L1D_CACHE_INTERRUPT */ |
1547 | mfcr a3, k1 | | 1545 | mfcr a3, k1 |
1548 | .set pop | | 1546 | #if defined(__mips_o32) |
| | | 1547 | #error O32 not supported. |
| | | 1548 | #endif |
| | | 1549 | mfc0 a4, MIPS_COP_0_STATUS |
| | | 1550 | mfc0 a5, MIPS_COP_0_CAUSE |
| | | 1551 | #elif (MIPS64R2_RMIXL) > 0 |
| | | 1552 | li k1, 0x308 /* LSU_CERR_LOG0 */ |
| | | 1553 | mfcr a3, k1 |
| | | 1554 | li k1, 0x309 /* LSU_CERR_LOG1 */ |
| | | 1555 | mfcr a2, k1 |
1549 | #if defined(__mips_o32) | | 1556 | #if defined(__mips_o32) |
1550 | #error O32 not supported. | | 1557 | #error O32 not supported. |
1551 | #endif | | 1558 | #endif |
1552 | mfc0 a4, MIPS_COP_0_STATUS | | 1559 | mfc0 a4, MIPS_COP_0_STATUS |
1553 | mfc0 a5, MIPS_COP_0_CAUSE | | 1560 | mfc0 a5, MIPS_COP_0_CAUSE |
1554 | #else | | 1561 | #else |
1555 | mfc0 a2, MIPS_COP_0_ECC | | 1562 | mfc0 a2, MIPS_COP_0_ECC |
1556 | mfc0 a3, MIPS_COP_0_CACHE_ERR | | 1563 | mfc0 a3, MIPS_COP_0_CACHE_ERR |
1557 | #endif | | 1564 | #endif |
1558 | | | 1565 | |
1559 | _MTC0 k0, MIPS_COP_0_ERROR_PC # set return address | | 1566 | _MTC0 k0, MIPS_COP_0_ERROR_PC # set return address |
1560 | COP0_SYNC | | 1567 | COP0_SYNC |
1561 | | | 1568 | |
1562 | mfc0 k0, MIPS_COP_0_STATUS # restore status | | 1569 | mfc0 k0, MIPS_COP_0_STATUS # restore status |
1563 | li k1, MIPS3_SR_DIAG_PE # ignore further errors | | 1570 | li k1, MIPS3_SR_DIAG_PE # ignore further errors |
1564 | or k0, k1 | | 1571 | or k0, k1 |
1565 | mtc0 k0, MIPS_COP_0_STATUS # restore status | | 1572 | mtc0 k0, MIPS_COP_0_STATUS # restore status |
1566 | COP0_SYNC | | 1573 | COP0_SYNC |
1567 | | | 1574 | |
1568 | eret | | 1575 | eret |
1569 | | | 1576 | |
1570 | #if defined(MIPS64_XLS) | | 1577 | #if (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1571 | MSG("cache error @ EPC %#lx\nL1D_CACHE_ERROR_LOG %#lx\nL1D_CACHE_INTERRUPT %#lx\nstatus %#x, cause %#x"); | | 1578 | MSG("cache error @ EPC %#llx\nL1D_CACHE_ERROR_LOG %#llx\nL1D_CACHE_INTERRUPT %#lx\nstatus %#x, cause %#x"); |
1572 | #else | | 1579 | #else |
1573 | MSG("cache error @ EPC 0x%x ErrCtl 0x%x CacheErr 0x%x"); | | 1580 | MSG("cache error @ EPC 0x%x ErrCtl 0x%x CacheErr 0x%x"); |
1574 | #endif | | 1581 | #endif |
1575 | .set at | | 1582 | .set at |
1576 | END(MIPSX(cache_exception)) | | 1583 | END(MIPSX(cache_exception)) |
1577 | | | 1584 | |
1578 | | | 1585 | |
1579 | /*---------------------------------------------------------------------------- | | 1586 | /*---------------------------------------------------------------------------- |
1580 | * | | 1587 | * |
1581 | * R4000 TLB exception handlers | | 1588 | * R4000 TLB exception handlers |
1582 | * | | 1589 | * |
1583 | *---------------------------------------------------------------------------- | | 1590 | *---------------------------------------------------------------------------- |
1584 | */ | | 1591 | */ |
| @@ -1905,27 +1912,27 @@ LEAF(MIPSX(tlb_update_addr)) | | | @@ -1905,27 +1912,27 @@ LEAF(MIPSX(tlb_update_addr)) |
1905 | COP0_SYNC | | 1912 | COP0_SYNC |
1906 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1913 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1907 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 1914 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
1908 | 1: li v0, __SIMPLELOCK_LOCKED | | 1915 | 1: li v0, __SIMPLELOCK_LOCKED |
1909 | swapw v0, ta3 | | 1916 | swapw v0, ta3 |
1910 | bnez v0, 1b | | 1917 | bnez v0, 1b |
1911 | nop | | 1918 | nop |
1912 | #endif | | 1919 | #endif |
1913 | #if (PGSHIFT & 1) == 0 | | 1920 | #if (PGSHIFT & 1) == 0 |
1914 | and t1, a0, MIPS3_PG_ODDPG # t1 = Even/Odd flag | | 1921 | and t1, a0, MIPS3_PG_ODDPG # t1 = Even/Odd flag |
1915 | #endif | | 1922 | #endif |
1916 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) | | 1923 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) |
1917 | and a0, a0, v0 | | 1924 | and a0, a0, v0 |
1918 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save current PID | | 1925 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save current ASID |
1919 | _MTC0 a0, MIPS_COP_0_TLB_HI # Init high reg | | 1926 | _MTC0 a0, MIPS_COP_0_TLB_HI # Init high reg |
1920 | COP0_SYNC | | 1927 | COP0_SYNC |
1921 | and a2, a1, MIPS3_PG_G # Copy global bit | | 1928 | and a2, a1, MIPS3_PG_G # Copy global bit |
1922 | tlbp # Probe for the entry. | | 1929 | tlbp # Probe for the entry. |
1923 | COP0_SYNC | | 1930 | COP0_SYNC |
1924 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 | | 1931 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
1925 | _EXT a1, a1, 0, WIRED_POS | | 1932 | _EXT a1, a1, 0, WIRED_POS |
1926 | #else | | 1933 | #else |
1927 | _SLL a1, a1, WIRED_SHIFT # Clear top 34 bits of EntryLo | | 1934 | _SLL a1, a1, WIRED_SHIFT # Clear top 34 bits of EntryLo |
1928 | _SRL a1, a1, WIRED_SHIFT | | 1935 | _SRL a1, a1, WIRED_SHIFT |
1929 | #endif | | 1936 | #endif |
1930 | mfc0 v0, MIPS_COP_0_TLB_INDEX # See what we got | | 1937 | mfc0 v0, MIPS_COP_0_TLB_INDEX # See what we got |
1931 | #if (PGSHIFT & 1) == 0 | | 1938 | #if (PGSHIFT & 1) == 0 |
| @@ -1962,27 +1969,27 @@ LEAF(MIPSX(tlb_update_addr)) | | | @@ -1962,27 +1969,27 @@ LEAF(MIPSX(tlb_update_addr)) |
1962 | COP0_SYNC | | 1969 | COP0_SYNC |
1963 | _MTC0 a1, MIPS_COP_0_TLB_LO1 # init low reg1. | | 1970 | _MTC0 a1, MIPS_COP_0_TLB_LO1 # init low reg1. |
1964 | COP0_SYNC | | 1971 | COP0_SYNC |
1965 | tlbwi # update slot found | | 1972 | tlbwi # update slot found |
1966 | COP0_SYNC | | 1973 | COP0_SYNC |
1967 | #endif /* (PGSHIFT & 1) == 0 */ | | 1974 | #endif /* (PGSHIFT & 1) == 0 */ |
1968 | 4: | | 1975 | 4: |
1969 | #ifdef MIPS3 | | 1976 | #ifdef MIPS3 |
1970 | nop # Make sure pipeline | | 1977 | nop # Make sure pipeline |
1971 | nop # advances before we | | 1978 | nop # advances before we |
1972 | nop # use the TLB. | | 1979 | nop # use the TLB. |
1973 | nop | | 1980 | nop |
1974 | #endif | | 1981 | #endif |
1975 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID | | 1982 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID |
1976 | COP0_SYNC | | 1983 | COP0_SYNC |
1977 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1984 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1978 | INT_S zero, 0(ta3) | | 1985 | INT_S zero, 0(ta3) |
1979 | #endif | | 1986 | #endif |
1980 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | 1987 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
1981 | JR_HB_RA | | 1988 | JR_HB_RA |
1982 | END(MIPSX(tlb_update_addr)) | | 1989 | END(MIPSX(tlb_update_addr)) |
1983 | | | 1990 | |
1984 | /*-------------------------------------------------------------------------- | | 1991 | /*-------------------------------------------------------------------------- |
1985 | * | | 1992 | * |
1986 | * mipsN_tlb_read_indexed -- | | 1993 | * mipsN_tlb_read_indexed -- |
1987 | * | | 1994 | * |
1988 | * Read the TLB entry. | | 1995 | * Read the TLB entry. |
| @@ -1998,100 +2005,109 @@ END(MIPSX(tlb_update_addr)) | | | @@ -1998,100 +2005,109 @@ END(MIPSX(tlb_update_addr)) |
1998 | *-------------------------------------------------------------------------- | | 2005 | *-------------------------------------------------------------------------- |
1999 | */ | | 2006 | */ |
2000 | LEAF(MIPSX(tlb_read_indexed)) | | 2007 | LEAF(MIPSX(tlb_read_indexed)) |
2001 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. | | 2008 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. |
2002 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts | | 2009 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts |
2003 | COP0_SYNC | | 2010 | COP0_SYNC |
2004 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2011 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2005 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2012 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2006 | 1: li v0, __SIMPLELOCK_LOCKED | | 2013 | 1: li v0, __SIMPLELOCK_LOCKED |
2007 | swapw v0, ta3 | | 2014 | swapw v0, ta3 |
2008 | bnez v0, 1b | | 2015 | bnez v0, 1b |
2009 | nop | | 2016 | nop |
2010 | #endif | | 2017 | #endif |
2011 | mfc0 ta2, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2018 | _MFC0 ta0, MIPS_COP_0_TLB_HI # Get current ASID |
| | | 2019 | mfc0 ta1, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
| | | 2020 | mfc0 ta2, MIPS_COP_0_TLB_INDEX # save the index register |
2012 | #ifdef MIPS3 | | 2021 | #ifdef MIPS3 |
2013 | nop | | 2022 | nop |
2014 | #endif | | 2023 | #endif |
2015 | _MFC0 t0, MIPS_COP_0_TLB_HI # Get current PID | | | |
2016 | | | 2024 | |
2017 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index register | | 2025 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index register |
2018 | COP0_SYNC | | 2026 | COP0_SYNC |
2019 | tlbr # Read from the TLB | | 2027 | tlbr # Read from the TLB |
2020 | COP0_SYNC | | 2028 | COP0_SYNC |
2021 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # fetch the pgMask | | 2029 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # fetch the pgMask |
2022 | _MFC0 t3, MIPS_COP_0_TLB_HI # fetch the hi entry | | 2030 | _MFC0 t2, MIPS_COP_0_TLB_HI # fetch the hi entry |
2023 | _MFC0 ta0, MIPS_COP_0_TLB_LO0 # See what we got | | 2031 | _MFC0 t1, MIPS_COP_0_TLB_LO1 # See what we got |
2024 | _MFC0 ta1, MIPS_COP_0_TLB_LO1 # See what we got | | 2032 | _MFC0 t0, MIPS_COP_0_TLB_LO0 # See what we got |
2025 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID | | 2033 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore ASID |
2026 | mtc0 ta2, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2034 | mtc0 ta1, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
| | | 2035 | mtc0 ta2, MIPS_COP_0_TLB_INDEX # make sure index is invalid |
2027 | COP0_SYNC | | 2036 | COP0_SYNC |
2028 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2037 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2029 | INT_S zero, 0(ta3) # unlock the tlb | | 2038 | INT_S zero, 0(ta3) # unlock the tlb |
2030 | #endif | | 2039 | #endif |
2031 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | 2040 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
2032 | COP0_SYNC | | 2041 | COP0_SYNC |
2033 | PTR_S t3, TLBMASK_HI(a1) | | 2042 | PTR_S t2, TLBMASK_HI(a1) |
2034 | INT_S ta0, TLBMASK_LO0(a1) | | 2043 | INT_S t1, TLBMASK_LO1(a1) |
2035 | INT_S ta1, TLBMASK_LO1(a1) | | 2044 | INT_S t0, TLBMASK_LO0(a1) |
2036 | j ra | | 2045 | j ra |
2037 | INT_S t2, TLBMASK_MASK(a1) | | 2046 | INT_S t3, TLBMASK_MASK(a1) |
2038 | END(MIPSX(tlb_read_indexed)) | | 2047 | END(MIPSX(tlb_read_indexed)) |
2039 | | | 2048 | |
2040 | /*-------------------------------------------------------------------------- | | 2049 | /*-------------------------------------------------------------------------- |
2041 | * | | 2050 | * |
2042 | * void mipsN_tlb_invalidate_addr(vaddr_t va) | | 2051 | * void mipsN_tlb_invalidate_addr(vaddr_t va) |
2043 | * | | 2052 | * |
2044 | * Invalidate a TLB entry which has the given vaddr and ASID if found. | | 2053 | * Invalidate a TLB entry which has the given vaddr and ASID if found. |
2045 | *-------------------------------------------------------------------------- | | 2054 | *-------------------------------------------------------------------------- |
2046 | */ | | 2055 | */ |
2047 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr)) | | 2056 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr)) |
2048 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2057 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2049 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2058 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2050 | COP0_SYNC | | 2059 | COP0_SYNC |
2051 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2060 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2052 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2061 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2053 | 1: li v0, __SIMPLELOCK_LOCKED | | 2062 | 1: li v0, __SIMPLELOCK_LOCKED |
2054 | swapw v0, ta3 | | 2063 | swapw v0, ta3 |
2055 | bnez v0, 1b | | 2064 | bnez v0, 1b |
2056 | nop | | 2065 | nop |
2057 | #endif | | 2066 | #endif |
2058 | | | 2067 | |
2059 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) | | 2068 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) |
2060 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2069 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save current ASID |
2061 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2070 | mfc0 ta1, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
| | | 2071 | mfc0 ta2, MIPS_COP_0_TLB_INDEX # see what we got |
2062 | and a0, v0 # make sure valid entryHi | | 2072 | and a0, v0 # make sure valid entryHi |
2063 | _MTC0 a0, MIPS_COP_0_TLB_HI # look for the vaddr & ASID | | 2073 | _MTC0 a0, MIPS_COP_0_TLB_HI # look for the vaddr & ASID |
2064 | COP0_SYNC | | 2074 | COP0_SYNC |
2065 | tlbp # probe the entry in question | | 2075 | tlbp # probe the entry in question |
2066 | COP0_SYNC | | 2076 | COP0_SYNC |
2067 | mfc0 v0, MIPS_COP_0_TLB_INDEX # see what we got | | 2077 | mfc0 t2, MIPS_COP_0_TLB_INDEX # see what we got |
2068 | bltz v0, 1f # index < 0 then skip | | 2078 | bltz t2, 1f # index < 0 then skip |
2069 | li t1, MIPS_KSEG0_START # invalid address | | 2079 | li v0, MIPS_KSEG0_START # invalid address |
2070 | PTR_SLL v0, (PGSHIFT | 1) # PAGE_SHIFT | 1 | | 2080 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
2071 | PTR_ADDU t1, v0 | | 2081 | _INS v0, t2, PGSHIFT | 1, 10 |
2072 | _MTC0 t1, MIPS_COP_0_TLB_HI # make entryHi invalid | | 2082 | #else |
| | | 2083 | PTR_SLL t2, (PGSHIFT | 1) # PAGE_SHIFT | 1 |
| | | 2084 | PTR_ADDU v0, t2 |
| | | 2085 | #endif |
| | | 2086 | _MTC0 v0, MIPS_COP_0_TLB_HI # make entryHi invalid |
2073 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2087 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2074 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2088 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2075 | #if 0 | | 2089 | #if 0 |
2076 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask | | 2090 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask |
2077 | #endif | | 2091 | #endif |
2078 | COP0_SYNC | | 2092 | COP0_SYNC |
2079 | | | 2093 | |
2080 | tlbwi | | 2094 | tlbwi |
2081 | COP0_SYNC | | 2095 | COP0_SYNC |
2082 | 1: | | 2096 | 1: |
2083 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID | | 2097 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore current ASID |
2084 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2098 | mtc0 ta1, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
| | | 2099 | mtc0 ta2, MIPS_COP_0_TLB_INDEX # invalidate TLB index |
| | | 2100 | |
2085 | COP0_SYNC | | 2101 | COP0_SYNC |
2086 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2102 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2087 | INT_S zero, 0(ta3) # unlock the tlb | | 2103 | INT_S zero, 0(ta3) # unlock the tlb |
2088 | #endif | | 2104 | #endif |
2089 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2105 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2090 | JR_HB_RA | | 2106 | JR_HB_RA |
2091 | END(MIPSX(tlb_invalidate_addr)) | | 2107 | END(MIPSX(tlb_invalidate_addr)) |
2092 | | | 2108 | |
2093 | /* | | 2109 | /* |
2094 | * void mipsN_tlb_invalidate_asids(uint32_t base, uint32_t limit); | | 2110 | * void mipsN_tlb_invalidate_asids(uint32_t base, uint32_t limit); |
2095 | * | | 2111 | * |
2096 | * Invalidate TLB entries belong to per process user spaces with | | 2112 | * Invalidate TLB entries belong to per process user spaces with |
2097 | * base <= ASIDs < limit while leaving entries for kernel space | | 2113 | * base <= ASIDs < limit while leaving entries for kernel space |
| @@ -2099,67 +2115,74 @@ END(MIPSX(tlb_invalidate_addr)) | | | @@ -2099,67 +2115,74 @@ END(MIPSX(tlb_invalidate_addr)) |
2099 | */ | | 2115 | */ |
2100 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_asids)) | | 2116 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_asids)) |
2101 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2117 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2102 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2118 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2103 | COP0_SYNC | | 2119 | COP0_SYNC |
2104 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2120 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2105 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2121 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2106 | 1: li v0, __SIMPLELOCK_LOCKED | | 2122 | 1: li v0, __SIMPLELOCK_LOCKED |
2107 | swapw v0, ta3 | | 2123 | swapw v0, ta3 |
2108 | bnez v0, 1b | | 2124 | bnez v0, 1b |
2109 | nop | | 2125 | nop |
2110 | #endif | | 2126 | #endif |
2111 | | | 2127 | |
2112 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save the current PID. | | 2128 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save the current ASID. |
2113 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2129 | mfc0 t1, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
| | | 2130 | mfc0 t2, MIPS_COP_0_TLB_WIRED |
| | | 2131 | INT_L t3, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2114 | li v0, MIPS_KSEG0_START # invalid address | | 2132 | li v0, MIPS_KSEG0_START # invalid address |
2115 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2133 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
2116 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2134 | _INS v0, t2, PGSHIFT | 1, 10 # insert TLB index as page # |
| | | 2135 | #else |
| | | 2136 | sll ta0, t2, PGSHIFT | 1 # PAGE_SHIFT | 1 |
| | | 2137 | PTR_ADDU v0, ta0 # start at first unwired entry |
| | | 2138 | #endif |
| | | 2139 | li ta0, 1 << (PGSHIFT | 1) # address increment. |
2117 | | | 2140 | |
2118 | # do {} while (t1 < t2) | | 2141 | # do {} while (t2 < t3) |
2119 | 1: | | 2142 | 1: |
2120 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index | | 2143 | mtc0 t2, MIPS_COP_0_TLB_INDEX # set index |
2121 | COP0_SYNC | | 2144 | COP0_SYNC |
2122 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | | |
2123 | tlbr # obtain an entry | | 2145 | tlbr # obtain an entry |
2124 | COP0_SYNC | | 2146 | COP0_SYNC |
2125 | _MFC0 a0, MIPS_COP_0_TLB_LO1 | | 2147 | _MFC0 a2, MIPS_COP_0_TLB_LO1 |
2126 | and a0, MIPS3_PG_G # check to see it has G bit | | 2148 | and a2, MIPS3_PG_G # check to see it has G bit |
2127 | bnez a0, 2f # yep, skip this one. | | 2149 | bnez a2, 2f # yep, skip this one. |
2128 | nop | | 2150 | nop |
2129 | _MFC0 a0, MIPS_COP_0_TLB_HI # get VA and ASID | | 2151 | _MFC0 a2, MIPS_COP_0_TLB_HI # get VA and ASID |
2130 | and a0, MIPS3_PG_ASID # focus on ASID | | 2152 | and a2, MIPS3_PG_ASID # focus on ASID |
2131 | sltu a3, a0, a1 # asid < base? | | 2153 | sltu a3, a2, a0 # asid < base? |
2132 | bnez a3, 2f # yes, skip this entry. | | 2154 | bnez a3, 2f # yes, skip this entry. |
2133 | nop | | 2155 | nop |
2134 | sltu a3, a0, a2 # asid < limit | | 2156 | sltu a3, a2, a1 # asid < limit |
2135 | beqz a3, 2f # nope, skip this entry. | | 2157 | beqz a3, 2f # nope, skip this entry. |
2136 | nop | | 2158 | nop |
2137 | PTR_ADDU ta0, v0 | | | |
2138 | | | 2159 | |
2139 | _MTC0 ta0, MIPS_COP_0_TLB_HI # make entryHi invalid | | 2160 | _MTC0 v0, MIPS_COP_0_TLB_HI # make entryHi invalid |
2140 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2161 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2141 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2162 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2142 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry | | 2163 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry |
2143 | COP0_SYNC | | 2164 | COP0_SYNC |
2144 | tlbwi # invalidate the TLB entry | | 2165 | tlbwi # invalidate the TLB entry |
2145 | COP0_SYNC | | 2166 | COP0_SYNC |
2146 | 2: | | 2167 | 2: |
2147 | addu t1, 1 | | 2168 | addu t2, 1 |
2148 | bne t1, t2, 1b | | 2169 | bne t2, t3, 1b |
2149 | nop | | 2170 | PTR_ADDU v0, ta0 |
2150 | | | 2171 | |
2151 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID. | | 2172 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID. |
2152 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2173 | mtc0 t1, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
| | | 2174 | # since v0 has its MSB set, it is an invalid TLB index |
| | | 2175 | mtc0 v0, MIPS_COP_0_TLB_INDEX # invalidate index |
2153 | COP0_SYNC | | 2176 | COP0_SYNC |
2154 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2177 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2155 | INT_S zero, 0(ta3) # unlock the tlb | | 2178 | INT_S zero, 0(ta3) # unlock the tlb |
2156 | #endif | | 2179 | #endif |
2157 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2180 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2158 | JR_HB_RA # new ASID will be set soon | | 2181 | JR_HB_RA # new ASID will be set soon |
2159 | END(MIPSX(tlb_invalidate_asids)) | | 2182 | END(MIPSX(tlb_invalidate_asids)) |
2160 | | | 2183 | |
2161 | /* | | 2184 | /* |
2162 | * void mipsN_tlb_invalidate_globals(void); | | 2185 | * void mipsN_tlb_invalidate_globals(void); |
2163 | * | | 2186 | * |
2164 | * Invalidate the non-wired TLB entries belonging to kernel space while | | 2187 | * Invalidate the non-wired TLB entries belonging to kernel space while |
2165 | * leaving entries for user space (not marked global) intact. | | 2188 | * leaving entries for user space (not marked global) intact. |
| @@ -2169,56 +2192,62 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_glob | | | @@ -2169,56 +2192,62 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_glob |
2169 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2192 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2170 | COP0_SYNC | | 2193 | COP0_SYNC |
2171 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2194 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2172 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2195 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2173 | 1: li v0, __SIMPLELOCK_LOCKED | | 2196 | 1: li v0, __SIMPLELOCK_LOCKED |
2174 | swapw v0, ta3 | | 2197 | swapw v0, ta3 |
2175 | bnez v0, 1b | | 2198 | bnez v0, 1b |
2176 | nop | | 2199 | nop |
2177 | #endif | | 2200 | #endif |
2178 | | | 2201 | |
2179 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2202 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID |
2180 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2203 | mfc0 t1, MIPS_COP_0_TLB_WIRED |
2181 | li v0, MIPS_KSEG0_START # invalid address | | 2204 | li v0, MIPS_KSEG0_START # invalid address |
| | | 2205 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
| | | 2206 | _INS v0, t1, PGSHIFT | 1, 10 # insert TLB index as page # |
| | | 2207 | #else |
| | | 2208 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 |
| | | 2209 | PTR_ADDU v0, ta0 # offset for TLB index |
| | | 2210 | #endif |
| | | 2211 | li ta0, 1 << (PGSHIFT | 1) |
2182 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2212 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2183 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2213 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
2184 | | | 2214 | |
2185 | # do {} while (t1 < t2) | | 2215 | # do {} while (t1 < t2) |
2186 | 1: | | 2216 | 1: |
2187 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index | | 2217 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index |
2188 | COP0_SYNC | | 2218 | COP0_SYNC |
2189 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | | |
2190 | tlbr # obtain an entry | | 2219 | tlbr # obtain an entry |
2191 | COP0_SYNC | | 2220 | COP0_SYNC |
2192 | _MFC0 a0, MIPS_COP_0_TLB_LO1 | | 2221 | _MFC0 a0, MIPS_COP_0_TLB_LO1 |
2193 | and a0, MIPS3_PG_G # check to see it has G bit | | 2222 | and a0, MIPS3_PG_G # check to see it has G bit |
2194 | beqz a0, 2f # no, skip this entry | | 2223 | beqz a0, 2f # no, skip this entry |
2195 | nop | | 2224 | nop |
2196 | PTR_ADDU ta0, v0 | | | |
2197 | | | 2225 | |
2198 | _MTC0 ta0, MIPS_COP_0_TLB_HI # make entryHi invalid | | 2226 | _MTC0 v0, MIPS_COP_0_TLB_HI # make entryHi invalid |
2199 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2227 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2200 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2228 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2201 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry | | 2229 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry |
2202 | COP0_SYNC | | 2230 | COP0_SYNC |
2203 | tlbwi # invalidate the TLB entry | | 2231 | tlbwi # invalidate the TLB entry |
2204 | COP0_SYNC | | 2232 | COP0_SYNC |
2205 | 2: | | 2233 | 2: |
2206 | addu t1, 1 | | 2234 | addu t1, 1 |
2207 | bne t1, t2, 1b | | 2235 | bne t1, t2, 1b |
2208 | nop | | 2236 | PTR_ADDU v0, ta0 |
2209 | | | 2237 | |
2210 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID | | 2238 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID |
2211 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2239 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
| | | 2240 | mtc0 v0, MIPS_COP_0_TLB_INDEX # invalidate index |
2212 | COP0_SYNC | | 2241 | COP0_SYNC |
2213 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2242 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2214 | INT_S zero, 0(ta3) # unlock the tlb | | 2243 | INT_S zero, 0(ta3) # unlock the tlb |
2215 | #endif | | 2244 | #endif |
2216 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2245 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2217 | JR_HB_RA | | 2246 | JR_HB_RA |
2218 | END(MIPSX(tlb_invalidate_globals)) | | 2247 | END(MIPSX(tlb_invalidate_globals)) |
2219 | | | 2248 | |
2220 | /* | | 2249 | /* |
2221 | * void mipsN_tlb_invalidate_all(void); | | 2250 | * void mipsN_tlb_invalidate_all(void); |
2222 | * | | 2251 | * |
2223 | * Invalidate all of non-wired TLB entries. | | 2252 | * Invalidate all of non-wired TLB entries. |
2224 | */ | | 2253 | */ |
| @@ -2230,64 +2259,70 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_all) | | | @@ -2230,64 +2259,70 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_all) |
2230 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2259 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2231 | 1: li v0, __SIMPLELOCK_LOCKED | | 2260 | 1: li v0, __SIMPLELOCK_LOCKED |
2232 | swapw v0, ta3 | | 2261 | swapw v0, ta3 |
2233 | bnez v0, 1b | | 2262 | bnez v0, 1b |
2234 | nop | | 2263 | nop |
2235 | #endif | | 2264 | #endif |
2236 | | | 2265 | |
2237 | INT_L a0, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2266 | INT_L a0, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2238 | | | 2267 | |
2239 | li v0, MIPS_KSEG0_START # invalid address | | 2268 | li v0, MIPS_KSEG0_START # invalid address |
2240 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2269 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID |
2241 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2270 | mfc0 t1, MIPS_COP_0_TLB_WIRED |
2242 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2271 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
| | | 2272 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
| | | 2273 | _INS v0, t1, PGSHIFT | 1, 10 # insert TLB index as page # |
| | | 2274 | #else |
| | | 2275 | sll ta0, t1, PGSHIFT | 1 # addr for TLB index |
| | | 2276 | PTR_ADDU v0, ta0 |
| | | 2277 | #endif |
| | | 2278 | li ta0, 1 << (PGSHIFT | 1) |
2243 | | | 2279 | |
2244 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2280 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2245 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2281 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2246 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask | | 2282 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask |
2247 | | | 2283 | |
2248 | # do {} while (t1 < a0) | | 2284 | # do {} while (t1 < a0) |
2249 | 1: | | 2285 | 1: |
2250 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set TLBindex | | 2286 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set TLBindex |
2251 | COP0_SYNC | | 2287 | COP0_SYNC |
2252 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | 2288 | _MTC0 v0, MIPS_COP_0_TLB_HI # make entryHi invalid |
2253 | PTR_ADDU ta0, v0 | | | |
2254 | _MTC0 ta0, MIPS_COP_0_TLB_HI # make entryHi invalid | | | |
2255 | COP0_SYNC | | 2289 | COP0_SYNC |
2256 | tlbwi # clear the entry | | 2290 | tlbwi # clear the entry |
2257 | COP0_SYNC | | 2291 | COP0_SYNC |
2258 | addu t1, 1 # increment index | | 2292 | addu t1, 1 # increment index |
2259 | bne t1, a0, 1b | | 2293 | bne t1, a0, 1b |
2260 | nop | | 2294 | PTR_ADDU v0, ta0 |
2261 | | | 2295 | |
2262 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID | | 2296 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID |
2263 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2297 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
| | | 2298 | mtc0 v0, MIPS_COP_0_TLB_INDEX # invalidate index |
2264 | COP0_SYNC | | 2299 | COP0_SYNC |
2265 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2300 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2266 | INT_S zero, 0(ta3) # unlock the tlb | | 2301 | INT_S zero, 0(ta3) # unlock the tlb |
2267 | #endif | | 2302 | #endif |
2268 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2303 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2269 | JR_HB_RA | | 2304 | JR_HB_RA |
2270 | END(MIPSX(tlb_invalidate_all)) | | 2305 | END(MIPSX(tlb_invalidate_all)) |
2271 | | | 2306 | |
2272 | /* | | 2307 | /* |
2273 | * u_int mipsN_tlb_record_asids(u_long *bitmap, uint32_t asid_mask); | | 2308 | * u_int mipsN_tlb_record_asids(u_long *bitmap, uint32_t asid_mask); |
2274 | * | | 2309 | * |
2275 | * Record all the ASIDs in use in the TLB and return the number of different | | 2310 | * Record all the ASIDs in use in the TLB and return the number of different |
2276 | * ASIDs present. | | 2311 | * ASIDs present. |
2277 | */ | | 2312 | */ |
2278 | LEAF_NOPROFILE(MIPSX(tlb_record_asids)) | | 2313 | LEAF_NOPROFILE(MIPSX(tlb_record_asids)) |
2279 | | | 2314 | |
2280 | _MFC0 a3, MIPS_COP_0_TLB_HI # Save the current PID. | | 2315 | _MFC0 a3, MIPS_COP_0_TLB_HI # Save the current ASID. |
2281 | mfc0 ta0, MIPS_COP_0_TLB_WIRED | | 2316 | mfc0 ta0, MIPS_COP_0_TLB_WIRED |
2282 | INT_L ta1, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2317 | INT_L ta1, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2283 | move ta2, zero | | 2318 | move ta2, zero |
2284 | li t3, 1 | | 2319 | li t3, 1 |
2285 | | | 2320 | |
2286 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2321 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2287 | #ifdef _LP64 | | 2322 | #ifdef _LP64 |
2288 | and t0, v1, MIPS_SR_INT_IE | | 2323 | and t0, v1, MIPS_SR_INT_IE |
2289 | xor t0, v1 | | 2324 | xor t0, v1 |
2290 | mtc0 t0, MIPS_COP_0_STATUS # disable interrupts | | 2325 | mtc0 t0, MIPS_COP_0_STATUS # disable interrupts |
2291 | #else | | 2326 | #else |
2292 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2327 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2293 | #endif | | 2328 | #endif |
| @@ -2358,111 +2393,119 @@ END(MIPSX(tlb_record_asids)) | | | @@ -2358,111 +2393,119 @@ END(MIPSX(tlb_record_asids)) |
2358 | * mipsN_tlb_enter(size_t tlb_index, vaddr_t va, uint32_t pte); | | 2393 | * mipsN_tlb_enter(size_t tlb_index, vaddr_t va, uint32_t pte); |
2359 | */ | | 2394 | */ |
2360 | LEAF(MIPSX(tlb_enter)) | | 2395 | LEAF(MIPSX(tlb_enter)) |
2361 | .set noat | | 2396 | .set noat |
2362 | mfc0 v1, MIPS_COP_0_STATUS # save status | | 2397 | mfc0 v1, MIPS_COP_0_STATUS # save status |
2363 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2398 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts
2364 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2399 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2365 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2400 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2366 | 1: li v0, __SIMPLELOCK_LOCKED | | 2401 | 1: li v0, __SIMPLELOCK_LOCKED |
2367 | swapw v0, ta3 | | 2402 | swapw v0, ta3 |
2368 | bnez v0, 1b | | 2403 | bnez v0, 1b |
2369 | nop | | 2404 | nop |
2370 | #endif | | 2405 | #endif |
| | | 2406 | # a0, a1, a3, v0, ta3 in use |
2371 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save EntryHi | | 2407 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save EntryHi |
| | | 2408 | mfc0 ta1, MIPS_COP_0_TLB_INDEX # save TLB index |
2372 | | | 2409 | |
2373 | #if (PGSHIFT & 1) == 0 | | 2410 | #if (PGSHIFT & 1) == 0 |
2374 | and a3, a1, MIPS3_PG_ODDPG # select odd page bit | | 2411 | and a3, a1, MIPS3_PG_ODDPG # select odd page bit |
2375 | xor a3, a1 # clear it. | | 2412 | xor a1, a3 # clear it. |
 | | 2413 | /* a3 contains ODDPG bit, a1 is now even */
2376 | #endif | | 2414 | #endif |
2377 | _MTC0 a3, MIPS_COP_0_TLB_HI # set the VA for tlbp | | 2415 | _MTC0 a1, MIPS_COP_0_TLB_HI # set the VA for tlbp |
2378 | COP0_SYNC | | 2416 | COP0_SYNC |
2379 | | | 2417 | |
2380 | #if (PGSHIFT & 1) == 0 | | 2418 | #if (PGSHIFT & 1) == 0 |
2381 | and t2, a2, MIPS3_PG_G # make prototype tlb_lo0 | | 2419 | and t2, a2, MIPS3_PG_G # make prototype tlb_lo0 |
2382 | and t3, a2, MIPS3_PG_G # make prototype tlb_lo1 | | 2420 | and t3, a2, MIPS3_PG_G # make prototype tlb_lo1 |
2383 | #endif | | 2421 | #endif |
2384 | | | 2422 | |
2385 | tlbp # is va in TLB? | | 2423 | tlbp # is va in TLB? |
2386 | COP0_SYNC | | 2424 | COP0_SYNC |
2387 | | | 2425 | |
2388 | mfc0 v0, MIPS_COP_0_TLB_INDEX # was it in the TLB? | | 2426 | mfc0 t0, MIPS_COP_0_TLB_INDEX # was it in the TLB? |
2389 | bltz v0, 1f # nope | | 2427 | bltz t0, 1f # nope |
2390 | nop | | 2428 | nop |
2391 | | | 2429 | |
2392 | #if (PGSHIFT & 1) == 0 | | 2430 | #if (PGSHIFT & 1) == 0 |
2393 | /* | | 2431 | /* |
2394 | * Get the existing tlb_lo's because we need to replace one of them. | | 2432 | * Get the existing tlb_lo's because we need to replace one of them. |
2395 | */ | | 2433 | */ |
2396 | mfc0 t2, MIPS_COP_0_TLB_LO0 # save for update | | 2434 | mfc0 t2, MIPS_COP_0_TLB_LO0 # save for update |
2397 | mfc0 t3, MIPS_COP_0_TLB_LO1 # save for update | | 2435 | mfc0 t3, MIPS_COP_0_TLB_LO1 # save for update |
2398 | #endif | | 2436 | #endif |
2399 | | | 2437 | |
2400 | /* | | 2438 | /* |
2401 | * If it's already where we want, no reason to invalidate it. | | 2439 | * If it's already where we want, no reason to invalidate it. |
2402 | */ | | 2440 | */ |
2403 | beq v0, a0, 2f # already where we want it? | | 2441 | beq t0, a0, 2f # already where we want it? |
2404 | nop | | 2442 | nop |
2405 | | | 2443 | |
2406 | /* | | 2444 | /* |
2407 | * Clear the existing TLB entry for it. | | 2445 | * Clear the existing TLB entry for it. |
2408 | */ | | 2446 | */ |
2409 | sll t1, v0, (1 | PGSHIFT) # make a fake addr for the entry | | 2447 | li v0, MIPS_KSEG0_START |
2410 | lui t3, %hi(MIPS_KSEG0_START) | | 2448 | #if (MIPS32R2 + MIPS64R2 + RMIXL_MIPS64R2) > 0 |
2411 | or t1, t3 | | 2449 | _INS v0, t0, PGSHIFT | 1, 10 # insert TLB index into addr |
2412 | _MTC0 t1, MIPS_COP_0_TLB_HI | | 2450 | #else |
| | | 2451 | sll t1, t0, PGSHIFT | 1 # make a fake addr for the entry |
| | | 2452 | or v0, t1 |
| | | 2453 | #endif |
| | | 2454 | _MTC0 v0, MIPS_COP_0_TLB_HI # set to KSEG0 addr (invalid) |
2413 | COP0_SYNC | | 2455 | COP0_SYNC |
2414 | | | 2456 | |
2415 | and t0, a2, MIPS3_PG_G # make prototype tlb_lo | | 2457 | and t0, a2, MIPS3_PG_G # make prototype tlb_lo |
2416 | mtc0 t0, MIPS_COP_0_TLB_LO0 # use an invalid tlb_lo0 | | 2458 | mtc0 t0, MIPS_COP_0_TLB_LO0 # use an invalid tlb_lo0 |
2417 | mtc0 t0, MIPS_COP_0_TLB_LO1 # use an invalid tlb_lo1 | | 2459 | mtc0 t0, MIPS_COP_0_TLB_LO1 # use an invalid tlb_lo1 |
2418 | COP0_SYNC | | 2460 | COP0_SYNC |
2419 | | | 2461 | |
2420 | tlbwi # now write the invalid TLB | | 2462 | tlbwi # now write the invalid TLB |
2421 | COP0_SYNC | | 2463 | COP0_SYNC |
2422 | | | 2464 | |
2423 | _MTC0 a3, MIPS_COP_0_TLB_HI # restore the addr for new TLB | | 2465 | _MTC0 a1, MIPS_COP_0_TLB_HI # restore the addr for new TLB |
2424 | COP0_SYNC | | 2466 | COP0_SYNC # a1 is free for use. |
2425 | 1: | | 2467 | 1: |
2426 | mtc0 a0, MIPS_COP_0_TLB_INDEX # set the index | | 2468 | mtc0 a0, MIPS_COP_0_TLB_INDEX # set the index |
2427 | COP0_SYNC | | 2469 | COP0_SYNC # a0 is free for use. |
2428 | | | 2470 | |
2429 | 2: | | 2471 | 2: |
2430 | #if (PGSHIFT & 1) == 0 | | 2472 | #if (PGSHIFT & 1) == 0 |
2431 | and t3, a1, MIPS3_PG_ODDPG # odd or even page | | 2473 | sll a3, 31 - PGSHIFT # move ODDPG to MSB |
2432 | sll t3, 31 - PGSHIFT # move to MSB | | 2474 | sra a3, 31 # a3 a mask (0/~0 = even/odd) |
2433 | sra t3, 31 # t3 a mask (0/~0 = even/odd) | | 2475 | not v0, a3 # v0 a mask (~0/0 = even/odd) |
2434 | not v0, t3 # v0 a mask (~0/0 = even/odd) | | 2476 | |
2435 | | | 2477 | /* a0 and a1 are now free for use */ |
2436 | and ta1, t2, t3 | | 2478 | and a0, a3, t2 |
2437 | and ta2, a2, v0 | | 2479 | and a1, v0, a2 |
2438 | or t2, ta1, ta2 # t2 = (t3 & t2) | (~t3 & a2) | | 2480 | or t2, a0, a1 # t2 = (a3 & t2) | (~a3 & a2) |
2439 | and ta1, t3, v0 | | 2481 | and a0, v0, t3 |
2440 | and ta2, a2, t3 | | 2482 | and a1, a3, a2 |
2441 | or t3, ta1, ta2 # t3 = (~t3 & t3) | (t3 & a2) | | 2483 | or t3, a0, a1 # t3 = (~a3 & t3) | (a3 & a2) |
2442 | | | 2484 | |
2443 | mtc0 t2, MIPS_COP_0_TLB_LO0 # set tlb_lo0 (even) | | 2485 | mtc0 t2, MIPS_COP_0_TLB_LO0 # set tlb_lo0 (even) |
2444 | mtc0 t3, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (odd) | | 2486 | mtc0 t3, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (odd) |
2445 | #else | | 2487 | #else |
2446 | mtc0 a2, MIPS_COP_0_TLB_LO0 # set tlb_lo1 (lower half) | | 2488 | mtc0 a2, MIPS_COP_0_TLB_LO0 # set tlb_lo1 (lower half) |
2447 | INT_ADDU a2, MIPS3_PG_NEXT | | 2489 | INT_ADDU a2, MIPS3_PG_NEXT |
2448 | mtc0 a2, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (upper half) | | 2490 | mtc0 a2, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (upper half) |
2449 | #endif | | 2491 | #endif |
2450 | COP0_SYNC | | 2492 | COP0_SYNC |
2451 | | | 2493 | |
2452 | tlbwi # enter it into the TLB | | 2494 | tlbwi # enter it into the TLB |
2453 | COP0_SYNC | | 2495 | COP0_SYNC |
2454 | | | 2496 | |
2455 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore EntryHi | | 2497 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore EntryHi |
| | | 2498 | mtc0 ta1, MIPS_COP_0_TLB_INDEX # restore TLB index |
2456 | COP0_SYNC | | 2499 | COP0_SYNC |
2457 | | | 2500 | |
2458 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2501 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2459 | INT_S zero, 0(ta3) # unlock the tlb | | 2502 | INT_S zero, 0(ta3) # unlock the tlb |
2460 | #endif | | 2503 | #endif |
2461 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2504 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2462 | JR_HB_RA | | 2505 | JR_HB_RA |
2463 | .set at | | 2506 | .set at |
2464 | END(MIPSX(tlb_enter)) | | 2507 | END(MIPSX(tlb_enter)) |
2465 | | | 2508 | |
2466 | /*-------------------------------------------------------------------------- | | 2509 | /*-------------------------------------------------------------------------- |
2467 | * | | 2510 | * |
2468 | * mipsN_tlb_write_indexed -- | | 2511 | * mipsN_tlb_write_indexed -- |
| @@ -2488,41 +2531,43 @@ LEAF(MIPSX(tlb_write_indexed)) | | | @@ -2488,41 +2531,43 @@ LEAF(MIPSX(tlb_write_indexed)) |
2488 | INT_L t1, TLBMASK_LO1(a1) # fetch tlb->tlb_lo1 | | 2531 | INT_L t1, TLBMASK_LO1(a1) # fetch tlb->tlb_lo1 |
2489 | INT_L t2, TLBMASK_MASK(a1) # fetch tlb->tlb_mask | | 2532 | INT_L t2, TLBMASK_MASK(a1) # fetch tlb->tlb_mask |
2490 | PTR_L t3, TLBMASK_HI(a1) # fetch tlb->tlb_hi | | 2533 | PTR_L t3, TLBMASK_HI(a1) # fetch tlb->tlb_hi |
2491 | mfc0 v1, MIPS_COP_0_STATUS # save status | | 2534 | mfc0 v1, MIPS_COP_0_STATUS # save status |
2492 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2535 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2493 | COP0_SYNC | | 2536 | COP0_SYNC |
2494 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2537 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2495 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 | | 2538 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
2496 | 1: li v0, __SIMPLELOCK_LOCKED | | 2539 | 1: li v0, __SIMPLELOCK_LOCKED |
2497 | swapw v0, ta3 | | 2540 | swapw v0, ta3 |
2498 | bnez v0, 1b | | 2541 | bnez v0, 1b |
2499 | nop | | 2542 | nop |
2500 | #endif | | 2543 | #endif |
| | | 2544 | _MFC0 ta0, MIPS_COP_0_TLB_HI # Save the current ASID. |
2501 | mfc0 ta1, MIPS_COP_0_TLB_PG_MASK # Save current page mask. | | 2545 | mfc0 ta1, MIPS_COP_0_TLB_PG_MASK # Save current page mask. |
2502 | _MFC0 ta0, MIPS_COP_0_TLB_HI # Save the current PID. | | 2546 | mfc0 ta2, MIPS_COP_0_TLB_INDEX # Save current tlb index |
2503 | | | 2547 | |
2504 | _MTC0 t0, MIPS_COP_0_TLB_LO0 # Set up entry lo0. | | 2548 | _MTC0 t0, MIPS_COP_0_TLB_LO0 # Set up entry lo0. |
2505 | _MTC0 t1, MIPS_COP_0_TLB_LO1 # Set up entry lo1. | | 2549 | _MTC0 t1, MIPS_COP_0_TLB_LO1 # Set up entry lo1. |
2506 | COP0_SYNC | | 2550 | COP0_SYNC |
2507 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index. | | 2551 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index. |
2508 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # Set up entry pagemask. | | 2552 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # Set up entry pagemask. |
2509 | _MTC0 t3, MIPS_COP_0_TLB_HI # Set up entry high. | | 2553 | _MTC0 t3, MIPS_COP_0_TLB_HI # Set up entry high. |
2510 | COP0_SYNC | | 2554 | COP0_SYNC |
2511 | tlbwi # Write the TLB | | 2555 | tlbwi # Write the TLB |
2512 | COP0_SYNC | | 2556 | COP0_SYNC |
2513 | | | 2557 | |
2514 | _MTC0 ta0, MIPS_COP_0_TLB_HI # Restore the PID. | | 2558 | _MTC0 ta0, MIPS_COP_0_TLB_HI # Restore the ASID. |
2515 | mtc0 ta1, MIPS_COP_0_TLB_PG_MASK # Restore page mask. | | 2559 | mtc0 ta1, MIPS_COP_0_TLB_PG_MASK # Restore page mask. |
| | | 2560 | mtc0 ta2, MIPS_COP_0_TLB_INDEX # Restore TLB index |
2516 | COP0_SYNC | | 2561 | COP0_SYNC |
2517 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2562 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2518 | INT_S zero, 0(ta3) # unlock the tlb | | 2563 | INT_S zero, 0(ta3) # unlock the tlb |
2519 | #endif | | 2564 | #endif |
2520 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | 2565 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
2521 | JR_HB_RA | | 2566 | JR_HB_RA |
2522 | END(MIPSX(tlb_write_indexed)) | | 2567 | END(MIPSX(tlb_write_indexed)) |
2523 | | | 2568 | |
2524 | /* | | 2569 | /* |
2525 | * mipsN_lwp_trampoline() | | 2570 | * mipsN_lwp_trampoline() |
2526 | * | | 2571 | * |
2527 | * Arrange for a function to be invoked neatly, after a cpu_switch(). | | 2572 | * Arrange for a function to be invoked neatly, after a cpu_switch(). |
2528 | * Call the service function with one argument, specified by the s0 | | 2573 | * Call the service function with one argument, specified by the s0 |