| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: mipsX_subr.S,v 1.26.36.1.2.49 2011/12/02 00:01:37 matt Exp $ */ | | 1 | /* $NetBSD: mipsX_subr.S,v 1.26.36.1.2.50 2011/12/03 01:56:55 matt Exp $ */ |
2 | | | 2 | |
3 | /* | | 3 | /* |
4 | * Copyright 2002 Wasabi Systems, Inc. | | 4 | * Copyright 2002 Wasabi Systems, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * Written by Simon Burge for Wasabi Systems, Inc. | | 7 | * Written by Simon Burge for Wasabi Systems, Inc. |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -335,27 +335,27 @@ | | | @@ -335,27 +335,27 @@ |
335 | * on an r4000. | | 335 | * on an r4000. |
336 | * | | 336 | * |
337 | * This code is copied to the TLB exception vector address to | | 337 | * This code is copied to the TLB exception vector address to |
338 | * handle TLB translation misses. | | 338 | * handle TLB translation misses. |
339 | * NOTE: This code should be relocatable and max 32 instructions!!! | | 339 | * NOTE: This code should be relocatable and max 32 instructions!!! |
340 | * | | 340 | * |
341 | * Don't check for invalid pte's here. We load them as well and | | 341 | * Don't check for invalid pte's here. We load them as well and |
342 | * let the processor trap to load the correct value after service. | | 342 | * let the processor trap to load the correct value after service. |
343 | *---------------------------------------------------------------------------- | | 343 | *---------------------------------------------------------------------------- |
344 | */ | | 344 | */ |
345 | VECTOR(MIPSX(tlb_miss), unknown) | | 345 | VECTOR(MIPSX(tlb_miss), unknown) |
346 | .set noat | | 346 | .set noat |
347 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 347 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
348 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #00: get tlbinfo lock addr | | 348 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #00: get tlbinfo hwlock addr |
349 | li k0, __SIMPLELOCK_LOCKED #01: lock value | | 349 | li k0, __SIMPLELOCK_LOCKED #01: lock value |
350 | swapw k0, k1 #02: swap it in place | | 350 | swapw k0, k1 #02: swap it in place |
351 | bnez k0, MIPSX(tlblocked) #03: a lie | | 351 | bnez k0, MIPSX(tlblocked) #03: a lie |
352 | # lui in delay slot | | 352 | # lui in delay slot |
353 | #endif | | 353 | #endif |
354 | lui k1, %hi(CPUVAR(PMAP_SEG0TAB)) #00: k1=hi of seg0tab | | 354 | lui k1, %hi(CPUVAR(PMAP_SEG0TAB)) #00: k1=hi of seg0tab |
355 | _MFC0 k0, MIPS_COP_0_BAD_VADDR #01: k0=bad address | | 355 | _MFC0 k0, MIPS_COP_0_BAD_VADDR #01: k0=bad address |
356 | bltz k0, MIPSX(kernelfault) #02: k0<0 -> kernel fault | | 356 | bltz k0, MIPSX(kernelfault) #02: k0<0 -> kernel fault |
357 | PTR_SRL k0, 1*(PGSHIFT-PTR_SCALESHIFT)+(PGSHIFT-2)#03: k0=seg offset (almost) | | 357 | PTR_SRL k0, 1*(PGSHIFT-PTR_SCALESHIFT)+(PGSHIFT-2)#03: k0=seg offset (almost) |
358 | PTR_L k1, %lo(CPUVAR(PMAP_SEG0TAB))(k1)#04: k1=seg0tab | | 358 | PTR_L k1, %lo(CPUVAR(PMAP_SEG0TAB))(k1)#04: k1=seg0tab |
359 | MIPSX(tlb_miss_common): | | 359 | MIPSX(tlb_miss_common): |
360 | #ifdef _LP64 | | 360 | #ifdef _LP64 |
361 | beqz k1, MIPSX(nopagetable) #05: is there a pagetable? | | 361 | beqz k1, MIPSX(nopagetable) #05: is there a pagetable? |
| @@ -422,30 +422,30 @@ MIPSX(tlb_miss_common): | | | @@ -422,30 +422,30 @@ MIPSX(tlb_miss_common): |
422 | #endif | | 422 | #endif |
423 | _MTC0 k0, MIPS_COP_0_TLB_LO0 #14: lo0 is loaded | | 423 | _MTC0 k0, MIPS_COP_0_TLB_LO0 #14: lo0 is loaded |
424 | _MTC0 k1, MIPS_COP_0_TLB_LO1 #15: lo1 is loaded | | 424 | _MTC0 k1, MIPS_COP_0_TLB_LO1 #15: lo1 is loaded |
425 | sll $0, $0, 3 #16: standard nop (ehb) | | 425 | sll $0, $0, 3 #16: standard nop (ehb) |
426 | #ifdef MIPS3 | | 426 | #ifdef MIPS3 |
427 | nop #17: extra nop for QED5230 | | 427 | nop #17: extra nop for QED5230 |
428 | #endif | | 428 | #endif |
429 | tlbwr #18: write to tlb | | 429 | tlbwr #18: write to tlb |
430 | sll $0, $0, 3 #19: standard nop (ehb) | | 430 | sll $0, $0, 3 #19: standard nop (ehb) |
431 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 431 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
432 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #1a get tlbinfo lock addr | | 432 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #1a get tlbinfo lock addr |
433 | INT_S zero, 0(k1) #1b clear lock | | 433 | INT_S zero, 0(k1) #1b clear lock |
434 | #elif (MIPS3 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 434 | #elif (MIPS3 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
435 | lui k1, %hi(CPUVAR(EV_TLBMISSES)) #1a: k1=hi of tlbmisses | | 435 | lui k1, %hi(CPUVAR(EV_USER_TLBMISSES)) #1a: k1=hi of tlbmisses |
436 | REG_L k0, %lo(CPUVAR(EV_TLBMISSES))(k1) #1b | | 436 | REG_L k0, %lo(CPUVAR(EV_USER_TLBMISSES))(k1) #1b |
437 | REG_ADDU k0, 1 #1c | | 437 | REG_ADDU k0, 1 #1c |
438 | REG_S k0, %lo(CPUVAR(EV_TLBMISSES))(k1) #1d | | 438 | REG_S k0, %lo(CPUVAR(EV_USER_TLBMISSES))(k1) #1d |
439 | #endif | | 439 | #endif |
440 | eret #1e: return from exception | | 440 | eret #1e: return from exception |
441 | .set at | | 441 | .set at |
442 | _VECTOR_END(MIPSX(tlb_miss)) | | 442 | _VECTOR_END(MIPSX(tlb_miss)) |
443 | | | 443 | |
444 | #if defined(USE_64BIT_CP0_FUNCTIONS) | | 444 | #if defined(USE_64BIT_CP0_FUNCTIONS) |
445 | /* | | 445 | /* |
446 | * mipsN_xtlb_miss routine | | 446 | * mipsN_xtlb_miss routine |
447 | * | | 447 | * |
448 | * Vector code for the XTLB-miss exception vector 0x80000080 on an r4000. | | 448 | * Vector code for the XTLB-miss exception vector 0x80000080 on an r4000. |
449 | * | | 449 | * |
450 | * This code is copied to the XTLB exception vector address to | | 450 | * This code is copied to the XTLB exception vector address to |
451 | * handle TLB translation misses while in 64-bit mode. | | 451 | * handle TLB translation misses while in 64-bit mode. |
| @@ -556,38 +556,48 @@ VECTOR(MIPSX(exception), unknown) | | | @@ -556,38 +556,48 @@ VECTOR(MIPSX(exception), unknown) |
556 | nop #0d | | 556 | nop #0d |
557 | nop #0e | | 557 | nop #0e |
558 | #ifndef _LP64 | | 558 | #ifndef _LP64 |
559 | nop #0f | | 559 | nop #0f |
560 | #endif | | 560 | #endif |
561 | .p2align 4 | | 561 | .p2align 4 |
562 | MIPSX(kernelfault): | | 562 | MIPSX(kernelfault): |
563 | j _C_LABEL(MIPSX(kern_tlb_miss)) #10: kernel exception | | 563 | j _C_LABEL(MIPSX(kern_tlb_miss)) #10: kernel exception |
564 | nop #11: branch delay slot | | 564 | nop #11: branch delay slot |
565 | nop | | 565 | nop |
566 | nop | | 566 | nop |
567 | MIPSX(nopagetable): | | 567 | MIPSX(nopagetable): |
568 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 568 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
569 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #14: get tlbinfo lock addr | | 569 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 #14: get tlbinfo hwlock addr |
570 | INT_S zero, 0(k1) #15: clear lock | | 570 | INT_S zero, 0(k1) #15: clear lock |
571 | #endif | | 571 | #endif |
572 | lui k1, %hi(CPUVAR(CURLWP)) #16: k1=hi of curlwp | | 572 | lui k1, %hi(CPUVAR(CURLWP)) #16: k1=hi of curlwp |
573 | j MIPSX(slowfault) #17: no page table present | | 573 | j MIPSX(slowfault) #17: no page table present |
574 | PTR_L k1, %lo(CPUVAR(CURLWP))(k1) #18: k1=lo of curlwp | | 574 | PTR_L k1, %lo(CPUVAR(CURLWP))(k1) #18: k1=lo of curlwp |
575 | nop #19: branch delay slot | | 575 | nop #19: branch delay slot |
576 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 576 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 577 | /* |
| | | 578 | * If the TLB was locked, then it must have been locked by another thread |
| | | 579 | * context. If so, that thread is updating the TLB and may be updating the |
| | | 580 | * address we are concerned with. So the best thing we can do is just return |
| | | 581 | * from the exception and hope the other thread has fixed the reason for this |
| | | 582 | * exception. If not, another exception will be raised and hopefully then |
| | | 583 | * we'll get the TLB hwlock. |
| | | 584 | */ |
577 | MIPSX(tlblocked): | | 585 | MIPSX(tlblocked): |
578 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 0 #1a: k1=hi of curlwp | | 586 | lui k1, %hi(CPUVAR(EV_TLBLOCKED)) #1a: k1=hi of tlbmisses |
579 | j MIPSX(slowfault) #1b: no page table present | | 587 | REG_L k0, %lo(CPUVAR(EV_TLBLOCKED))(k1) #1b |
580 | PTR_L k1, CPU_INFO_CURLWP(k1) #1c: k1=lo of curlwp | | 588 | REG_ADDU k0, 1 #1c |
| | | 589 | REG_S k0, %lo(CPUVAR(EV_TLBLOCKED))(k1) #1d |
| | | 590 | eret #1e |
581 | #endif | | 591 | #endif |
582 | .set at | | 592 | .set at |
583 | _VECTOR_END(MIPSX(exception)) | | 593 | _VECTOR_END(MIPSX(exception)) |
584 | | | 594 | |
585 | /* | | 595 | /* |
586 | * Handle MIPS32/MIPS64 style interrupt exception vector. | | 596 | * Handle MIPS32/MIPS64 style interrupt exception vector. |
587 | */ | | 597 | */ |
588 | VECTOR(MIPSX(intr), unknown) | | 598 | VECTOR(MIPSX(intr), unknown) |
589 | .set noat | | 599 | .set noat |
590 | mfc0 k1, MIPS_COP_0_STATUS #00: get the status register | | 600 | mfc0 k1, MIPS_COP_0_STATUS #00: get the status register |
591 | nop #01: stall | | 601 | nop #01: stall |
592 | and k1, k1, MIPS3_SR_KSU_USER #02: test for user mode | | 602 | and k1, k1, MIPS3_SR_KSU_USER #02: test for user mode |
593 | PTR_LA k0, MIPSX(user_intr) #03: assume user mode | | 603 | PTR_LA k0, MIPSX(user_intr) #03: assume user mode |
| @@ -1640,67 +1650,73 @@ LEAF_NOPROFILE(MIPSX(kern_tlb_miss)) | | | @@ -1640,67 +1650,73 @@ LEAF_NOPROFILE(MIPSX(kern_tlb_miss)) |
1640 | _SRL k1, k1, WIRED_SHIFT | | 1650 | _SRL k1, k1, WIRED_SHIFT |
1641 | #endif | | 1651 | #endif |
1642 | #else | | 1652 | #else |
1643 | INT_ADDU k1, k0, MIPS3_PG_NEXT # point to next page | | 1653 | INT_ADDU k1, k0, MIPS3_PG_NEXT # point to next page |
1644 | #endif /* PGSHIFT & 1) == 0 */ | | 1654 | #endif /* PGSHIFT & 1) == 0 */ |
1645 | _MTC0 k1, MIPS_COP_0_TLB_LO1 # load PTE entry | | 1655 | _MTC0 k1, MIPS_COP_0_TLB_LO1 # load PTE entry |
1646 | COP0_SYNC | | 1656 | COP0_SYNC |
1647 | tlbwr # write random TLB | | 1657 | tlbwr # write random TLB |
1648 | COP0_SYNC | | 1658 | COP0_SYNC |
1649 | #ifdef MIPS3 | | 1659 | #ifdef MIPS3 |
1650 | nop | | 1660 | nop |
1651 | nop | | 1661 | nop |
1652 | #endif | | 1662 | #endif |
| | | 1663 | #if (MIPS3 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 1664 | lui k1, %hi(CPUVAR(EV_KERN_TLBMISSES)) |
| | | 1665 | REG_L k0, %lo(CPUVAR(EV_KERN_TLBMISSES))(k1) |
| | | 1666 | REG_ADDU k0, 1 |
| | | 1667 | REG_S k0, %lo(CPUVAR(EV_KERN_TLBMISSES))(k1) |
| | | 1668 | #endif |
1653 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1669 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1654 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlbinfo lock addr | | 1670 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlbinfo hwlock addr |
1655 | INT_S zero, 0(k1) # clear lock | | 1671 | INT_S zero, 0(k1) # clear lock |
1656 | #endif | | 1672 | #endif |
1657 | eret | | 1673 | eret |
1658 | .set at | | 1674 | .set at |
1659 | END(MIPSX(kern_tlb_miss)) | | 1675 | END(MIPSX(kern_tlb_miss)) |
1660 | | | 1676 | |
1661 | /*---------------------------------------------------------------------------- | | 1677 | /*---------------------------------------------------------------------------- |
1662 | * | | 1678 | * |
1663 | * mipsN_tlb_invalid_exception -- | | 1679 | * mipsN_kern_tlb_invalid_exception -- |
1664 | * | | 1680 | * |
1665 | * Handle a TLB invalid exception from kernel mode in kernel space. | | 1681 | * Handle a TLB invalid exception from kernel mode in kernel space. |
1666 | * The BadVAddr, Context, and EntryHi registers contain the failed | | 1682 | * The BadVAddr, Context, and EntryHi registers contain the failed |
1667 | * virtual address. | | 1683 | * virtual address. |
1668 | * | | 1684 | * |
1669 | * The case of wired TLB entries is special. The wired TLB entries | | 1685 | * The case of wired TLB entries is special. The wired TLB entries |
1670 | * are used to keep the u area TLB's valid. The PTE entries for these | | 1686 | * are used to keep the u area TLB's valid. The PTE entries for these |
1671 | * do not have MIPS3_PG_G set; the kernel instead relies | | 1687 | * do not have MIPS3_PG_G set; the kernel instead relies |
1672 | * on the switch_resume function to set these bits. | | 1688 | * on the switch_resume function to set these bits. |
1673 | * | | 1689 | * |
1674 | * To preserve this situation, we set PG_G bits on the "other" TLB entries | | 1690 | * To preserve this situation, we set PG_G bits on the "other" TLB entries |
1675 | * when they are wired. | | 1691 | * when they are wired. |
1676 | * | | 1692 | * |
1677 | * Results: | | 1693 | * Results: |
1678 | * None. | | 1694 | * None. |
1679 | * | | 1695 | * |
1680 | * Side effects: | | 1696 | * Side effects: |
1681 | * None. | | 1697 | * None. |
1682 | * | | 1698 | * |
1683 | *---------------------------------------------------------------------------- | | 1699 | *---------------------------------------------------------------------------- |
1684 | */ | | 1700 | */ |
1685 | LEAF_NOPROFILE(MIPSX(tlb_invalid_exception)) | | 1701 | LEAF_NOPROFILE(MIPSX(kern_tlb_invalid_exception)) |
1686 | .set noat | | 1702 | .set noat |
1687 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1703 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1688 | #define TLB_INVALID_EXCEPTION_EXIT _C_LABEL(MIPSX(tlbunlock_kern_gen_exception)) | | 1704 | #define TLB_INVALID_EXCEPTION_EXIT _C_LABEL(MIPSX(tlbunlock_kern_gen_exception)) |
1689 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr | | 1705 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr |
1690 | li k0, __SIMPLELOCK_LOCKED | | 1706 | li k0, __SIMPLELOCK_LOCKED |
1691 | 1: swapw k0, k1 # set it to locked | | 1707 | swapw k0, k1 # set it to locked |
1692 | bnez k0, 1b # was it locked? | | 1708 | bnez k0, 99f # was it locked? |
1693 | nop # if it was, try again | | 1709 | nop # if it was, do an eret |
1694 | #else | | 1710 | #else |
1695 | #define TLB_INVALID_EXCEPTION_EXIT _C_LABEL(MIPSX(kern_gen_exception)) | | 1711 | #define TLB_INVALID_EXCEPTION_EXIT _C_LABEL(MIPSX(kern_gen_exception)) |
1696 | #endif | | 1712 | #endif |
1697 | _MFC0 k0, MIPS_COP_0_BAD_VADDR # get the fault address | | 1713 | _MFC0 k0, MIPS_COP_0_BAD_VADDR # get the fault address |
1698 | #if VM_MIN_KERNEL_ADDRESS == MIPS_KSEG2_START | | 1714 | #if VM_MIN_KERNEL_ADDRESS == MIPS_KSEG2_START |
1699 | li k1, VM_MIN_KERNEL_ADDRESS # compute index | | 1715 | li k1, VM_MIN_KERNEL_ADDRESS # compute index |
1700 | #else | | 1716 | #else |
1701 | li k1, VM_MIN_KERNEL_ADDRESS>>32 # compute index | | 1717 | li k1, VM_MIN_KERNEL_ADDRESS>>32 # compute index |
1702 | dsll32 k1, k1, 0 | | 1718 | dsll32 k1, k1, 0 |
1703 | #endif | | 1719 | #endif |
1704 | bgez k0, TLB_INVALID_EXCEPTION_EXIT # full trap processing | | 1720 | bgez k0, TLB_INVALID_EXCEPTION_EXIT # full trap processing |
1705 | nop # - delay slot - | | 1721 | nop # - delay slot - |
1706 | PTR_SUBU k0, k1 | | 1722 | PTR_SUBU k0, k1 |
| @@ -1764,26 +1780,27 @@ LEAF_NOPROFILE(MIPSX(tlb_invalid_excepti | | | @@ -1764,26 +1780,27 @@ LEAF_NOPROFILE(MIPSX(tlb_invalid_excepti |
1764 | or k1, k1, k0 | | 1780 | or k1, k1, k0 |
1765 | _MTC0 k0, MIPS_COP_0_TLB_LO1 # load PTE entry | | 1781 | _MTC0 k0, MIPS_COP_0_TLB_LO1 # load PTE entry |
1766 | COP0_SYNC | | 1782 | COP0_SYNC |
1767 | #endif /* (PGSHIFT & 1) == 0 */ | | 1783 | #endif /* (PGSHIFT & 1) == 0 */ |
1768 | tlbwi # write TLB | | 1784 | tlbwi # write TLB |
1769 | COP0_SYNC | | 1785 | COP0_SYNC |
1770 | #ifdef MIPS3 | | 1786 | #ifdef MIPS3 |
1771 | nop | | 1787 | nop |
1772 | nop | | 1788 | nop |
1773 | #endif | | 1789 | #endif |
1774 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1790 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1775 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr | | 1791 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr |
1776 | INT_S zero, 0(k1) # clear lock | | 1792 | INT_S zero, 0(k1) # clear lock |
| | | 1793 | 99: |
1777 | #endif | | 1794 | #endif |
1778 | eret | | 1795 | eret |
1779 | | | 1796 | |
1780 | #if (PGSHIFT & 1) == 0 | | 1797 | #if (PGSHIFT & 1) == 0 |
1781 | MIPSX(kern_tlbi_odd): | | 1798 | MIPSX(kern_tlbi_odd): |
1782 | INT_L k0, 0(k1) # get PTE entry | | 1799 | INT_L k0, 0(k1) # get PTE entry |
1783 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 | | 1800 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
1784 | _EXT k0, k0, 0, WIRED_POS | | 1801 | _EXT k0, k0, 0, WIRED_POS |
1785 | #else | | 1802 | #else |
1786 | _SLL k0, k0, WIRED_SHIFT # get rid of wired bit | | 1803 | _SLL k0, k0, WIRED_SHIFT # get rid of wired bit |
1787 | _SRL k0, k0, WIRED_SHIFT | | 1804 | _SRL k0, k0, WIRED_SHIFT |
1788 | #endif | | 1805 | #endif |
1789 | _MTC0 k0, MIPS_COP_0_TLB_LO1 # save PTE entry | | 1806 | _MTC0 k0, MIPS_COP_0_TLB_LO1 # save PTE entry |
| @@ -1816,32 +1833,32 @@ MIPSX(kern_tlbi_odd): | | | @@ -1816,32 +1833,32 @@ MIPSX(kern_tlbi_odd): |
1816 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1833 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1817 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr | | 1834 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr |
1818 | INT_S zero, 0(k1) # clear lock | | 1835 | INT_S zero, 0(k1) # clear lock |
1819 | #endif | | 1836 | #endif |
1820 | eret | | 1837 | eret |
1821 | #endif /* (PGSHIFT & 1) == 0 */ | | 1838 | #endif /* (PGSHIFT & 1) == 0 */ |
1822 | | | 1839 | |
1823 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 1840 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
1824 | /* | | 1841 | /* |
1825 | * Before entering kern_gen_exception we need to clear the tlb lock that | | 1842 | * Before entering kern_gen_exception we need to clear the tlb lock that |
1826 | * we locked. | | 1843 | * we locked. |
1827 | */ | | 1844 | */ |
1828 | MIPSX(tlbunlock_kern_gen_exception): | | 1845 | MIPSX(tlbunlock_kern_gen_exception): |
1829 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlblock addr | | 1846 | _MFC0 k1, MIPS_COP_0_OSSCRATCH, 2 # get tlb hwlock addr |
1830 | b _C_LABEL(MIPSX(kern_gen_exception)) | | 1847 | b _C_LABEL(MIPSX(kern_gen_exception)) |
1831 | INT_S zero, 0(k1) # clear lock | | 1848 | INT_S zero, 0(k1) # clear lock |
1832 | #endif | | 1849 | #endif |
1833 | | | 1850 | |
1834 | END(MIPSX(tlb_invalid_exception)) | | 1851 | END(MIPSX(kern_tlb_invalid_exception)) |
1835 | | | 1852 | |
1836 | /* | | 1853 | /* |
1837 | * Mark where code entered from exception handler jumptable | | 1854 | * Mark where code entered from exception handler jumptable |
1838 | * ends, for stack traceback code. | | 1855 | * ends, for stack traceback code. |
1839 | */ | | 1856 | */ |
1840 | | | 1857 | |
1841 | .globl _C_LABEL(MIPSX(exceptionentry_end)) | | 1858 | .globl _C_LABEL(MIPSX(exceptionentry_end)) |
1842 | _C_LABEL(MIPSX(exceptionentry_end)): | | 1859 | _C_LABEL(MIPSX(exceptionentry_end)): |
1843 | | | 1860 | |
1844 | /*-------------------------------------------------------------------------- | | 1861 | /*-------------------------------------------------------------------------- |
1845 | * | | 1862 | * |
1846 | * mipsN_tlb_set_asid -- | | 1863 | * mipsN_tlb_set_asid -- |
1847 | * | | 1864 | * |
| @@ -1872,26 +1889,33 @@ END(MIPSX(tlb_set_asid)) | | | @@ -1872,26 +1889,33 @@ END(MIPSX(tlb_set_asid)) |
1872 | * | | 1889 | * |
1873 | * Results: | | 1890 | * Results: |
1874 | * < 0 if skipped, >= 0 if updated. | | 1891 | * < 0 if skipped, >= 0 if updated. |
1875 | * | | 1892 | * |
1876 | * Side effects: | | 1893 | * Side effects: |
1877 | * None. | | 1894 | * None. |
1878 | * | | 1895 | * |
1879 | *-------------------------------------------------------------------------- | | 1896 | *-------------------------------------------------------------------------- |
1880 | */ | | 1897 | */ |
1881 | LEAF(MIPSX(tlb_update_addr)) | | 1898 | LEAF(MIPSX(tlb_update_addr)) |
1882 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. | | 1899 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. |
1883 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts | | 1900 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts |
1884 | COP0_SYNC | | 1901 | COP0_SYNC |
| | | 1902 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 1903 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 1904 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 1905 | swapw v0, ta3 |
| | | 1906 | bnez v0, 1b |
| | | 1907 | nop |
| | | 1908 | #endif |
1885 | #if (PGSHIFT & 1) == 0 | | 1909 | #if (PGSHIFT & 1) == 0 |
1886 | and t1, a0, MIPS3_PG_ODDPG # t1 = Even/Odd flag | | 1910 | and t1, a0, MIPS3_PG_ODDPG # t1 = Even/Odd flag |
1887 | #endif | | 1911 | #endif |
1888 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) | | 1912 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) |
1889 | and a0, a0, v0 | | 1913 | and a0, a0, v0 |
1890 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save current PID | | 1914 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save current PID |
1891 | _MTC0 a0, MIPS_COP_0_TLB_HI # Init high reg | | 1915 | _MTC0 a0, MIPS_COP_0_TLB_HI # Init high reg |
1892 | COP0_SYNC | | 1916 | COP0_SYNC |
1893 | and a2, a1, MIPS3_PG_G # Copy global bit | | 1917 | and a2, a1, MIPS3_PG_G # Copy global bit |
1894 | tlbp # Probe for the entry. | | 1918 | tlbp # Probe for the entry. |
1895 | COP0_SYNC | | 1919 | COP0_SYNC |
1896 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 | | 1920 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
1897 | _EXT a1, a1, 0, WIRED_POS | | 1921 | _EXT a1, a1, 0, WIRED_POS |
| @@ -1936,87 +1960,107 @@ LEAF(MIPSX(tlb_update_addr)) | | | @@ -1936,87 +1960,107 @@ LEAF(MIPSX(tlb_update_addr)) |
1936 | COP0_SYNC | | 1960 | COP0_SYNC |
1937 | tlbwi # update slot found | | 1961 | tlbwi # update slot found |
1938 | COP0_SYNC | | 1962 | COP0_SYNC |
1939 | #endif /* (PGSHIFT & 1) == 0 */ | | 1963 | #endif /* (PGSHIFT & 1) == 0 */ |
1940 | 4: | | 1964 | 4: |
1941 | #ifdef MIPS3 | | 1965 | #ifdef MIPS3 |
1942 | nop # Make sure pipeline | | 1966 | nop # Make sure pipeline |
1943 | nop # advances before we | | 1967 | nop # advances before we |
1944 | nop # use the TLB. | | 1968 | nop # use the TLB. |
1945 | nop | | 1969 | nop |
1946 | #endif | | 1970 | #endif |
1947 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID | | 1971 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID |
1948 | COP0_SYNC | | 1972 | COP0_SYNC |
| | | 1973 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 1974 | INT_S zero, 0(ta3) |
| | | 1975 | #endif |
1949 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | 1976 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
1950 | JR_HB_RA | | 1977 | JR_HB_RA |
1951 | END(MIPSX(tlb_update_addr)) | | 1978 | END(MIPSX(tlb_update_addr)) |
1952 | | | 1979 | |
1953 | /*-------------------------------------------------------------------------- | | 1980 | /*-------------------------------------------------------------------------- |
1954 | * | | 1981 | * |
1955 | * mipsN_tlb_read_indexed -- | | 1982 | * mipsN_tlb_read_indexed -- |
1956 | * | | 1983 | * |
1957 | * Read the TLB entry. | | 1984 | * Read the TLB entry. |
1958 | * | | 1985 | * |
1959 | * void mipsN_tlb_read_indexed(size_t tlb_index, struct tlbmask *tlb); | | 1986 | * void mipsN_tlb_read_indexed(size_t tlb_index, struct tlbmask *tlb); |
1960 | * | | 1987 | * |
1961 | * Results: | | 1988 | * Results: |
1962 | * None. | | 1989 | * None. |
1963 | * | | 1990 | * |
1964 | * Side effects: | | 1991 | * Side effects: |
1965 | * tlb will contain the TLB entry found. | | 1992 | * tlb will contain the TLB entry found. |
1966 | * | | 1993 | * |
1967 | *-------------------------------------------------------------------------- | | 1994 | *-------------------------------------------------------------------------- |
1968 | */ | | 1995 | */ |
1969 | LEAF(MIPSX(tlb_read_indexed)) | | 1996 | LEAF(MIPSX(tlb_read_indexed)) |
1970 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. | | 1997 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. |
1971 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts | | 1998 | mtc0 zero, MIPS_COP_0_STATUS # Disable interrupts |
1972 | COP0_SYNC | | 1999 | COP0_SYNC |
| | | 2000 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2001 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2002 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2003 | swapw v0, ta3 |
| | | 2004 | bnez v0, 1b |
| | | 2005 | nop |
| | | 2006 | #endif |
1973 | mfc0 ta2, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2007 | mfc0 ta2, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
1974 | #ifdef MIPS3 | | 2008 | #ifdef MIPS3 |
1975 | nop | | 2009 | nop |
1976 | #endif | | 2010 | #endif |
1977 | _MFC0 t0, MIPS_COP_0_TLB_HI # Get current PID | | 2011 | _MFC0 t0, MIPS_COP_0_TLB_HI # Get current PID |
1978 | | | 2012 | |
1979 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index register | | 2013 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index register |
1980 | COP0_SYNC | | 2014 | COP0_SYNC |
1981 | tlbr # Read from the TLB | | 2015 | tlbr # Read from the TLB |
1982 | COP0_SYNC | | 2016 | COP0_SYNC |
1983 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # fetch the pgMask | | 2017 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # fetch the pgMask |
1984 | _MFC0 t3, MIPS_COP_0_TLB_HI # fetch the hi entry | | 2018 | _MFC0 t3, MIPS_COP_0_TLB_HI # fetch the hi entry |
1985 | _MFC0 ta0, MIPS_COP_0_TLB_LO0 # See what we got | | 2019 | _MFC0 ta0, MIPS_COP_0_TLB_LO0 # See what we got |
1986 | _MFC0 ta1, MIPS_COP_0_TLB_LO1 # See what we got | | 2020 | _MFC0 ta1, MIPS_COP_0_TLB_LO1 # See what we got |
1987 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID | | 2021 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID |
1988 | mtc0 ta2, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2022 | mtc0 ta2, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
1989 | COP0_SYNC | | 2023 | COP0_SYNC |
| | | 2024 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2025 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2026 | #endif |
1990 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | 2027 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
1991 | COP0_SYNC | | 2028 | COP0_SYNC |
1992 | PTR_S t3, TLBMASK_HI(a1) | | 2029 | PTR_S t3, TLBMASK_HI(a1) |
1993 | INT_S ta0, TLBMASK_LO0(a1) | | 2030 | INT_S ta0, TLBMASK_LO0(a1) |
1994 | INT_S ta1, TLBMASK_LO1(a1) | | 2031 | INT_S ta1, TLBMASK_LO1(a1) |
1995 | j ra | | 2032 | j ra |
1996 | INT_S t2, TLBMASK_MASK(a1) | | 2033 | INT_S t2, TLBMASK_MASK(a1) |
1997 | END(MIPSX(tlb_read_indexed)) | | 2034 | END(MIPSX(tlb_read_indexed)) |
1998 | | | 2035 | |
1999 | /*-------------------------------------------------------------------------- | | 2036 | /*-------------------------------------------------------------------------- |
2000 | * | | 2037 | * |
2001 | * void mipsN_tlb_invalidate_addr(vaddr_t va) | | 2038 | * void mipsN_tlb_invalidate_addr(vaddr_t va) |
2002 | * | | 2039 | * |
2003 | * Invalidate a TLB entry which has the given vaddr and ASID if found. | | 2040 | * Invalidate a TLB entry which has the given vaddr and ASID if found. |
2004 | *-------------------------------------------------------------------------- | | 2041 | *-------------------------------------------------------------------------- |
2005 | */ | | 2042 | */ |
2006 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr)) | | 2043 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr)) |
2007 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2044 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2008 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2045 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2009 | COP0_SYNC | | 2046 | COP0_SYNC |
| | | 2047 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2048 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2049 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2050 | swapw v0, ta3 |
| | | 2051 | bnez v0, 1b |
| | | 2052 | nop |
| | | 2053 | #endif |
2010 | | | 2054 | |
2011 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) | | 2055 | li v0, (MIPS3_PG_HVPN | MIPS3_PG_ASID) |
2012 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2056 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID |
2013 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2057 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
2014 | and a0, v0 # make sure valid entryHi | | 2058 | and a0, v0 # make sure valid entryHi |
2015 | _MTC0 a0, MIPS_COP_0_TLB_HI # look for the vaddr & ASID | | 2059 | _MTC0 a0, MIPS_COP_0_TLB_HI # look for the vaddr & ASID |
2016 | COP0_SYNC | | 2060 | COP0_SYNC |
2017 | tlbp # probe the entry in question | | 2061 | tlbp # probe the entry in question |
2018 | COP0_SYNC | | 2062 | COP0_SYNC |
2019 | mfc0 v0, MIPS_COP_0_TLB_INDEX # see what we got | | 2063 | mfc0 v0, MIPS_COP_0_TLB_INDEX # see what we got |
2020 | bltz v0, 1f # index < 0 then skip | | 2064 | bltz v0, 1f # index < 0 then skip |
2021 | li t1, MIPS_KSEG0_START # invalid address | | 2065 | li t1, MIPS_KSEG0_START # invalid address |
2022 | PTR_SLL v0, (PGSHIFT | 1) # PAGE_SHIFT | 1 | | 2066 | PTR_SLL v0, (PGSHIFT | 1) # PAGE_SHIFT | 1 |
| @@ -2025,41 +2069,51 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr | | | @@ -2025,41 +2069,51 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_addr |
2025 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2069 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2026 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2070 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2027 | #if 0 | | 2071 | #if 0 |
2028 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask | | 2072 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask |
2029 | #endif | | 2073 | #endif |
2030 | COP0_SYNC | | 2074 | COP0_SYNC |
2031 | | | 2075 | |
2032 | tlbwi | | 2076 | tlbwi |
2033 | COP0_SYNC | | 2077 | COP0_SYNC |
2034 | 1: | | 2078 | 1: |
2035 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID | | 2079 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID |
2036 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2080 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
2037 | COP0_SYNC | | 2081 | COP0_SYNC |
| | | 2082 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2083 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2084 | #endif |
2038 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2085 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2039 | JR_HB_RA | | 2086 | JR_HB_RA |
2040 | END(MIPSX(tlb_invalidate_addr)) | | 2087 | END(MIPSX(tlb_invalidate_addr)) |
2041 | | | 2088 | |
2042 | /* | | 2089 | /* |
2043 | * void mipsN_tlb_invalidate_asids(uint32_t base, uint32_t limit); | | 2090 | * void mipsN_tlb_invalidate_asids(uint32_t base, uint32_t limit); |
2044 | * | | 2091 | * |
2045 | * Invalidate TLB entries belong to per process user spaces with | | 2092 | * Invalidate TLB entries belong to per process user spaces with |
2046 | * base <= ASIDs < limit while leaving entries for kernel space | | 2093 | * base <= ASIDs < limit while leaving entries for kernel space |
2047 | * marked global intact. | | 2094 | * marked global intact. |
2048 | */ | | 2095 | */ |
2049 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_asids)) | | 2096 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_asids)) |
2050 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2097 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2051 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2098 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2052 | COP0_SYNC | | 2099 | COP0_SYNC |
| | | 2100 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2101 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2102 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2103 | swapw v0, ta3 |
| | | 2104 | bnez v0, 1b |
| | | 2105 | nop |
| | | 2106 | #endif |
2053 | | | 2107 | |
2054 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save the current PID. | | 2108 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save the current PID. |
2055 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2109 | mfc0 t1, MIPS_COP_0_TLB_WIRED |
2056 | li v0, MIPS_KSEG0_START # invalid address | | 2110 | li v0, MIPS_KSEG0_START # invalid address |
2057 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2111 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2058 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2112 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
2059 | | | 2113 | |
2060 | # do {} while (t1 < t2) | | 2114 | # do {} while (t1 < t2) |
2061 | 1: | | 2115 | 1: |
2062 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index | | 2116 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index |
2063 | COP0_SYNC | | 2117 | COP0_SYNC |
2064 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | 2118 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 |
2065 | tlbr # obtain an entry | | 2119 | tlbr # obtain an entry |
| @@ -2083,40 +2137,50 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_asid | | | @@ -2083,40 +2137,50 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_asid |
2083 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2137 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2084 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry | | 2138 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry |
2085 | COP0_SYNC | | 2139 | COP0_SYNC |
2086 | tlbwi # invalidate the TLB entry | | 2140 | tlbwi # invalidate the TLB entry |
2087 | COP0_SYNC | | 2141 | COP0_SYNC |
2088 | 2: | | 2142 | 2: |
2089 | addu t1, 1 | | 2143 | addu t1, 1 |
2090 | bne t1, t2, 1b | | 2144 | bne t1, t2, 1b |
2091 | nop | | 2145 | nop |
2092 | | | 2146 | |
2093 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID. | | 2147 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore PID. |
2094 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2148 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
2095 | COP0_SYNC | | 2149 | COP0_SYNC |
| | | 2150 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2151 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2152 | #endif |
2096 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2153 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2097 | JR_HB_RA # new ASID will be set soon | | 2154 | JR_HB_RA # new ASID will be set soon |
2098 | END(MIPSX(tlb_invalidate_asids)) | | 2155 | END(MIPSX(tlb_invalidate_asids)) |
2099 | | | 2156 | |
2100 | /* | | 2157 | /* |
2101 | * void mipsN_tlb_invalidate_globals(void); | | 2158 | * void mipsN_tlb_invalidate_globals(void); |
2102 | * | | 2159 | * |
2103 | * Invalidate the non-wired TLB entries belonging to kernel space while | | 2160 | * Invalidate the non-wired TLB entries belonging to kernel space while |
2104 | * leaving entries for user space (not marked global) intact. | | 2161 | * leaving entries for user space (not marked global) intact. |
2105 | */ | | 2162 | */ |
2106 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_globals)) | | 2163 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_globals)) |
2107 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2164 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2108 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2165 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2109 | COP0_SYNC | | 2166 | COP0_SYNC |
| | | 2167 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2168 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2169 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2170 | swapw v0, ta3 |
| | | 2171 | bnez v0, 1b |
| | | 2172 | nop |
| | | 2173 | #endif |
2110 | | | 2174 | |
2111 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2175 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID |
2112 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2176 | mfc0 t1, MIPS_COP_0_TLB_WIRED |
2113 | li v0, MIPS_KSEG0_START # invalid address | | 2177 | li v0, MIPS_KSEG0_START # invalid address |
2114 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2178 | INT_L t2, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2115 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2179 | mfc0 t3, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
2116 | | | 2180 | |
2117 | # do {} while (t1 < t2) | | 2181 | # do {} while (t1 < t2) |
2118 | 1: | | 2182 | 1: |
2119 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index | | 2183 | mtc0 t1, MIPS_COP_0_TLB_INDEX # set index |
2120 | COP0_SYNC | | 2184 | COP0_SYNC |
2121 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | 2185 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 |
2122 | tlbr # obtain an entry | | 2186 | tlbr # obtain an entry |
| @@ -2132,39 +2196,49 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_glob | | | @@ -2132,39 +2196,49 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_glob |
2132 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2196 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2133 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry | | 2197 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out mask entry |
2134 | COP0_SYNC | | 2198 | COP0_SYNC |
2135 | tlbwi # invalidate the TLB entry | | 2199 | tlbwi # invalidate the TLB entry |
2136 | COP0_SYNC | | 2200 | COP0_SYNC |
2137 | 2: | | 2201 | 2: |
2138 | addu t1, 1 | | 2202 | addu t1, 1 |
2139 | bne t1, t2, 1b | | 2203 | bne t1, t2, 1b |
2140 | nop | | 2204 | nop |
2141 | | | 2205 | |
2142 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID | | 2206 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore current ASID |
2143 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2207 | mtc0 t3, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
2144 | COP0_SYNC | | 2208 | COP0_SYNC |
| | | 2209 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2210 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2211 | #endif |
2145 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2212 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2146 | JR_HB_RA | | 2213 | JR_HB_RA |
2147 | END(MIPSX(tlb_invalidate_globals)) | | 2214 | END(MIPSX(tlb_invalidate_globals)) |
2148 | | | 2215 | |
2149 | /* | | 2216 | /* |
2150 | * void mipsN_tlb_invalidate_all(void); | | 2217 | * void mipsN_tlb_invalidate_all(void); |
2151 | * | | 2218 | * |
2152 | * Invalidate all of non-wired TLB entries. | | 2219 | * Invalidate all of non-wired TLB entries. |
2153 | */ | | 2220 | */ |
2154 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_all)) | | 2221 | LEAF_NOPROFILE(MIPSX(tlb_invalidate_all)) |
2155 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2222 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2156 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2223 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2157 | COP0_SYNC | | 2224 | COP0_SYNC |
| | | 2225 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2226 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2227 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2228 | swapw v0, ta3 |
| | | 2229 | bnez v0, 1b |
| | | 2230 | nop |
| | | 2231 | #endif |
2158 | | | 2232 | |
2159 | INT_L a0, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2233 | INT_L a0, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2160 | | | 2234 | |
2161 | li v0, MIPS_KSEG0_START # invalid address | | 2235 | li v0, MIPS_KSEG0_START # invalid address |
2162 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID | | 2236 | _MFC0 t0, MIPS_COP_0_TLB_HI # save current ASID |
2163 | mfc0 t1, MIPS_COP_0_TLB_WIRED | | 2237 | mfc0 t1, MIPS_COP_0_TLB_WIRED |
2164 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # save current pgMask | | 2238 | mfc0 t2, MIPS_COP_0_TLB_PG_MASK # save current pgMask |
2165 | | | 2239 | |
2166 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 | | 2240 | _MTC0 zero, MIPS_COP_0_TLB_LO0 # zero out entryLo0 |
2167 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 | | 2241 | _MTC0 zero, MIPS_COP_0_TLB_LO1 # zero out entryLo1 |
2168 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask | | 2242 | mtc0 zero, MIPS_COP_0_TLB_PG_MASK # zero out pageMask |
2169 | | | 2243 | |
2170 | # do {} while (t1 < a0) | | 2244 | # do {} while (t1 < a0) |
| @@ -2174,109 +2248,132 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_all) | | | @@ -2174,109 +2248,132 @@ LEAF_NOPROFILE(MIPSX(tlb_invalidate_all) |
2174 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 | | 2248 | sll ta0, t1, PGSHIFT | 1 # PAGE_SHIFT | 1 |
2175 | PTR_ADDU ta0, v0 | | 2249 | PTR_ADDU ta0, v0 |
2176 | _MTC0 ta0, MIPS_COP_0_TLB_HI # make entryHi invalid | | 2250 | _MTC0 ta0, MIPS_COP_0_TLB_HI # make entryHi invalid |
2177 | COP0_SYNC | | 2251 | COP0_SYNC |
2178 | tlbwi # clear the entry | | 2252 | tlbwi # clear the entry |
2179 | COP0_SYNC | | 2253 | COP0_SYNC |
2180 | addu t1, 1 # increment index | | 2254 | addu t1, 1 # increment index |
2181 | bne t1, a0, 1b | | 2255 | bne t1, a0, 1b |
2182 | nop | | 2256 | nop |
2183 | | | 2257 | |
2184 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID | | 2258 | _MTC0 t0, MIPS_COP_0_TLB_HI # restore ASID |
2185 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # restore pgMask | | 2259 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # restore pgMask |
2186 | COP0_SYNC | | 2260 | COP0_SYNC |
| | | 2261 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2262 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2263 | #endif |
2187 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2264 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2188 | JR_HB_RA | | 2265 | JR_HB_RA |
2189 | END(MIPSX(tlb_invalidate_all)) | | 2266 | END(MIPSX(tlb_invalidate_all)) |
2190 | | | 2267 | |
2191 | /* | | 2268 | /* |
2192 | * u_int mipsN_tlb_record_asids(u_long *bitmap, uint32_t asid_mask); | | 2269 | * u_int mipsN_tlb_record_asids(u_long *bitmap, uint32_t asid_mask); |
2193 | * | | 2270 | * |
2194 | * Record all the ASIDs in use in the TLB and return the number of different | | 2271 | * Record all the ASIDs in use in the TLB and return the number of different |
2195 | * ASIDs present. | | 2272 | * ASIDs present. |
2196 | */ | | 2273 | */ |
2197 | LEAF_NOPROFILE(MIPSX(tlb_record_asids)) | | 2274 | LEAF_NOPROFILE(MIPSX(tlb_record_asids)) |
2198 | | | 2275 | |
2199 | _MFC0 a3, MIPS_COP_0_TLB_HI # Save the current PID. | | 2276 | _MFC0 a3, MIPS_COP_0_TLB_HI # Save the current PID. |
2200 | mfc0 ta0, MIPS_COP_0_TLB_WIRED | | 2277 | mfc0 ta0, MIPS_COP_0_TLB_WIRED |
2201 | INT_L ta1, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES | | 2278 | INT_L ta1, _C_LABEL(mips_options) + MO_NUM_TLB_ENTRIES |
2202 | move ta2, zero | | 2279 | move ta2, zero |
2203 | li ta3, 1 | | 2280 | li t3, 1 |
2204 | move v0, zero | | | |
2205 | | | 2281 | |
2206 | mfc0 v1, MIPS_COP_0_STATUS # save status register | | 2282 | mfc0 v1, MIPS_COP_0_STATUS # save status register |
2207 | #ifdef _LP64 | | 2283 | #ifdef _LP64 |
2208 | and t0, v1, MIPS_SR_INT_IE | | 2284 | and t0, v1, MIPS_SR_INT_IE |
2209 | xor t0, v1 | | 2285 | xor t0, v1 |
2210 | mtc0 t0, MIPS_COP_0_STATUS # disable interrupts | | 2286 | mtc0 t0, MIPS_COP_0_STATUS # disable interrupts |
2211 | #else | | 2287 | #else |
2212 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts | | 2288 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
2213 | #endif | | 2289 | #endif |
2214 | COP0_SYNC | | 2290 | COP0_SYNC |
| | | 2291 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2292 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2293 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2294 | swapw v0, ta3 |
| | | 2295 | bnez v0, 1b |
| | | 2296 | nop |
| | | 2297 | #else |
| | | 2298 | move v0, zero |
| | | 2299 | #endif |
2215 | | | 2300 | |
2216 | # do {} while (ta0 < ta1) | | 2301 | # do {} while (ta0 < ta1) |
2217 | 1: | | 2302 | 1: |
2218 | mtc0 ta0, MIPS_COP_0_TLB_INDEX # set index | | 2303 | mtc0 ta0, MIPS_COP_0_TLB_INDEX # set index |
2219 | COP0_SYNC | | 2304 | COP0_SYNC |
2220 | tlbr # obtain an entry | | 2305 | tlbr # obtain an entry |
2221 | COP0_SYNC | | 2306 | COP0_SYNC |
2222 | _MFC0 t0, MIPS_COP_0_TLB_LO1 | | 2307 | _MFC0 t0, MIPS_COP_0_TLB_LO1 |
2223 | and t0, MIPS3_PG_G # check to see it has G bit | | 2308 | and t0, MIPS3_PG_G # check to see it has G bit |
2224 | bnez t0, 4f # yep, skip this one. | | 2309 | bnez t0, 4f # yep, skip this one. |
2225 | nop | | 2310 | nop |
2226 | _MFC0 t0, MIPS_COP_0_TLB_HI # get VA and ASID | | 2311 | _MFC0 t0, MIPS_COP_0_TLB_HI # get VA and ASID |
2227 | and t0, a1 # focus on ASID | | 2312 | and t0, a1 # focus on ASID |
2228 | | | 2313 | |
2229 | srl a2, t0, 3 + LONG_SCALESHIFT # drop low 5 or 6 bits | | 2314 | srl a2, t0, 3 + LONG_SCALESHIFT # drop low 5 or 6 bits |
2230 | sll a2, LONG_SCALESHIFT # make an index for the bitmap | | 2315 | sll a2, LONG_SCALESHIFT # make an index for the bitmap |
2231 | _SLLV t0, ta3, t0 # t0 is mask (ta3 == 1) | | 2316 | _SLLV t0, t3, t0 # t0 is mask (t3 == 1) |
2232 | | | 2317 | |
2233 | PTR_ADDU a2, a0 # index into the bitmap | | 2318 | PTR_ADDU a2, a0 # index into the bitmap |
2234 | beq a2, ta2, 3f # is the desired cell loaded? | | 2319 | beq a2, ta2, 3f # is the desired cell loaded? |
2235 | nop # yes, don't reload it | | 2320 | nop # yes, don't reload it |
2236 | beqz ta2, 2f # have we ever loaded it? | | 2321 | beqz ta2, 2f # have we ever loaded it? |
2237 | nop # nope, so don't save it. | | 2322 | nop # nope, so don't save it. |
2238 | | | 2323 | |
2239 | LONG_S t2, 0(ta2) # save the updated value. | | 2324 | LONG_S t2, 0(ta2) # save the updated value. |
2240 | 2: | | 2325 | 2: |
2241 | move ta2, a2 # remember the new cell's addr | | 2326 | move ta2, a2 # remember the new cell's addr |
2242 | LONG_L t2, 0(ta2) # and load it | | 2327 | LONG_L t2, 0(ta2) # and load it |
2243 | 3: | | 2328 | 3: |
2244 | and t1, t2, t0 # t1 = t2 & t0 | | 2329 | and t1, t2, t0 # t1 = t2 & t0 |
2245 | sltu t1, t1, ta3 # t1 = t1 < 1 (aka t1 == 0) | | 2330 | sltu t1, t1, t3 # t1 = t1 < 1 (aka t1 == 0) |
2246 | addu v0, t1 # v0 += t1 | | 2331 | addu v0, t1 # v0 += t1 |
2247 | or t2, t0 # or in the new ASID bits | | 2332 | or t2, t0 # or in the new ASID bits |
2248 | 4: | | 2333 | 4: |
2249 | addu ta0, 1 # increment TLB entry # | | 2334 | addu ta0, 1 # increment TLB entry # |
2250 | bne ta0, ta1, 1b # keep lookup if not limit | | 2335 | bne ta0, ta1, 1b # keep lookup if not limit |
2251 | nop | | 2336 | nop |
2252 | | | 2337 | |
2253 | beqz ta2, 5f # do we have a cell to write? | | 2338 | beqz ta2, 5f # do we have a cell to write? |
2254 | nop # nope. nothing | | 2339 | nop # nope. nothing |
2255 | | | 2340 | |
2256 | LONG_S t2, 0(ta2) # save the updated value. | | 2341 | LONG_S t2, 0(ta2) # save the updated value. |
2257 | 5: | | 2342 | 5: |
2258 | _MTC0 a3, MIPS_COP_0_TLB_HI # restore ASID | | 2343 | _MTC0 a3, MIPS_COP_0_TLB_HI # restore ASID |
2259 | COP0_SYNC | | 2344 | COP0_SYNC |
2260 | | | 2345 | |
| | | 2346 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2347 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2348 | #endif |
2261 | mtc0 v1, MIPS_COP_0_STATUS # restore status register | | 2349 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2262 | JR_HB_RA | | 2350 | JR_HB_RA |
2263 | END(MIPSX(tlb_record_asids)) | | 2351 | END(MIPSX(tlb_record_asids)) |
2264 | | | 2352 | |
2265 | /* | | 2353 | /* |
2266 | * mipsN_tlb_enter(size_t tlb_index, vaddr_t va, uint32_t pte); | | 2354 | * mipsN_tlb_enter(size_t tlb_index, vaddr_t va, uint32_t pte); |
2267 | */ | | 2355 | */ |
2268 | LEAF(MIPSX(tlb_enter)) | | 2356 | LEAF(MIPSX(tlb_enter)) |
2269 | .set noat | | 2357 | .set noat |
| | | 2358 | mfc0 v1, MIPS_COP_0_STATUS # save status |
| | | 2359 | mtc0 zero, MIPS_COP_0_STATUS # disable interupts |
| | | 2360 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2361 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2362 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2363 | swapw v0, ta3 |
| | | 2364 | bnez v0, 1b |
| | | 2365 | nop |
| | | 2366 | #endif |
2270 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save EntryHi | | 2367 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save EntryHi |
2271 | | | 2368 | |
2272 | #if (PGSHIFT & 1) == 0 | | 2369 | #if (PGSHIFT & 1) == 0 |
2273 | and a3, a1, MIPS3_PG_ODDPG # select odd page bit | | 2370 | and a3, a1, MIPS3_PG_ODDPG # select odd page bit |
2274 | xor a3, a1 # clear it. | | 2371 | xor a3, a1 # clear it. |
2275 | #endif | | 2372 | #endif |
2276 | _MTC0 a3, MIPS_COP_0_TLB_HI # set the VA for tlbp | | 2373 | _MTC0 a3, MIPS_COP_0_TLB_HI # set the VA for tlbp |
2277 | COP0_SYNC | | 2374 | COP0_SYNC |
2278 | | | 2375 | |
2279 | #if (PGSHIFT & 1) == 0 | | 2376 | #if (PGSHIFT & 1) == 0 |
2280 | and t2, a2, MIPS3_PG_G # make prototype tlb_lo0 | | 2377 | and t2, a2, MIPS3_PG_G # make prototype tlb_lo0 |
2281 | and t3, a2, MIPS3_PG_G # make prototype tlb_lo1 | | 2378 | and t3, a2, MIPS3_PG_G # make prototype tlb_lo1 |
2282 | #endif | | 2379 | #endif |
| @@ -2296,76 +2393,140 @@ LEAF(MIPSX(tlb_enter)) | | | @@ -2296,76 +2393,140 @@ LEAF(MIPSX(tlb_enter)) |
2296 | mfc0 t3, MIPS_COP_0_TLB_LO1 # save for update | | 2393 | mfc0 t3, MIPS_COP_0_TLB_LO1 # save for update |
2297 | #endif | | 2394 | #endif |
2298 | | | 2395 | |
2299 | /* | | 2396 | /* |
2300 | * If it's already where we want, no reason to invalidate it. | | 2397 | * If it's already where we want, no reason to invalidate it. |
2301 | */ | | 2398 | */ |
2302 | beq v0, a0, 2f # already where we want it? | | 2399 | beq v0, a0, 2f # already where we want it? |
2303 | nop | | 2400 | nop |
2304 | | | 2401 | |
2305 | /* | | 2402 | /* |
2306 | * Clear the existing TLB entry for it. | | 2403 | * Clear the existing TLB entry for it. |
2307 | */ | | 2404 | */ |
2308 | sll t1, v0, (1 | PGSHIFT) # make a fake addr for the entry | | 2405 | sll t1, v0, (1 | PGSHIFT) # make a fake addr for the entry |
2309 | lui v1, %hi(MIPS_KSEG0_START) | | 2406 | lui t3, %hi(MIPS_KSEG0_START) |
2310 | or t1, v1 | | 2407 | or t1, t3 |
2311 | _MTC0 t1, MIPS_COP_0_TLB_HI | | 2408 | _MTC0 t1, MIPS_COP_0_TLB_HI |
2312 | COP0_SYNC | | 2409 | COP0_SYNC |
2313 | | | 2410 | |
2314 | and t0, a2, MIPS3_PG_G # make prototype tlb_lo | | 2411 | and t0, a2, MIPS3_PG_G # make prototype tlb_lo |
2315 | mtc0 t0, MIPS_COP_0_TLB_LO0 # use an invalid tlb_lo0 | | 2412 | mtc0 t0, MIPS_COP_0_TLB_LO0 # use an invalid tlb_lo0 |
2316 | mtc0 t0, MIPS_COP_0_TLB_LO1 # use an invalid tlb_lo1 | | 2413 | mtc0 t0, MIPS_COP_0_TLB_LO1 # use an invalid tlb_lo1 |
2317 | COP0_SYNC | | 2414 | COP0_SYNC |
2318 | | | 2415 | |
2319 | tlbwi # now write the invalid TLB | | 2416 | tlbwi # now write the invalid TLB |
2320 | COP0_SYNC | | 2417 | COP0_SYNC |
2321 | | | 2418 | |
2322 | _MTC0 a3, MIPS_COP_0_TLB_HI # restore the addr for new TLB | | 2419 | _MTC0 a3, MIPS_COP_0_TLB_HI # restore the addr for new TLB |
2323 | COP0_SYNC | | 2420 | COP0_SYNC |
2324 | 1: | | 2421 | 1: |
2325 | mtc0 a0, MIPS_COP_0_TLB_INDEX # set the index | | 2422 | mtc0 a0, MIPS_COP_0_TLB_INDEX # set the index |
2326 | COP0_SYNC | | 2423 | COP0_SYNC |
2327 | | | 2424 | |
2328 | 2: | | 2425 | 2: |
2329 | #if (PGSHIFT & 1) == 0 | | 2426 | #if (PGSHIFT & 1) == 0 |
2330 | and v1, a1, MIPS3_PG_ODDPG # odd or even page | | 2427 | and t3, a1, MIPS3_PG_ODDPG # odd or even page |
2331 | sll v1, 31 - PGSHIFT # move to MSB | | 2428 | sll t3, 31 - PGSHIFT # move to MSB |
2332 | sra v1, 31 # v1 a mask (0/~0 = even/odd) | | 2429 | sra t3, 31 # t3 a mask (0/~0 = even/odd) |
2333 | not v0, v1 # v0 a mask (~0/0 = even/odd) | | 2430 | not v0, t3 # v0 a mask (~0/0 = even/odd) |
2334 | | | 2431 | |
2335 | and ta2, t2, v1 | | 2432 | and ta1, t2, t3 |
2336 | and ta3, a2, v0 | | 2433 | and ta2, a2, v0 |
2337 | or t2, ta2, ta3 # t2 = (v1 & t2) | (~v1 & a2) | | 2434 | or t2, ta1, ta2 # t2 = (t3 & t2) | (~t3 & a2) |
2338 | and ta2, t3, v0 | | 2435 | and ta1, t3, v0 |
2339 | and ta3, a2, v1 | | 2436 | and ta2, a2, t3 |
2340 | or t3, ta2, ta3 # t3 = (~v1 & t3) | (v1 & a2) | | 2437 | or t3, ta1, ta2 # t3 = (~t3 & t3) | (t3 & a2) |
2341 | | | 2438 | |
2342 | mtc0 t2, MIPS_COP_0_TLB_LO0 # set tlb_lo0 (even) | | 2439 | mtc0 t2, MIPS_COP_0_TLB_LO0 # set tlb_lo0 (even) |
2343 | mtc0 t3, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (odd) | | 2440 | mtc0 t3, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (odd) |
2344 | #else | | 2441 | #else |
2345 | mtc0 a2, MIPS_COP_0_TLB_LO0 # set tlb_lo1 (lower half) | | 2442 | mtc0 a2, MIPS_COP_0_TLB_LO0 # set tlb_lo1 (lower half) |
2346 | INT_ADDU a2, MIPS3_PG_NEXT | | 2443 | INT_ADDU a2, MIPS3_PG_NEXT |
2347 | mtc0 a2, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (upper half) | | 2444 | mtc0 a2, MIPS_COP_0_TLB_LO1 # set tlb_lo1 (upper half) |
2348 | #endif | | 2445 | #endif |
2349 | COP0_SYNC | | 2446 | COP0_SYNC |
2350 | | | 2447 | |
2351 | tlbwi # enter it into the TLB | | 2448 | tlbwi # enter it into the TLB |
2352 | COP0_SYNC | | 2449 | COP0_SYNC |
2353 | | | 2450 | |
2354 | _MTC0 ta1, MIPS_COP_0_TLB_HI # restore EntryHi | | 2451 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore EntryHi |
| | | 2452 | COP0_SYNC |
| | | 2453 | |
| | | 2454 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2455 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2456 | #endif |
| | | 2457 | mtc0 v1, MIPS_COP_0_STATUS # restore status register |
2355 | JR_HB_RA | | 2458 | JR_HB_RA |
2356 | .set at | | 2459 | .set at |
2357 | END(MIPSX(tlb_enter)) | | 2460 | END(MIPSX(tlb_enter)) |
2358 | | | 2461 | |
| | | 2462 | /*-------------------------------------------------------------------------- |
| | | 2463 | * |
| | | 2464 | * mipsN_tlb_write_indexed -- |
| | | 2465 | * |
| | | 2466 | * Write the given entry into the TLB at the given index. |
| | | 2467 | * Pass full R4000 style TLB info including variable page size mask. |
| | | 2468 | * |
| | | 2469 | * mipsN_tlb_write_indexed(size_t tlb_index, const struct tlbmask *tlb) |
| | | 2470 | * |
| | | 2471 | * Results: |
| | | 2472 | * None. |
| | | 2473 | * |
| | | 2474 | * Side effects: |
| | | 2475 | * TLB entry set. |
| | | 2476 | * |
| | | 2477 | *-------------------------------------------------------------------------- |
| | | 2478 | */ |
| | | 2479 | LEAF(MIPSX(tlb_write_indexed)) |
| | | 2480 | /* |
| | | 2481 | * Fetch the arguments first so we don't need to worry about KX/UX/PX |
| | | 2482 | */ |
| | | 2483 | INT_L t0, TLBMASK_LO0(a1) # fetch tlb->tlb_lo0 |
| | | 2484 | INT_L t1, TLBMASK_LO1(a1) # fetch tlb->tlb_lo1 |
| | | 2485 | INT_L t2, TLBMASK_MASK(a1) # fetch tlb->tlb_mask |
| | | 2486 | PTR_L t3, TLBMASK_HI(a1) # fetch tlb->tlb_hi |
| | | 2487 | mfc0 v1, MIPS_COP_0_STATUS # save status |
| | | 2488 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
| | | 2489 | COP0_SYNC |
| | | 2490 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2491 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2492 | 1: li v0, __SIMPLELOCK_LOCKED |
| | | 2493 | swapw v0, ta3 |
| | | 2494 | bnez v0, 1b |
| | | 2495 | nop |
| | | 2496 | #endif |
| | | 2497 | mfc0 ta1, MIPS_COP_0_TLB_PG_MASK # Save current page mask. |
| | | 2498 | _MFC0 ta0, MIPS_COP_0_TLB_HI # Save the current PID. |
| | | 2499 | |
| | | 2500 | _MTC0 t0, MIPS_COP_0_TLB_LO0 # Set up entry lo0. |
| | | 2501 | _MTC0 t1, MIPS_COP_0_TLB_LO1 # Set up entry lo1. |
| | | 2502 | COP0_SYNC |
| | | 2503 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index. |
| | | 2504 | mtc0 t2, MIPS_COP_0_TLB_PG_MASK # Set up entry pagemask. |
| | | 2505 | _MTC0 t3, MIPS_COP_0_TLB_HI # Set up entry high. |
| | | 2506 | COP0_SYNC |
| | | 2507 | tlbwi # Write the TLB |
| | | 2508 | COP0_SYNC |
| | | 2509 | |
| | | 2510 | _MTC0 ta0, MIPS_COP_0_TLB_HI # Restore the PID. |
| | | 2511 | mtc0 ta1, MIPS_COP_0_TLB_PG_MASK # Restore page mask. |
| | | 2512 | COP0_SYNC |
| | | 2513 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
| | | 2514 | INT_S zero, 0(ta3) # unlock the tlb |
| | | 2515 | #endif |
| | | 2516 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register |
| | | 2517 | JR_HB_RA |
| | | 2518 | END(MIPSX(tlb_write_indexed)) |
| | | 2519 | |
2359 | /* | | 2520 | /* |
2360 | * mipsN_lwp_trampoline() | | 2521 | * mipsN_lwp_trampoline() |
2361 | * | | 2522 | * |
2362 | * Arrange for a function to be invoked neatly, after a cpu_switch(). | | 2523 | * Arrange for a function to be invoked neatly, after a cpu_switch(). |
2363 | * Call the service function with one argument, specified by the s0 | | 2524 | * Call the service function with one argument, specified by the s0 |
2364 | * and s1 respectively. There is no need register save operation. | | 2525 | * and s1 respectively. There is no need register save operation. |
2365 | */ | | 2526 | */ |
2366 | LEAF(MIPSX(lwp_trampoline)) | | 2527 | LEAF(MIPSX(lwp_trampoline)) |
2367 | PTR_ADDU sp, -CALLFRAME_SIZ | | 2528 | PTR_ADDU sp, -CALLFRAME_SIZ |
2368 | | | 2529 | |
2369 | # Call lwp_startup(), with args from cpu_switchto()/cpu_setfunc() | | 2530 | # Call lwp_startup(), with args from cpu_switchto()/cpu_setfunc() |
2370 | move a0, v0 | | 2531 | move a0, v0 |
2371 | jal _C_LABEL(lwp_startup) | | 2532 | jal _C_LABEL(lwp_startup) |
| @@ -2460,166 +2621,130 @@ END(MIPSX(setfunc_trampoline)) | | | @@ -2460,166 +2621,130 @@ END(MIPSX(setfunc_trampoline)) |
2460 | * | | 2621 | * |
2461 | * Wiredown the USPACE of newproc in TLB entry#0. Check whether target | | 2622 | * Wiredown the USPACE of newproc in TLB entry#0. Check whether target |
2462 | * USPACE is already in another place of TLB before that, and make | | 2623 | * USPACE is already in another place of TLB before that, and make |
2463 | * sure TBIS(it) in the case. | | 2624 | * sure TBIS(it) in the case. |
2464 | */ | | 2625 | */ |
2465 | LEAF_NOPROFILE(MIPSX(cpu_switch_resume)) | | 2626 | LEAF_NOPROFILE(MIPSX(cpu_switch_resume)) |
2466 | #if PAGE_SIZE < USPACE || 1 | | 2627 | #if PAGE_SIZE < USPACE || 1 |
2467 | INT_L a1, L_MD_UPTE_0(a0) # a1 = upte[0] | | 2628 | INT_L a1, L_MD_UPTE_0(a0) # a1 = upte[0] |
2468 | #if (PGSHIFT & 1) == 0 | | 2629 | #if (PGSHIFT & 1) == 0 |
2469 | INT_L a2, L_MD_UPTE_1(a0) # a2 = upte[1] | | 2630 | INT_L a2, L_MD_UPTE_1(a0) # a2 = upte[1] |
2470 | #else | | 2631 | #else |
2471 | INT_ADDU a2, a1, MIPS3_PG_NEXT # a2 = page following upte[0] | | 2632 | INT_ADDU a2, a1, MIPS3_PG_NEXT # a2 = page following upte[0] |
2472 | #endif | | 2633 | #endif |
2473 | PTR_L v0, L_PCB(a0) # va = l->l_addr | | 2634 | PTR_L a3, L_PCB(a0) # va = l->l_addr |
2474 | #if VM_MIN_KERNEL_ADDRESS == MIPS_KSEG2_START | | 2635 | #if VM_MIN_KERNEL_ADDRESS == MIPS_KSEG2_START |
2475 | li t0, VM_MIN_KERNEL_ADDRESS # compute index | | 2636 | li t0, VM_MIN_KERNEL_ADDRESS # compute index |
2476 | blt v0, t0, MIPSX(resume) | | 2637 | blt a3, t0, MIPSX(resume) |
2477 | nop | | 2638 | nop |
2478 | #if defined(ENABLE_MIPS_KSEGX) | | 2639 | #if defined(ENABLE_MIPS_KSEGX) |
2479 | li t0, VM_KSEGX_ADDRESS # below KSEGX? | | 2640 | li t0, VM_KSEGX_ADDRESS # below KSEGX? |
2480 | blt v0, t0, 1f | | 2641 | blt a3, t0, 1f |
2481 | nop | | 2642 | nop |
2482 | li t0, VM_KSEGX_ADDRESS+VM_KSEGX_SIZE # within KSEGX? | | 2643 | li t0, VM_KSEGX_ADDRESS+VM_KSEGX_SIZE # within KSEGX? |
2483 | blt v0, t0, MIPSX(resume) | | 2644 | blt a3, t0, MIPSX(resume) |
2484 | nop | | 2645 | nop |
2485 | 1: | | 2646 | 1: |
2486 | #endif | | 2647 | #endif |
2487 | #else | | 2648 | #else |
2488 | li t0, MIPS_KSEG0_START # above XKSEG? | | 2649 | li t0, MIPS_KSEG0_START # above XKSEG? |
2489 | blt t0, v0, MIPSX(resume) | | 2650 | blt t0, a3, MIPSX(resume) |
2490 | nop | | 2651 | nop |
2491 | li t0, VM_MIN_KERNEL_ADDRESS>>32 # below XKSEG? | | 2652 | li t0, VM_MIN_KERNEL_ADDRESS>>32 # below XKSEG? |
2492 | dsll32 t0, t0, 0 | | 2653 | dsll32 t0, t0, 0 |
2493 | blt v0, t0, MIPSX(resume) | | 2654 | blt a3, t0, MIPSX(resume) |
2494 | nop | | 2655 | nop |
2495 | #endif | | 2656 | #endif |
2496 | | | 2657 | |
2497 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2658 | #ifdef MULTIPROCESSOR |
2498 | /* | | 2659 | /* |
2499 | * Grab the TLB lock (we could use LL/SC but this is shorter) | | 2660 | * Fetch TLB slot before zeroing status. |
2500 | */ | | 2661 | */ |
2501 | _MFC0 a3, MIPS_COP_0_OSSCRATCH, 2 | | 2662 | PTR_L t0, L_CPU(a0) # get cpu_info |
2502 | li v1, __SIMPLELOCK_LOCKED | | 2663 | INT_L t1, CPU_INFO_KSP_TLB_SLOT(t0) # get TLB# for KSP |
2503 | 1: swapw v1, a3 | | | |
2504 | bnez v1, 1b | | | |
2505 | nop | | | |
2506 | #endif | | 2664 | #endif |
2507 | | | 2665 | |
2508 | #if (PGSHIFT & 1) == 0 | | 2666 | #if (PGSHIFT & 1) == 0 |
2509 | and t0, v0, MIPS3_PG_ODDPG | | 2667 | and v0, a3, MIPS3_PG_ODDPG |
2510 | beqz t0, MIPSX(entry0) | | 2668 | beqz v0, MIPSX(entry0) |
2511 | nop | | 2669 | nop |
2512 | | | 2670 | |
2513 | PANIC("USPACE sat on odd page boundary") | | 2671 | PANIC("USPACE sat on odd page boundary") |
2514 | #endif | | 2672 | #endif |
2515 | | | 2673 | |
2516 | MIPSX(entry0): | | 2674 | MIPSX(entry0): |
2517 | _MFC0 t3, MIPS_COP_0_TLB_HI # save TLB_HI | | 2675 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2518 | _MTC0 v0, MIPS_COP_0_TLB_HI # VPN = va | | 2676 | mfc0 v1, MIPS_COP_0_STATUS # save status |
| | | 2677 | mtc0 zero, MIPS_COP_0_STATUS # disable interrupts |
| | | 2678 | /* |
| | | 2679 | * Grab the TLB lock (we could use LL/SC but this is shorter) |
| | | 2680 | */ |
| | | 2681 | _MFC0 ta3, MIPS_COP_0_OSSCRATCH, 2 |
| | | 2682 | li v0, __SIMPLELOCK_LOCKED |
| | | 2683 | 1: swapw v0, ta3 |
| | | 2684 | bnez v0, 1b |
| | | 2685 | nop |
| | | 2686 | #endif |
| | | 2687 | |
| | | 2688 | _MFC0 ta0, MIPS_COP_0_TLB_HI # save TLB_HI |
| | | 2689 | _MTC0 a3, MIPS_COP_0_TLB_HI # VPN = va |
2519 | COP0_SYNC | | 2690 | COP0_SYNC |
2520 | tlbp # probe VPN | | 2691 | tlbp # probe VPN |
2521 | COP0_SYNC | | 2692 | COP0_SYNC |
2522 | mfc0 t0, MIPS_COP_0_TLB_INDEX | | 2693 | mfc0 t0, MIPS_COP_0_TLB_INDEX |
2523 | #ifdef MIPS3 | | 2694 | #ifdef MIPS3 |
2524 | nop | | 2695 | nop |
2525 | #endif | | 2696 | #endif |
2526 | bltz t0, MIPSX(entry0set) | | 2697 | bltz t0, MIPSX(entry0set) |
2527 | sll t0, t0, (PGSHIFT | 1) # (PAGE_SHIFT | 1) | | 2698 | sll t0, (PGSHIFT | 1) # (PAGE_SHIFT | 1) |
2528 | PTR_LA t0, MIPS_KSEG0_START(t0) | | 2699 | PTR_LA t0, MIPS_KSEG0_START(t0) |
2529 | _MTC0 t0, MIPS_COP_0_TLB_HI | | 2700 | _MTC0 t0, MIPS_COP_0_TLB_HI |
2530 | _MTC0 zero, MIPS_COP_0_TLB_LO0 | | 2701 | _MTC0 zero, MIPS_COP_0_TLB_LO0 |
2531 | _MTC0 zero, MIPS_COP_0_TLB_LO1 | | 2702 | _MTC0 zero, MIPS_COP_0_TLB_LO1 |
2532 | COP0_SYNC | | 2703 | COP0_SYNC |
2533 | tlbwi | | 2704 | tlbwi |
2534 | COP0_SYNC | | 2705 | COP0_SYNC |
2535 | _MTC0 v0, MIPS_COP_0_TLB_HI # set VPN again | | 2706 | _MTC0 a3, MIPS_COP_0_TLB_HI # set VPN again |
2536 | COP0_SYNC | | 2707 | COP0_SYNC |
2537 | MIPSX(entry0set): | | 2708 | MIPSX(entry0set): |
2538 | #ifdef MULTIPROCESSOR | | 2709 | #ifdef MULTIPROCESSOR |
2539 | PTR_L t0, L_CPU(a0) # get cpu_info | | | |
2540 | INT_L t1, CPU_INFO_KSP_TLB_SLOT(t0) # get TLB# for KSP | | | |
2541 | mtc0 t1, MIPS_COP_0_TLB_INDEX # TLB entry (virtual) | | 2710 | mtc0 t1, MIPS_COP_0_TLB_INDEX # TLB entry (virtual) |
2542 | #else | | 2711 | #else |
2543 | mtc0 zero, MIPS_COP_0_TLB_INDEX # TLB entry #0 (virtual) | | 2712 | mtc0 zero, MIPS_COP_0_TLB_INDEX # TLB entry #0 (virtual) |
2544 | #endif | | 2713 | #endif |
2545 | COP0_SYNC | | 2714 | COP0_SYNC |
2546 | or a1, MIPS3_PG_G | | 2715 | or a1, MIPS3_PG_G |
2547 | _MTC0 a1, MIPS_COP_0_TLB_LO0 # upte[0] | PG_G | | 2716 | _MTC0 a1, MIPS_COP_0_TLB_LO0 # upte[0] | PG_G |
2548 | or a2, MIPS3_PG_G | | 2717 | or a2, MIPS3_PG_G |
2549 | _MTC0 a2, MIPS_COP_0_TLB_LO1 # upte[1] | PG_G | | 2718 | _MTC0 a2, MIPS_COP_0_TLB_LO1 # upte[1] | PG_G |
2550 | COP0_SYNC | | 2719 | COP0_SYNC |
2551 | tlbwi # set TLB entry #0 | | 2720 | tlbwi # set TLB entry #0 |
2552 | COP0_SYNC | | 2721 | COP0_SYNC |
2553 | _MTC0 t3, MIPS_COP_0_TLB_HI # restore TLB_HI | | 2722 | _MTC0 ta0, MIPS_COP_0_TLB_HI # restore TLB_HI |
2554 | COP0_SYNC | | 2723 | COP0_SYNC |
2555 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 | | 2724 | #if defined(MULTIPROCESSOR) && (MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 |
2556 | INT_S zero, 0(a3) # clear tlb lock | | 2725 | mtc0 v1, MIPS_COP_0_STATUS # restore Status register |
| | | 2726 | INT_S zero, 0(ta3) # clear tlb lock |
2557 | #endif | | 2727 | #endif |
2558 | MIPSX(resume): | | 2728 | MIPSX(resume): |
2559 | #endif /* PAGE_SIZE < USPACE */ | | 2729 | #endif /* PAGE_SIZE < USPACE */ |
2560 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 | | 2730 | #if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0 |
2561 | PTR_L v0, L_PRIVATE(a0) # get lwp private | | 2731 | PTR_L v0, L_PRIVATE(a0) # get lwp private |
2562 | _MTC0 v0, MIPS_COP_0_TLB_CONTEXT, 4 # make available for rdhwr | | 2732 | _MTC0 v0, MIPS_COP_0_TLB_CONTEXT, 4 # make available for rdhwr |
2563 | #endif | | 2733 | #endif |
2564 | j ra | | 2734 | j ra |
2565 | nop | | 2735 | nop |
2566 | END(MIPSX(cpu_switch_resume)) | | 2736 | END(MIPSX(cpu_switch_resume)) |
2567 | | | 2737 | |
2568 | /*-------------------------------------------------------------------------- | | | |
2569 | * | | | |
2570 | * mipsN_tlb_write_indexed -- | | | |
2571 | * | | | |
2572 | * Write the given entry into the TLB at the given index. | | | |
2573 | * Pass full R4000 style TLB info including variable page size mask. | | | |
2574 | * | | | |
2575 | * mipsN_tlb_write_indexed(size_t tlb_index, const struct tlbmask *tlb) | | | |
2576 | * | | | |
2577 | * Results: | | | |
2578 | * None. | | | |
2579 | * | | | |
2580 | * Side effects: | | | |
2581 | * TLB entry set. | | | |
2582 | * | | | |
2583 | *-------------------------------------------------------------------------- | | | |
2584 | */ | | | |
2585 | LEAF(MIPSX(tlb_write_indexed)) | | | |
2586 | mfc0 v1, MIPS_COP_0_STATUS # Save the status register. | | | |
2587 | RESET_EXCEPTION_LEVEL_DISABLE_INTERRUPTS(v0) | | | |
2588 | COP0_SYNC | | | |
2589 | INT_L a2, TLBMASK_LO0(a1) # fetch tlb->tlb_lo0 | | | |
2590 | INT_L a3, TLBMASK_LO1(a1) # fetch tlb->tlb_lo1 | | | |
2591 | mfc0 v0, MIPS_COP_0_TLB_PG_MASK # Save current page mask. | | | |
2592 | _MFC0 t0, MIPS_COP_0_TLB_HI # Save the current PID. | | | |
2593 | | | | |
2594 | _MTC0 a2, MIPS_COP_0_TLB_LO0 # Set up entry low0. | | | |
2595 | _MTC0 a3, MIPS_COP_0_TLB_LO1 # Set up entry low1. | | | |
2596 | COP0_SYNC | | | |
2597 | INT_L a2, TLBMASK_MASK(a1) # fetch tlb->tlb_mask | | | |
2598 | PTR_L a3, TLBMASK_HI(a1) # fetch tlb->tlb_hi | | | |
2599 | mtc0 a0, MIPS_COP_0_TLB_INDEX # Set the index. | | | |
2600 | mtc0 a2, MIPS_COP_0_TLB_PG_MASK # Set up entry pagemask. | | | |
2601 | _MTC0 a3, MIPS_COP_0_TLB_HI # Set up entry high. | | | |
2602 | COP0_SYNC | | | |
2603 | tlbwi # Write the TLB | | | |
2604 | COP0_SYNC | | | |
2605 | | | | |
2606 | _MTC0 t0, MIPS_COP_0_TLB_HI # Restore the PID. | | | |
2607 | mtc0 v0, MIPS_COP_0_TLB_PG_MASK # Restore page mask. | | | |
2608 | COP0_SYNC | | | |
2609 | mtc0 v1, MIPS_COP_0_STATUS # Restore the status register | | | |
2610 | JR_HB_RA | | | |
2611 | END(MIPSX(tlb_write_indexed)) | | | |
2612 | | | | |
2613 | #if defined(MIPS3) | | 2738 | #if defined(MIPS3) |
2614 | /*---------------------------------------------------------------------------- | | 2739 | /*---------------------------------------------------------------------------- |
2615 | * | | 2740 | * |
2616 | * mipsN_VCED -- | | 2741 | * mipsN_VCED -- |
2617 | * | | 2742 | * |
2618 | * Handle virtual coherency exceptions. | | 2743 | * Handle virtual coherency exceptions. |
2619 | * Called directly from the mips3 exception-table code. | | 2744 | * Called directly from the mips3 exception-table code. |
2620 | * only k0, k1 are available on entry | | 2745 | * only k0, k1 are available on entry |
2621 | * | | 2746 | * |
2622 | * Results: | | 2747 | * Results: |
2623 | * None. | | 2748 | * None. |
2624 | * | | 2749 | * |
2625 | * Side effects: | | 2750 | * Side effects: |
| @@ -2749,28 +2874,28 @@ _C_LABEL(MIPSX(locoresw)): | | | @@ -2749,28 +2874,28 @@ _C_LABEL(MIPSX(locoresw)): |
2749 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_idle | | 2874 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_idle |
2750 | PTR_WORD _C_LABEL(nullop) # lsw_send_ipi | | 2875 | PTR_WORD _C_LABEL(nullop) # lsw_send_ipi |
2751 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_offline_md | | 2876 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_offline_md |
2752 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_init | | 2877 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_init |
2753 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_run | | 2878 | PTR_WORD _C_LABEL(nullop) # lsw_cpu_run |
2754 | PTR_WORD _C_LABEL(nullop) # lsw_bus_error | | 2879 | PTR_WORD _C_LABEL(nullop) # lsw_bus_error |
2755 | | | 2880 | |
2756 | MIPSX(excpt_sw): | | 2881 | MIPSX(excpt_sw): |
2757 | #### | | 2882 | #### |
2758 | #### The kernel exception handlers. | | 2883 | #### The kernel exception handlers. |
2759 | #### | | 2884 | #### |
2760 | PTR_WORD _C_LABEL(MIPSX(kern_intr)) # 0 external interrupt | | 2885 | PTR_WORD _C_LABEL(MIPSX(kern_intr)) # 0 external interrupt |
2761 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 1 TLB modification | | 2886 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 1 TLB modification |
2762 | PTR_WORD _C_LABEL(MIPSX(tlb_invalid_exception)) # 2 TLB miss (LW/I-fetch) | | 2887 | PTR_WORD _C_LABEL(MIPSX(kern_tlb_invalid_exception)) # 2 TLB miss (LW/I-fetch) |
2763 | PTR_WORD _C_LABEL(MIPSX(tlb_invalid_exception)) # 3 TLB miss (SW) | | 2888 | PTR_WORD _C_LABEL(MIPSX(kern_tlb_invalid_exception)) # 3 TLB miss (SW) |
2764 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 4 address error (LW/I-fetch) | | 2889 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 4 address error (LW/I-fetch) |
2765 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 5 address error (SW) | | 2890 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 5 address error (SW) |
2766 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 6 bus error (I-fetch) | | 2891 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 6 bus error (I-fetch) |
2767 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 7 bus error (load or store) | | 2892 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 7 bus error (load or store) |
2768 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 8 system call | | 2893 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 8 system call |
2769 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 9 breakpoint | | 2894 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 9 breakpoint |
2770 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 10 reserved instruction | | 2895 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 10 reserved instruction |
2771 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 11 coprocessor unusable | | 2896 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 11 coprocessor unusable |
2772 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 12 arithmetic overflow | | 2897 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 12 arithmetic overflow |
2773 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 13 r4k trap exception | | 2898 | PTR_WORD _C_LABEL(MIPSX(kern_gen_exception)) # 13 r4k trap exception |
2774 | #if defined(MIPS3) | | 2899 | #if defined(MIPS3) |
2775 | PTR_WORD _C_LABEL(MIPSX(VCEI)) # 14 r4k virt coherence | | 2900 | PTR_WORD _C_LABEL(MIPSX(VCEI)) # 14 r4k virt coherence |
2776 | #else | | 2901 | #else |