| // Code generated by mkpreempt.go; DO NOT EDIT. |
| |
| #include "go_asm.h" |
| #include "textflag.h" |
| |
TEXT ·asyncPreempt(SB),NOSPLIT|NOFRAME,$0-0
// asyncPreempt saves all user registers, calls asyncPreempt2, restores the
// registers, and resumes the interrupted instruction.
// On entry R1 (LR) holds the PC to resume at: sigctxt.pushCall pushed the
// LR that was live at the interrupt onto the stack and pointed LR at the
// interrupted instruction before redirecting execution here.
// Save the resume PC at the bottom of the 224-byte frame we carve out.
MOVV R1, -224(R3)
SUBV $224, R3
// Save GPs
// R4-R21, R23-R29 and R31 are spilled; R0 (zero), R2, R3 (SP), R22 (g)
// and R30 (the assembler temporary, used in the epilogue below) get no slot.
MOVV R4, 8(R3)
MOVV R5, 16(R3)
MOVV R6, 24(R3)
MOVV R7, 32(R3)
MOVV R8, 40(R3)
MOVV R9, 48(R3)
MOVV R10, 56(R3)
MOVV R11, 64(R3)
MOVV R12, 72(R3)
MOVV R13, 80(R3)
MOVV R14, 88(R3)
MOVV R15, 96(R3)
MOVV R16, 104(R3)
MOVV R17, 112(R3)
MOVV R18, 120(R3)
MOVV R19, 128(R3)
MOVV R20, 136(R3)
MOVV R21, 144(R3)
MOVV R23, 152(R3)
MOVV R24, 160(R3)
MOVV R25, 168(R3)
MOVV R26, 176(R3)
MOVV R27, 184(R3)
MOVV R28, 192(R3)
MOVV R29, 200(R3)
MOVV R31, 208(R3)
// Pack the eight FCC condition-flag registers into a single 64-bit word,
// one byte per flag (FCC0 in bits 7:0 ... FCC7 in bits 63:56), and store
// it in the last frame slot at 216(R3).
MOVV FCC0, R4
BSTRINSV $7, R4, $0, R5
MOVV FCC1, R4
BSTRINSV $15, R4, $8, R5
MOVV FCC2, R4
BSTRINSV $23, R4, $16, R5
MOVV FCC3, R4
BSTRINSV $31, R4, $24, R5
MOVV FCC4, R4
BSTRINSV $39, R4, $32, R5
MOVV FCC5, R4
BSTRINSV $47, R4, $40, R5
MOVV FCC6, R4
BSTRINSV $55, R4, $48, R5
MOVV FCC7, R4
BSTRINSV $63, R4, $56, R5
MOVV R5, 216(R3)
// Save extended register state to p.xRegs.scratch
// R4 = &g.m.p.xRegs.scratch; dispatch on the widest vector extension
// available (LASX > LSX > scalar FP).
MOVV g_m(g), R4
MOVV m_p(R4), R4
ADDV $(p_xRegs+xRegPerP_scratch), R4, R4
MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLASX(SB), R5
BNE R5, saveLASX
MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLSX(SB), R5
BNE R5, saveLSX
saveFP:
// No vector extension: save only the 64-bit FP registers. Slots are 32
// bytes apart because the scratch area is laid out for 256-bit LASX
// registers; narrower saves leave the tail of each slot unused.
MOVD F0, 0(R4)
MOVD F1, 32(R4)
MOVD F2, 64(R4)
MOVD F3, 96(R4)
MOVD F4, 128(R4)
MOVD F5, 160(R4)
MOVD F6, 192(R4)
MOVD F7, 224(R4)
MOVD F8, 256(R4)
MOVD F9, 288(R4)
MOVD F10, 320(R4)
MOVD F11, 352(R4)
MOVD F12, 384(R4)
MOVD F13, 416(R4)
MOVD F14, 448(R4)
MOVD F15, 480(R4)
MOVD F16, 512(R4)
MOVD F17, 544(R4)
MOVD F18, 576(R4)
MOVD F19, 608(R4)
MOVD F20, 640(R4)
MOVD F21, 672(R4)
MOVD F22, 704(R4)
MOVD F23, 736(R4)
MOVD F24, 768(R4)
MOVD F25, 800(R4)
MOVD F26, 832(R4)
MOVD F27, 864(R4)
MOVD F28, 896(R4)
MOVD F29, 928(R4)
MOVD F30, 960(R4)
MOVD F31, 992(R4)
JMP preempt
saveLSX:
// 128-bit LSX registers (the V registers alias the low halves of the
// LASX/FP register file), same 32-byte slot layout.
VMOVQ V0, 0(R4)
VMOVQ V1, 32(R4)
VMOVQ V2, 64(R4)
VMOVQ V3, 96(R4)
VMOVQ V4, 128(R4)
VMOVQ V5, 160(R4)
VMOVQ V6, 192(R4)
VMOVQ V7, 224(R4)
VMOVQ V8, 256(R4)
VMOVQ V9, 288(R4)
VMOVQ V10, 320(R4)
VMOVQ V11, 352(R4)
VMOVQ V12, 384(R4)
VMOVQ V13, 416(R4)
VMOVQ V14, 448(R4)
VMOVQ V15, 480(R4)
VMOVQ V16, 512(R4)
VMOVQ V17, 544(R4)
VMOVQ V18, 576(R4)
VMOVQ V19, 608(R4)
VMOVQ V20, 640(R4)
VMOVQ V21, 672(R4)
VMOVQ V22, 704(R4)
VMOVQ V23, 736(R4)
VMOVQ V24, 768(R4)
VMOVQ V25, 800(R4)
VMOVQ V26, 832(R4)
VMOVQ V27, 864(R4)
VMOVQ V28, 896(R4)
VMOVQ V29, 928(R4)
VMOVQ V30, 960(R4)
VMOVQ V31, 992(R4)
JMP preempt
saveLASX:
// Full 256-bit LASX registers; each X register fills its 32-byte slot.
XVMOVQ X0, 0(R4)
XVMOVQ X1, 32(R4)
XVMOVQ X2, 64(R4)
XVMOVQ X3, 96(R4)
XVMOVQ X4, 128(R4)
XVMOVQ X5, 160(R4)
XVMOVQ X6, 192(R4)
XVMOVQ X7, 224(R4)
XVMOVQ X8, 256(R4)
XVMOVQ X9, 288(R4)
XVMOVQ X10, 320(R4)
XVMOVQ X11, 352(R4)
XVMOVQ X12, 384(R4)
XVMOVQ X13, 416(R4)
XVMOVQ X14, 448(R4)
XVMOVQ X15, 480(R4)
XVMOVQ X16, 512(R4)
XVMOVQ X17, 544(R4)
XVMOVQ X18, 576(R4)
XVMOVQ X19, 608(R4)
XVMOVQ X20, 640(R4)
XVMOVQ X21, 672(R4)
XVMOVQ X22, 704(R4)
XVMOVQ X23, 736(R4)
XVMOVQ X24, 768(R4)
XVMOVQ X25, 800(R4)
XVMOVQ X26, 832(R4)
XVMOVQ X27, 864(R4)
XVMOVQ X28, 896(R4)
XVMOVQ X29, 928(R4)
XVMOVQ X30, 960(R4)
XVMOVQ X31, 992(R4)
preempt:
CALL ·asyncPreempt2(SB)
// Restore non-GPs from *p.xRegs.cache
// Note: restore reads through the cache pointer (not scratch) loaded
// below, in reverse register order, mirroring the save dispatch above.
MOVV g_m(g), R4
MOVV m_p(R4), R4
MOVV (p_xRegs+xRegPerP_cache)(R4), R4
MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLASX(SB), R5
BNE R5, restoreLASX
MOVBU internal∕cpu·Loong64+const_offsetLOONG64HasLSX(SB), R5
BNE R5, restoreLSX
restoreFP:
MOVD 992(R4), F31
MOVD 960(R4), F30
MOVD 928(R4), F29
MOVD 896(R4), F28
MOVD 864(R4), F27
MOVD 832(R4), F26
MOVD 800(R4), F25
MOVD 768(R4), F24
MOVD 736(R4), F23
MOVD 704(R4), F22
MOVD 672(R4), F21
MOVD 640(R4), F20
MOVD 608(R4), F19
MOVD 576(R4), F18
MOVD 544(R4), F17
MOVD 512(R4), F16
MOVD 480(R4), F15
MOVD 448(R4), F14
MOVD 416(R4), F13
MOVD 384(R4), F12
MOVD 352(R4), F11
MOVD 320(R4), F10
MOVD 288(R4), F9
MOVD 256(R4), F8
MOVD 224(R4), F7
MOVD 192(R4), F6
MOVD 160(R4), F5
MOVD 128(R4), F4
MOVD 96(R4), F3
MOVD 64(R4), F2
MOVD 32(R4), F1
MOVD 0(R4), F0
JMP restoreGPs
restoreLSX:
VMOVQ 992(R4), V31
VMOVQ 960(R4), V30
VMOVQ 928(R4), V29
VMOVQ 896(R4), V28
VMOVQ 864(R4), V27
VMOVQ 832(R4), V26
VMOVQ 800(R4), V25
VMOVQ 768(R4), V24
VMOVQ 736(R4), V23
VMOVQ 704(R4), V22
VMOVQ 672(R4), V21
VMOVQ 640(R4), V20
VMOVQ 608(R4), V19
VMOVQ 576(R4), V18
VMOVQ 544(R4), V17
VMOVQ 512(R4), V16
VMOVQ 480(R4), V15
VMOVQ 448(R4), V14
VMOVQ 416(R4), V13
VMOVQ 384(R4), V12
VMOVQ 352(R4), V11
VMOVQ 320(R4), V10
VMOVQ 288(R4), V9
VMOVQ 256(R4), V8
VMOVQ 224(R4), V7
VMOVQ 192(R4), V6
VMOVQ 160(R4), V5
VMOVQ 128(R4), V4
VMOVQ 96(R4), V3
VMOVQ 64(R4), V2
VMOVQ 32(R4), V1
VMOVQ 0(R4), V0
JMP restoreGPs
restoreLASX:
XVMOVQ 992(R4), X31
XVMOVQ 960(R4), X30
XVMOVQ 928(R4), X29
XVMOVQ 896(R4), X28
XVMOVQ 864(R4), X27
XVMOVQ 832(R4), X26
XVMOVQ 800(R4), X25
XVMOVQ 768(R4), X24
XVMOVQ 736(R4), X23
XVMOVQ 704(R4), X22
XVMOVQ 672(R4), X21
XVMOVQ 640(R4), X20
XVMOVQ 608(R4), X19
XVMOVQ 576(R4), X18
XVMOVQ 544(R4), X17
XVMOVQ 512(R4), X16
XVMOVQ 480(R4), X15
XVMOVQ 448(R4), X14
XVMOVQ 416(R4), X13
XVMOVQ 384(R4), X12
XVMOVQ 352(R4), X11
XVMOVQ 320(R4), X10
XVMOVQ 288(R4), X9
XVMOVQ 256(R4), X8
XVMOVQ 224(R4), X7
XVMOVQ 192(R4), X6
XVMOVQ 160(R4), X5
XVMOVQ 128(R4), X4
XVMOVQ 96(R4), X3
XVMOVQ 64(R4), X2
XVMOVQ 32(R4), X1
XVMOVQ 0(R4), X0
// Restore GPs
restoreGPs:
// Unpack the FCC flags (one byte each) from the word saved at 216(R3),
// then reload the general-purpose registers in reverse order.
MOVV 216(R3), R5
BSTRPICKV $7, R5, $0, R4
MOVV R4, FCC0
BSTRPICKV $15, R5, $8, R4
MOVV R4, FCC1
BSTRPICKV $23, R5, $16, R4
MOVV R4, FCC2
BSTRPICKV $31, R5, $24, R4
MOVV R4, FCC3
BSTRPICKV $39, R5, $32, R4
MOVV R4, FCC4
BSTRPICKV $47, R5, $40, R4
MOVV R4, FCC5
BSTRPICKV $55, R5, $48, R4
MOVV R4, FCC6
BSTRPICKV $63, R5, $56, R4
MOVV R4, FCC7
MOVV 208(R3), R31
MOVV 200(R3), R29
MOVV 192(R3), R28
MOVV 184(R3), R27
MOVV 176(R3), R26
MOVV 168(R3), R25
MOVV 160(R3), R24
MOVV 152(R3), R23
MOVV 144(R3), R21
MOVV 136(R3), R20
MOVV 128(R3), R19
MOVV 120(R3), R18
MOVV 112(R3), R17
MOVV 104(R3), R16
MOVV 96(R3), R15
MOVV 88(R3), R14
MOVV 80(R3), R13
MOVV 72(R3), R12
MOVV 64(R3), R11
MOVV 56(R3), R10
MOVV 48(R3), R9
MOVV 40(R3), R8
MOVV 32(R3), R7
MOVV 24(R3), R6
MOVV 16(R3), R5
MOVV 8(R3), R4
// 224(R3) sits just above our frame: it is the LR that sigctxt.pushCall
// pushed at the moment of the interrupt.
MOVV 224(R3), R1
// Resume PC saved in the prologue; R30 is the assembler temporary.
MOVV (R3), R30
// Pop our 224-byte frame plus the 8 bytes pushed by sigctxt.pushCall.
ADDV $232, R3
JMP (R30)