// Code generated by mkpreempt.go; DO NOT EDIT.

#include "go_asm.h"
#include "go_tls.h"
#include "asm_amd64.h"
#include "textflag.h"
TEXT ·asyncPreempt(SB),NOSPLIT|NOFRAME,$0-0
	PUSHQ BP
	MOVQ SP, BP
	// Save flags before clobbering them
	PUSHFQ
	// obj doesn't understand ADD/SUB on SP, but does understand ADJSP
	ADJSP $112
	// But vet doesn't know ADJSP, so suppress vet stack checking
	NOP SP
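	// The 112-byte frame holds the 14 general-purpose registers saved
	// below (14 x 8 bytes).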
	// Save GPs
	MOVQ AX, 0(SP)
	MOVQ CX, 8(SP)
	MOVQ DX, 16(SP)
	MOVQ BX, 24(SP)
	MOVQ SI, 32(SP)
	MOVQ DI, 40(SP)
	MOVQ R8, 48(SP)
	MOVQ R9, 56(SP)
	MOVQ R10, 64(SP)
	MOVQ R11, 72(SP)
	MOVQ R12, 80(SP)
	MOVQ R13, 88(SP)
	MOVQ R14, 96(SP)
	MOVQ R15, 104(SP)
	// Save extended register state to p.xRegs.scratch
	// Don't make assumptions about ABI register state. See mkpreempt.go
	get_tls(CX)
	MOVQ g(CX), R14
	MOVQ g_m(R14), AX
	MOVQ m_p(AX), AX
	LEAQ (p_xRegs+xRegPerP_scratch)(AX), AX
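	// AX now points at the per-P scratch area. Each vector slot is 64
	// bytes apart so a full 512-bit ZMM register fits; the SSE and AVX2
	// paths below use only the low 16 or 32 bytes of each slot.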
#ifdef GOEXPERIMENT_simd
	CMPB internal∕cpu·X86+const_offsetX86HasAVX512(SB), $1
	JE saveAVX512
	CMPB internal∕cpu·X86+const_offsetX86HasAVX2(SB), $1
	JE saveAVX2
#endif
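	// Without GOEXPERIMENT_simd, fall through and save only the SSE state.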
saveSSE:
	MOVUPS X0, 0(AX)
	MOVUPS X1, 64(AX)
	MOVUPS X2, 128(AX)
	MOVUPS X3, 192(AX)
	MOVUPS X4, 256(AX)
	MOVUPS X5, 320(AX)
	MOVUPS X6, 384(AX)
	MOVUPS X7, 448(AX)
	MOVUPS X8, 512(AX)
	MOVUPS X9, 576(AX)
	MOVUPS X10, 640(AX)
	MOVUPS X11, 704(AX)
	MOVUPS X12, 768(AX)
	MOVUPS X13, 832(AX)
	MOVUPS X14, 896(AX)
	MOVUPS X15, 960(AX)
	JMP preempt
saveAVX2:
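	// AVX2: save the full 256-bit YMM registers into the same 64-byte slots.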
	VMOVDQU Y0, 0(AX)
	VMOVDQU Y1, 64(AX)
	VMOVDQU Y2, 128(AX)
	VMOVDQU Y3, 192(AX)
	VMOVDQU Y4, 256(AX)
	VMOVDQU Y5, 320(AX)
	VMOVDQU Y6, 384(AX)
	VMOVDQU Y7, 448(AX)
	VMOVDQU Y8, 512(AX)
	VMOVDQU Y9, 576(AX)
	VMOVDQU Y10, 640(AX)
	VMOVDQU Y11, 704(AX)
	VMOVDQU Y12, 768(AX)
	VMOVDQU Y13, 832(AX)
	VMOVDQU Y14, 896(AX)
	VMOVDQU Y15, 960(AX)
	JMP preempt
saveAVX512:
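	// AVX-512: save the full 512-bit ZMM registers and the K0-K7 opmask registers.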
	VMOVDQU64 Z0, 0(AX)
	VMOVDQU64 Z1, 64(AX)
	VMOVDQU64 Z2, 128(AX)
	VMOVDQU64 Z3, 192(AX)
	VMOVDQU64 Z4, 256(AX)
	VMOVDQU64 Z5, 320(AX)
	VMOVDQU64 Z6, 384(AX)
	VMOVDQU64 Z7, 448(AX)
	VMOVDQU64 Z8, 512(AX)
	VMOVDQU64 Z9, 576(AX)
	VMOVDQU64 Z10, 640(AX)
	VMOVDQU64 Z11, 704(AX)
	VMOVDQU64 Z12, 768(AX)
	VMOVDQU64 Z13, 832(AX)
	VMOVDQU64 Z14, 896(AX)
	VMOVDQU64 Z15, 960(AX)
	KMOVQ K0, 1024(AX)
	KMOVQ K1, 1032(AX)
	KMOVQ K2, 1040(AX)
	KMOVQ K3, 1048(AX)
	KMOVQ K4, 1056(AX)
	KMOVQ K5, 1064(AX)
	KMOVQ K6, 1072(AX)
	KMOVQ K7, 1080(AX)
	JMP preempt
preempt:
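	// asyncPreempt2 (preempt.go) is the Go-level half of async preemption.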
	CALL ·asyncPreempt2(SB)
	// Restore non-GPs from *p.xRegs.cache
	MOVQ g_m(R14), AX
	MOVQ m_p(AX), AX
	MOVQ (p_xRegs+xRegPerP_cache)(AX), AX
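	// Note that cache is a pointer that is loaded (MOVQ), unlike scratch
	// above, whose address was taken directly (LEAQ).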
#ifdef GOEXPERIMENT_simd
	CMPB internal∕cpu·X86+const_offsetX86HasAVX512(SB), $1
	JE restoreAVX512
	CMPB internal∕cpu·X86+const_offsetX86HasAVX2(SB), $1
	JE restoreAVX2
#endif
restoreSSE:
	MOVUPS 960(AX), X15
	MOVUPS 896(AX), X14
	MOVUPS 832(AX), X13
	MOVUPS 768(AX), X12
	MOVUPS 704(AX), X11
	MOVUPS 640(AX), X10
	MOVUPS 576(AX), X9
	MOVUPS 512(AX), X8
	MOVUPS 448(AX), X7
	MOVUPS 384(AX), X6
	MOVUPS 320(AX), X5
	MOVUPS 256(AX), X4
	MOVUPS 192(AX), X3
	MOVUPS 128(AX), X2
	MOVUPS 64(AX), X1
	MOVUPS 0(AX), X0
	JMP restoreGPs
restoreAVX2:
	VMOVDQU 960(AX), Y15
	VMOVDQU 896(AX), Y14
	VMOVDQU 832(AX), Y13
	VMOVDQU 768(AX), Y12
	VMOVDQU 704(AX), Y11
	VMOVDQU 640(AX), Y10
	VMOVDQU 576(AX), Y9
	VMOVDQU 512(AX), Y8
	VMOVDQU 448(AX), Y7
	VMOVDQU 384(AX), Y6
	VMOVDQU 320(AX), Y5
	VMOVDQU 256(AX), Y4
	VMOVDQU 192(AX), Y3
	VMOVDQU 128(AX), Y2
	VMOVDQU 64(AX), Y1
	VMOVDQU 0(AX), Y0
	JMP restoreGPs
restoreAVX512:
	KMOVQ 1080(AX), K7
	KMOVQ 1072(AX), K6
	KMOVQ 1064(AX), K5
	KMOVQ 1056(AX), K4
	KMOVQ 1048(AX), K3
	KMOVQ 1040(AX), K2
	KMOVQ 1032(AX), K1
	KMOVQ 1024(AX), K0
	VMOVDQU64 960(AX), Z15
	VMOVDQU64 896(AX), Z14
	VMOVDQU64 832(AX), Z13
	VMOVDQU64 768(AX), Z12
	VMOVDQU64 704(AX), Z11
	VMOVDQU64 640(AX), Z10
	VMOVDQU64 576(AX), Z9
	VMOVDQU64 512(AX), Z8
	VMOVDQU64 448(AX), Z7
	VMOVDQU64 384(AX), Z6
	VMOVDQU64 320(AX), Z5
	VMOVDQU64 256(AX), Z4
	VMOVDQU64 192(AX), Z3
	VMOVDQU64 128(AX), Z2
	VMOVDQU64 64(AX), Z1
	VMOVDQU64 0(AX), Z0
	JMP restoreGPs
restoreGPs:
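	// The restore runs in the reverse order of the save, finishing with AX.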
	// Restore GPs
	MOVQ 104(SP), R15
	MOVQ 96(SP), R14
	MOVQ 88(SP), R13
	MOVQ 80(SP), R12
	MOVQ 72(SP), R11
	MOVQ 64(SP), R10
	MOVQ 56(SP), R9
	MOVQ 48(SP), R8
	MOVQ 40(SP), DI
	MOVQ 32(SP), SI
	MOVQ 24(SP), BX
	MOVQ 16(SP), DX
	MOVQ 8(SP), CX
	MOVQ 0(SP), AX
	ADJSP $-112
	POPFQ
	POPQ BP
	RET