// Copyright 2020 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// syshandler_amd64.S: the systrap syscall-interception path for amd64.
// AT&T syntax. Runs on the per-thread syshandler stack (%rsp was switched by
// the usertrap trampoline before jumping here); %gs points at the sysmsg
// struct, and offsetof_* / XCR0_* / XSAVE_MODE_* constants come from the
// generated headers below.

#include "sysmsg_offsets.h"
#include "sysmsg_offsets_amd64.h"

// Helper macros:
////////////////////////////////////////

// prepare_enter_syshandler does the following:
// - saves all registers that are restorable onto the thread_context struct.
// - loads the address of the thread_context struct into %rcx.
//
// Stack discipline: pushes rflags then the original %rcx; both are popped
// back off before the macro ends, so the net stack delta is zero.
.macro prepare_enter_syshandler
  // Syshandler clobbers rflags (load_thread_context_addr does so for example).
  // Therefore save it as the first thing we do.
  pushfq
  // load_thread_context_addr overwrites %rcx.
  push %rcx

  movq %gs:offsetof_sysmsg_context, %rcx

  // Registers listed in order as written in ptregs:
  movq %r15, offsetof_thread_context_ptregs_r15(%rcx)
  movq %r14, offsetof_thread_context_ptregs_r14(%rcx)
  movq %r13, offsetof_thread_context_ptregs_r13(%rcx)
  movq %r12, offsetof_thread_context_ptregs_r12(%rcx)
  movq %rbp, offsetof_thread_context_ptregs_rbp(%rcx)
  movq %rbx, offsetof_thread_context_ptregs_rbx(%rcx)
  movq %r11, offsetof_thread_context_ptregs_r11(%rcx)
  movq %r10, offsetof_thread_context_ptregs_r10(%rcx)
  movq %r9, offsetof_thread_context_ptregs_r9(%rcx)
  movq %r8, offsetof_thread_context_ptregs_r8(%rcx)
  movq %rax, offsetof_thread_context_ptregs_rax(%rcx)
  // %r15 is already saved above, so reuse it to recover the user's original
  // %rcx (pushed at the top of this macro) and store it into ptregs.
  pop %r15
  movq %r15, offsetof_thread_context_ptregs_rcx(%rcx)
  movq %rdx, offsetof_thread_context_ptregs_rdx(%rcx)
  movq %rsi, offsetof_thread_context_ptregs_rsi(%rcx)
  movq %rdi, offsetof_thread_context_ptregs_rdi(%rcx)
  // %rax still holds the syscall number loaded by the usertrap trampoline
  // ("mov sysno,%eax" — see __export_syshandler below), so it doubles as
  // orig_rax here.
  movq %rax, offsetof_thread_context_ptregs_orig_rax(%rcx)

  movw %cs, offsetof_thread_context_ptregs_cs(%rcx)
  movw %ss, offsetof_thread_context_ptregs_ss(%rcx)
  // Don't bother save/restoring ds/es on amd64
  // movw %ds, offsetof_thread_context_ptregs_ds(%rcx)
  // movw %es, offsetof_thread_context_ptregs_es(%rcx)
  movw %fs, offsetof_thread_context_ptregs_fs(%rcx)
  movw %gs, offsetof_thread_context_ptregs_gs(%rcx)

  // Pop the rflags value saved by pushfq at the top of this macro
  // (%rax was already stored into ptregs above, so it is free scratch now).
  pop %rax
  movq %rax, offsetof_thread_context_ptregs_eflags(%rcx)

  // The user %rsp and return %rip were stashed into sysmsg by the usertrap
  // trampoline before it switched to the syshandler stack.
  movq %gs:offsetof_sysmsg_app_stack, %r8
  movq %r8, offsetof_thread_context_ptregs_rsp(%rcx)
  movq %gs:offsetof_sysmsg_ret_addr, %r9
  movq %r9, offsetof_thread_context_ptregs_rip(%rcx)
.endm

// prepare_exit_syshandler assumes that:
// - the memory address of the thread_context is loaded in %rcx.
// prepare_exit_syshandler does the following:
// - sets sysmsg->ret_addr
// - restores all registers that were saved inside the thread_context struct
//   except for %rsp and rflags.
// - %rcx will be restored as well, and will no longer contain the memory
//   address to the thread context.
// - puts user %rsp and rflags onto the syshandler stack (in that order).
//   rflags cannot be restored at this point because syshandler will clobber
//   it before it exits.
.macro prepare_exit_syshandler
  // After these two pushes the stack top holds [eflags][rsp]; the exit
  // sequence in asm_restore_state pops them in that order.
  movq offsetof_thread_context_ptregs_rsp(%rcx), %rax
  push %rax
  movq offsetof_thread_context_ptregs_eflags(%rcx), %rbx
  push %rbx

  // set sysmsg->ret_addr
  movq offsetof_thread_context_ptregs_rip(%rcx), %r9
  movq %r9, %gs:offsetof_sysmsg_ret_addr

  // Restore segments. Because restoring segments is slow, restore them only
  // if necessary.
  movw %fs, %dx
  cmpw %dx, offsetof_thread_context_ptregs_fs(%rcx)
  je restored_fs
  movw offsetof_thread_context_ptregs_fs(%rcx), %fs
restored_fs:
  movw %gs, %si
  cmpw %si, offsetof_thread_context_ptregs_gs(%rcx)
  je restored_gs
  movw offsetof_thread_context_ptregs_gs(%rcx), %gs
restored_gs:
  // Restore other GP registers
  movq offsetof_thread_context_ptregs_r15(%rcx), %r15
  movq offsetof_thread_context_ptregs_r14(%rcx), %r14
  movq offsetof_thread_context_ptregs_r13(%rcx), %r13
  movq offsetof_thread_context_ptregs_r12(%rcx), %r12
  movq offsetof_thread_context_ptregs_rbp(%rcx), %rbp
  movq offsetof_thread_context_ptregs_rbx(%rcx), %rbx
  movq offsetof_thread_context_ptregs_r11(%rcx), %r11
  movq offsetof_thread_context_ptregs_r10(%rcx), %r10
  movq offsetof_thread_context_ptregs_r9(%rcx), %r9
  movq offsetof_thread_context_ptregs_r8(%rcx), %r8
  movq offsetof_thread_context_ptregs_rax(%rcx), %rax
  // %rcx restored last — it is still the thread_context base pointer until
  // the very last load below.
  movq offsetof_thread_context_ptregs_rdx(%rcx), %rdx
  movq offsetof_thread_context_ptregs_rsi(%rcx), %rsi
  movq offsetof_thread_context_ptregs_rdi(%rcx), %rdi

  movq offsetof_thread_context_ptregs_rcx(%rcx), %rcx
.endm

// save_fpstate saves the current fpstate onto thread_context.fpstate.
// It assumes that:
// - the memory address of the thread_context is loaded in %rcx.
// Clobbers %rdi, %rax, %rdx, %rsi and flags.
.macro save_fpstate
  lea offsetof_thread_context_fpstate(%rcx), %rdi
  // edx:eax form the XSAVE requested-feature bitmap (RFBM) mask.
  movl $XCR0_EAX, %eax
  movl $XCR0_EDX, %edx
  // Pick the best save instruction the host supports; the mode was probed
  // once at startup and recorded in __export_arch_state.
  movl __export_arch_state+offsetof_arch_state_xsave_mode(%rip), %esi
  cmpl $XSAVE_MODE_XSAVEOPT, %esi
  jl use_xsave
  xsaveopt (%rdi)
  jmp fpu_saved
use_xsave:
  cmpl $XSAVE_MODE_XSAVE, %esi
  jl use_fxsave
  xsave (%rdi)
  jmp fpu_saved
use_fxsave:
  fxsave (%rdi)
fpu_saved:
.endm

// restore_fpstate restores the fpstate previously saved onto
// thread_context.fpstate.
// It assumes that:
// - the memory address of the thread_context is loaded in %rcx.
// Clobbers %rdi, %rax, %rdx and flags.
.macro restore_fpstate
  // We only need to restore fpstate if we were signalled that it changed
  // (syshandler does not modify fpstate).
  cmpl $0, offsetof_thread_context_fpstate_changed(%rcx)
  je fpu_restored

  lea offsetof_thread_context_fpstate(%rcx), %rdi
  mov __export_arch_state+offsetof_arch_state_xsave_mode(%rip), %eax
  cmpl $XSAVE_MODE_FXSAVE, %eax
  jz use_fxrstor
use_xrstor:
  // edx:eax = RFBM mask for xrstor, mirroring save_fpstate.
  movl $XCR0_EAX, %eax
  movl $XCR0_EDX, %edx
  xrstor (%rdi)
  jmp fpu_restored
use_fxrstor:
  fxrstor (%rdi)
fpu_restored:
.endm

// Syshandler:
////////////////////////////////////////
.globl __export_syshandler;
.type __export_syshandler, @function;
.align 4, 0x00;
__export_syshandler:
  // The start of this function is in a usertrap trampoline:
  // mov sysmsg.ThreadStatePrep, %gs:offset(msg.State)
  // mov %rsp,%gs:0x20 // msg.AppStack
  // mov %gs:0x18,%rsp // msg.SyshandlerStack
  // movabs $ret_addr, %rax
  // mov %rax,%gs:0x8 // msg.RetAddr
  // mov sysno,%eax
  // jmpq *%gs:0x10 // msg.Syshandler
  prepare_enter_syshandler
  save_fpstate

  // Hand off to the Go/C syscall handler; it may switch thread contexts
  // before control reaches asm_restore_state below.
  callq __syshandler

.globl asm_restore_state;
.type asm_restore_state, @function;
asm_restore_state:
  // thread_context may have changed, therefore we reload it into %rcx anew.
  movq %gs:offsetof_sysmsg_context, %rcx
  restore_fpstate

  prepare_exit_syshandler

  // Now syshandler is exiting for good; restore user rflags and %rsp.
  // prepare_exit_syshandler left [eflags][user rsp] on the stack, top first.
  popfq
  movq 0(%rsp), %rsp
  jmp *%gs:offsetof_sysmsg_ret_addr // msg->ret_addr

.size __export_syshandler, . - __export_syshandler