Matching lines from QEMU's x86 XSAVE helpers: x86_cpu_xsave_all_areas() packs guest FPU/SSE and extended state from CPUX86State into an XSAVE-format buffer, and x86_cpu_xrstor_all_areas() unpacks such a buffer back into CPUX86State.
/* ... See the COPYING file in the top-level directory. */

void x86_cpu_xsave_all_areas(X86CPU *cpu, void *buf, uint32_t buflen)
{
    CPUX86State *env = &cpu->env;
    const ExtSaveArea *e, *f;
    int i;
    X86LegacyXSaveArea *legacy;
    X86XSaveHeader *header;
    uint16_t cwd, swd, twd;

    memset(buf, 0, buflen);

    /* The legacy FP/SSE region sits at the start of the XSAVE area,
     * immediately followed by the XSAVE header. */
    e = &x86_ext_save_areas[XSTATE_FP_BIT];
    legacy = buf + e->offset;
    header = buf + e->offset + sizeof(*legacy);

    /* Fold the top-of-stack index into FSW bits 11..13 and build the
     * abridged tag word: bit i is set when st(i) is valid (fptags[] uses
     * 1 to mean "empty"). */
    twd = 0;
    swd = env->fpus & ~(7 << 11);
    swd |= (env->fpstt & 7) << 11;
    cwd = env->fpuc;
    for (i = 0; i < 8; i++) {
        twd |= (!env->fptags[i]) << i;
    }
    legacy->fcw = cwd;
    legacy->fsw = swd;
    legacy->ftw = twd;
    legacy->fpop = env->fpop;
    legacy->fpip = env->fpip;
    legacy->fpdp = env->fpdp;
    memcpy(&legacy->fpregs, env->fpregs,
           sizeof(env->fpregs));
    legacy->mxcsr = env->mxcsr;

    /* The low 128 bits of each vector register go into the legacy XMM slots. */
    for (i = 0; i < CPU_NB_REGS; i++) {
        uint8_t *xmm = legacy->xmm_regs[i];

        stq_p(xmm,     env->xmm_regs[i].ZMM_Q(0));
        stq_p(xmm + 8, env->xmm_regs[i].ZMM_Q(1));
    }

    header->xstate_bv = env->xstate_bv;
    /* AVX: the upper 128 bits of YMM0..YMM15. */
    e = &x86_ext_save_areas[XSTATE_YMM_BIT];
    if (e->size && e->offset) {
        XSaveAVX *avx = buf + e->offset;

        for (i = 0; i < CPU_NB_REGS; i++) {
            uint8_t *ymmh = avx->ymmh[i];

            stq_p(ymmh,     env->xmm_regs[i].ZMM_Q(2));
            stq_p(ymmh + 8, env->xmm_regs[i].ZMM_Q(3));
        }
    }
    /* MPX: bound registers and BNDCSR are two separate components. */
    e = &x86_ext_save_areas[XSTATE_BNDREGS_BIT];
    if (e->size && e->offset) {
        XSaveBNDREG *bndreg;
        XSaveBNDCSR *bndcsr;

        f = &x86_ext_save_areas[XSTATE_BNDCSR_BIT];
        assert(f->size);
        assert(f->offset);

        bndreg = buf + e->offset;
        bndcsr = buf + f->offset;

        memcpy(&bndreg->bnd_regs, env->bnd_regs,
               sizeof(env->bnd_regs));
        bndcsr->bndcsr = env->bndcs_regs;
    }
    /* AVX-512: opmask registers and the upper 256 bits of ZMM0..ZMM15. */
    e = &x86_ext_save_areas[XSTATE_OPMASK_BIT];
    if (e->size && e->offset) {
        XSaveOpmask *opmask;
        XSaveZMM_Hi256 *zmm_hi256;
        XSaveHi16_ZMM *hi16_zmm;

        f = &x86_ext_save_areas[XSTATE_ZMM_Hi256_BIT];
        assert(f->size);
        assert(f->offset);

        opmask = buf + e->offset;
        zmm_hi256 = buf + f->offset;

        memcpy(&opmask->opmask_regs, env->opmask_regs,
               sizeof(env->opmask_regs));

        for (i = 0; i < CPU_NB_REGS; i++) {
            uint8_t *zmmh = zmm_hi256->zmm_hi256[i];

            stq_p(zmmh,      env->xmm_regs[i].ZMM_Q(4));
            stq_p(zmmh + 8,  env->xmm_regs[i].ZMM_Q(5));
            stq_p(zmmh + 16, env->xmm_regs[i].ZMM_Q(6));
            stq_p(zmmh + 24, env->xmm_regs[i].ZMM_Q(7));
        }
        /* ZMM16..ZMM31 (Hi16_ZMM) are stored whole. */
        f = &x86_ext_save_areas[XSTATE_Hi16_ZMM_BIT];
        assert(f->size);
        assert(f->offset);

        hi16_zmm = buf + f->offset;

        memcpy(&hi16_zmm->hi16_zmm, &env->xmm_regs[16],
               16 * sizeof(env->xmm_regs[16]));
    }
    /* Protection-key rights register. */
    e = &x86_ext_save_areas[XSTATE_PKRU_BIT];
    if (e->size && e->offset) {
        XSavePKRU *pkru = buf + e->offset;

        memcpy(pkru, &env->pkru, sizeof(env->pkru));
    }
    /* AMX tile configuration (TILECFG). */
    e = &x86_ext_save_areas[XSTATE_XTILE_CFG_BIT];
    if (e->size && e->offset) {
        XSaveXTILECFG *tilecfg = buf + e->offset;

        memcpy(tilecfg, &env->xtilecfg, sizeof(env->xtilecfg));
    }
    /* AMX tile data; additionally check that the component fits in buflen. */
    e = &x86_ext_save_areas[XSTATE_XTILE_DATA_BIT];
    if (e->size && e->offset && buflen >= e->size + e->offset) {
        XSaveXTILEDATA *tiledata = buf + e->offset;

        memcpy(tiledata, &env->xtiledata, sizeof(env->xtiledata));
    }
}
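The only non-obvious packing above is the FPU status and tag handling. Below is a minimal, self-contained sketch (hypothetical values; only the two bit-manipulation expressions come from the listing) of the round trip that x86_cpu_xsave_all_areas() performs and x86_cpu_xrstor_all_areas() reverses: the top-of-stack index is folded into status-word bits 11..13, and the abridged tag word gets bit i set for every valid st(i), since env->fptags[] uses 1 to mean "empty".

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint16_t fpus = 0x0081;                          /* status word, TOP field clear */
    unsigned fpstt = 5;                              /* top-of-stack index */
    uint8_t fptags[8] = { 0, 0, 1, 1, 1, 0, 1, 1 };  /* 1 == register empty */
    uint16_t swd, twd;
    int i;

    /* Pack (save direction), as in x86_cpu_xsave_all_areas(). */
    swd = fpus & ~(7 << 11);
    swd |= (fpstt & 7) << 11;
    twd = 0;
    for (i = 0; i < 8; i++) {
        twd |= (!fptags[i]) << i;
    }

    /* Unpack (restore direction), as in x86_cpu_xrstor_all_areas(), and
     * check that the round trip recovers the original values. */
    assert(((swd >> 11) & 7) == fpstt);
    for (i = 0; i < 8; i++) {
        assert((unsigned)!((twd >> i) & 1) == fptags[i]);
    }
    return 0;
}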
void x86_cpu_xrstor_all_areas(X86CPU *cpu, const void *buf, uint32_t buflen)
{
    CPUX86State *env = &cpu->env;
    const ExtSaveArea *e, *f, *g;
    int i;
    const X86LegacyXSaveArea *legacy;
    const X86XSaveHeader *header;
    uint16_t cwd, swd, twd;

    e = &x86_ext_save_areas[XSTATE_FP_BIT];
    legacy = buf + e->offset;
    header = buf + e->offset + sizeof(*legacy);

    cwd = legacy->fcw;
    swd = legacy->fsw;
    twd = legacy->ftw;
    env->fpop = legacy->fpop;
    /* Recover TOP from FSW bits 11..13 and expand the abridged tag word. */
    env->fpstt = (swd >> 11) & 7;
    env->fpus = swd;
    env->fpuc = cwd;
    for (i = 0; i < 8; i++) {
        env->fptags[i] = !((twd >> i) & 1);
    }
    env->fpip = legacy->fpip;
    env->fpdp = legacy->fpdp;
    env->mxcsr = legacy->mxcsr;
    memcpy(env->fpregs, &legacy->fpregs,
           sizeof(env->fpregs));

    for (i = 0; i < CPU_NB_REGS; i++) {
        const uint8_t *xmm = legacy->xmm_regs[i];

        env->xmm_regs[i].ZMM_Q(0) = ldq_p(xmm);
        env->xmm_regs[i].ZMM_Q(1) = ldq_p(xmm + 8);
    }

    env->xstate_bv = header->xstate_bv;
    e = &x86_ext_save_areas[XSTATE_YMM_BIT];
    if (e->size && e->offset) {
        const XSaveAVX *avx = buf + e->offset;

        for (i = 0; i < CPU_NB_REGS; i++) {
            const uint8_t *ymmh = avx->ymmh[i];

            env->xmm_regs[i].ZMM_Q(2) = ldq_p(ymmh);
            env->xmm_regs[i].ZMM_Q(3) = ldq_p(ymmh + 8);
        }
    }
    e = &x86_ext_save_areas[XSTATE_BNDREGS_BIT];
    if (e->size && e->offset) {
        const XSaveBNDREG *bndreg;
        const XSaveBNDCSR *bndcsr;

        f = &x86_ext_save_areas[XSTATE_BNDCSR_BIT];
        assert(f->size);
        assert(f->offset);

        bndreg = buf + e->offset;
        bndcsr = buf + f->offset;

        memcpy(env->bnd_regs, &bndreg->bnd_regs,
               sizeof(env->bnd_regs));
        env->bndcs_regs = bndcsr->bndcsr;
    }
    e = &x86_ext_save_areas[XSTATE_OPMASK_BIT];
    if (e->size && e->offset) {
        const XSaveOpmask *opmask;
        const XSaveZMM_Hi256 *zmm_hi256;
        const XSaveHi16_ZMM *hi16_zmm;

        f = &x86_ext_save_areas[XSTATE_ZMM_Hi256_BIT];
        assert(f->size);
        assert(f->offset);

        g = &x86_ext_save_areas[XSTATE_Hi16_ZMM_BIT];
        assert(g->size);
        assert(g->offset);

        opmask = buf + e->offset;
        zmm_hi256 = buf + f->offset;
        hi16_zmm = buf + g->offset;

        memcpy(env->opmask_regs, &opmask->opmask_regs,
               sizeof(env->opmask_regs));

        for (i = 0; i < CPU_NB_REGS; i++) {
            const uint8_t *zmmh = zmm_hi256->zmm_hi256[i];

            env->xmm_regs[i].ZMM_Q(4) = ldq_p(zmmh);
            env->xmm_regs[i].ZMM_Q(5) = ldq_p(zmmh + 8);
            env->xmm_regs[i].ZMM_Q(6) = ldq_p(zmmh + 16);
            env->xmm_regs[i].ZMM_Q(7) = ldq_p(zmmh + 24);
        }
        memcpy(&env->xmm_regs[16], &hi16_zmm->hi16_zmm,
               16 * sizeof(env->xmm_regs[16]));
    }
    e = &x86_ext_save_areas[XSTATE_PKRU_BIT];
    if (e->size && e->offset) {
        const XSavePKRU *pkru;

        pkru = buf + e->offset;
        memcpy(&env->pkru, pkru, sizeof(env->pkru));
    }
    e = &x86_ext_save_areas[XSTATE_XTILE_CFG_BIT];
    if (e->size && e->offset) {
        const XSaveXTILECFG *tilecfg = buf + e->offset;

        memcpy(&env->xtilecfg, tilecfg, sizeof(env->xtilecfg));
    }
    e = &x86_ext_save_areas[XSTATE_XTILE_DATA_BIT];
    if (e->size && e->offset && buflen >= e->size + e->offset) {
        const XSaveXTILEDATA *tiledata = buf + e->offset;

        memcpy(&env->xtiledata, tiledata, sizeof(env->xtiledata));
    }
}
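Both functions locate every optional component through its x86_ext_save_areas[] entry and skip components whose size or offset is zero; only the XTILEDATA component additionally checks that it fits inside buflen. The sketch below restates that guard pattern with a stand-in table-entry type; the struct, helper name, parameters, and offsets are hypothetical and not QEMU API.

#include <stdint.h>
#include <string.h>

/* Stand-in for one x86_ext_save_areas[] entry: just the offset and size of
 * a single XSAVE component (the real ExtSaveArea is defined in QEMU and
 * carries more fields). */
typedef struct {
    uint32_t offset;
    uint32_t size;
} ComponentArea;

/* Copy one component into an XSAVE buffer, mirroring the guards above: skip
 * components the table does not advertise (zero size or offset) and anything
 * that would not fit in the destination buffer.  Returns 1 when written. */
static int put_component(void *buf, uint32_t buflen, const ComponentArea *e,
                         const void *src, uint32_t srclen)
{
    if (!e->size || !e->offset || buflen < e->offset + e->size) {
        return 0;
    }
    memcpy((uint8_t *)buf + e->offset, src,
           srclen < e->size ? srclen : e->size);
    return 1;
}

int main(void)
{
    uint8_t xsave_buf[4096] = { 0 };
    uint64_t pkru = 0x55;
    ComponentArea pkru_area = { .offset = 2688, .size = 8 };  /* example values only */

    return put_component(xsave_buf, sizeof(xsave_buf), &pkru_area,
                         &pkru, sizeof(pkru)) ? 0 : 1;
}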