/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "tcg.h"

int gen_new_label(void);

static inline void tcg_gen_op0(TCGOpcode opc)
{
    *gen_opc_ptr++ = opc;
}

static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}

static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}

static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = arg1;
}

static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}

static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
}

static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = arg1;
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}

static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
}

static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
                                    TCGv_i32 arg2, TCGArg arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = arg3;
}
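
/* Note: each tcg_gen_opN* emitter above and below simply appends the opcode
   to the gen_opc_ptr stream and its N operands, encoded as TCGArg values, to
   the gen_opparam_ptr stream; the higher-level tcg_gen_* helpers later in
   this file are all built on these emitters. */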

static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
                                    TCGv_i64 arg2, TCGArg arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(val);
    *gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(val);
    *gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc, TCGv_i64 val,
                                                TCGv_i32 addr, TCGArg mem_index)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(val);
    *gen_opparam_ptr++ = GET_TCGV_I32(addr);
    *gen_opparam_ptr++ = mem_index;
}

static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc, TCGv_i64 val,
                                                TCGv_i64 addr, TCGArg mem_index)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(val);
    *gen_opparam_ptr++ = GET_TCGV_I64(addr);
    *gen_opparam_ptr++ = mem_index;
}

static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
}

static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
}

static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGArg arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGArg arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = arg3;
    *gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = arg3;
    *gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
}

static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
}

static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = arg4;
    *gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = arg4;
    *gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
                                   TCGv_i32 arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg6);
}

static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
                                   TCGv_i64 arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg6);
}

static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4,
                                    TCGv_i32 arg5, TCGArg arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1,
                                    TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4,
                                    TCGv_i64 arg5, TCGArg arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *gen_opparam_ptr++ = arg5;
    *gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *gen_opparam_ptr++ = arg5;
    *gen_opparam_ptr++ = arg6;
}

static inline void gen_set_label(int n)
{
    tcg_gen_op1i(INDEX_op_set_label, n);
}

static inline void tcg_gen_br(int label)
{
    tcg_gen_op1i(INDEX_op_br, label);
}

static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!TCGV_EQUAL_I32(ret, arg))
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}

static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
}

/* A version of dh_sizemask from def-helper.h that doesn't rely on
   preprocessor magic.  */
static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
{
    return (is_64bit << n*2) | (is_signed << (n*2 + 1));
}
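
/* For illustration (this mirrors the pattern used by tcg_gen_div_i32 and
   friends further down): for a helper with a signed 32-bit return value
   (slot 0) and two signed 32-bit arguments (slots 1 and 2), the mask is
   built as
       sizemask = tcg_gen_sizemask(0, 0, 1)
                | tcg_gen_sizemask(1, 0, 1)
                | tcg_gen_sizemask(2, 0, 1);
   i.e. one (is_64bit, is_signed) bit pair per argument slot. */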

/* helper calls */
static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
                                   TCGArg ret, int nargs, TCGArg *args)
{
    TCGv_ptr fn;
    fn = tcg_const_ptr(func);
    tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret,
                  nargs, args);
    tcg_temp_free_ptr(fn);
}

/* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
   reserved for helpers in tcg-runtime.c.  These helpers are all const
   and pure, hence the call to tcg_gen_callN() with TCG_CALL_CONST |
   TCG_CALL_PURE.  This may need to be adjusted if these functions
   start to be used with other helpers. */
static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret,
                                    TCGv_i32 a, TCGv_i32 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I32(a);
    args[1] = GET_TCGV_I32(b);
    tcg_gen_callN(&tcg_ctx, fn, TCG_CALL_CONST | TCG_CALL_PURE, sizemask,
                  GET_TCGV_I32(ret), 2, args);
    tcg_temp_free_ptr(fn);
}

static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret,
                                    TCGv_i64 a, TCGv_i64 b)
{
    TCGv_ptr fn;
    TCGArg args[2];
    fn = tcg_const_ptr(func);
    args[0] = GET_TCGV_I64(a);
    args[1] = GET_TCGV_I64(b);
    tcg_gen_callN(&tcg_ctx, fn, TCG_CALL_CONST | TCG_CALL_PURE, sizemask,
                  GET_TCGV_I64(ret), 2, args);
    tcg_temp_free_ptr(fn);
}

/* 32 bit ops */

static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}

static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg1);
    tcg_gen_sub_i32(ret, t0, arg2);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
                                      TCGv_i32 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
                                       int32_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, label_index);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                                        TCGv_i32 arg1, int32_t arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_setcond_i32(cond, ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
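
/* Typical front-end usage of the label and branch helpers above
   (an illustrative sketch; "skip" and "val" are hypothetical names):
       int skip = gen_new_label();
       tcg_gen_brcondi_i32(TCG_COND_EQ, val, 0, skip);
       ...ops generated only when val != 0...
       gen_set_label(skip);
 */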

static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
    }
}
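
/* The division/remainder expanders above pick, in order: the backend's
   native div/rem opcode, the two-output div2/divu2 opcode (feeding the
   sign- or zero-extended high half explicitly), and finally a call into
   the tcg-runtime helpers.  Below, TCG_TARGET_REG_BITS selects how i64
   ops are built: on 32-bit hosts a TCGv_i64 is a TCGV_LOW/TCGV_HIGH pair
   of i32 temporaries and the i64 ops are synthesized from i32 ops, while
   64-bit hosts use native i64 opcodes. */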

#if TCG_TARGET_REG_BITS == 32

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg)) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* The sign bit comes from the low half just loaded.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
    /* since arg2 and ret have different types, they cannot be the
       same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace add2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace sub2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

/* XXX: use generic code when basic block handling is OK or CPU
   specific code (x86) */
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shl_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_shr_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_sar_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                          TCGV_HIGH(arg2), cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
    }
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
                    TCGV_LOW(arg1), TCGV_LOW(arg2));
    /* Allow the optimizer room to replace mulu2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
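
/* The 64-bit multiply above uses the schoolbook identity, taken mod 2^64:
       (aH*2^32 + aL) * (bH*2^32 + bL) = mulu2(aL, bL) + ((aL*bH + aH*bL) << 32)
   so only the 32x32->64 low product plus the two cross products that land
   in the high word are generated; aH*bH cannot affect the result. */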

static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
}

#else

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg))
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
    }
}
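
/* Note: tcg_gen_setcond_* below stores 1 in ret when cond holds for
   (arg1, arg2) and 0 otherwise, as the TCG_COND_ALWAYS/TCG_COND_NEVER
   special cases make explicit. */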

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
}

static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
    }
}
#endif /* TCG_TARGET_REG_BITS == 32 */

static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg1);
    tcg_gen_sub_i64(ret, t0, arg2);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
                                       int64_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, label_index);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                                        TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}


/***************************************/
/* optional operations */
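
/* Each "optional" op below first checks the corresponding TCG_TARGET_HAS_*
   flag; when the backend does not provide the opcode, an equivalent sequence
   is synthesized from the always-available ops (shifts, and/or/xor and
   friends), so front ends can use these helpers unconditionally. */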

static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(ret, TCGV_LOW(arg));
}

static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGv_i32 t0, t1;
    t0 = tcg_temp_new_i32();
    t1 = tcg_temp_new_i32();

    tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
    tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
    tcg_gen_mov_i32(TCGV_LOW(ret), t1);
    tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
#else

static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers.  This will probably break MIPS64 targets.  */
static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

#endif

static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_const_i32(0);
        tcg_gen_sub_i32(ret, t0, arg);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_const_i64(0);
        tcg_gen_sub_i64(ret, t0, arg);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}

static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
#else
    tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
#endif
}

static inline void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
#else
    tcg_gen_op1_i64(INDEX_op_discard, arg);
#endif
}

static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)

static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}
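
/* Added note (not part of the original header): the rotate fallbacks below
 * use the identity rotl(x, n) = (x << n) | (x >> (width - n)), and the
 * mirror image for rotr.  For example, with 32-bit values and n = 8:
 *   rotl(0x12345678, 8) = 0x34567800 | 0x00000012 = 0x34567812
 */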

static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
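
/* Added note (not part of the original header): tcg_gen_deposit_* replaces
 * the len-bit field of arg1 that starts at bit ofs with the low len bits of
 * arg2.  A worked example of the generic fallback below, for ofs = 8 and
 * len = 8 on 32 bits:
 *   mask           = (1u << 8) - 1 = 0x000000ff
 *   ~(mask << ofs)                 = 0xffff00ff
 *   ret = (arg1 & 0xffff00ff) | ((arg2 & 0xff) << 8)
 * so bits [15:8] of ret come from arg2 and all other bits from arg1.
 */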

static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
                                       TCGv_i32 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                       TCGv_i64 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

#if TCG_TARGET_REG_BITS == 32
    if (ofs >= 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                            TCGV_LOW(arg2), ofs - 32, len);
        return;
    }
    if (ofs + len <= 32) {
        tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                            TCGV_LOW(arg2), ofs, len);
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        return;
    }
#endif

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
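
/* Added note (not part of the original header): the concat helpers below
 * conceptually compute dest = (uint64_t)low | ((uint64_t)high << 32).  On
 * 32-bit hosts this is two register moves; on 64-bit hosts it is a
 * zero-extension followed by either a deposit or a shift-and-or.
 */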

static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
                                          TCGv_i32 high)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(dest), low);
    tcg_gen_mov_i32(TCGV_HIGH(dest), high);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
#endif
}

static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
                                        TCGv_i64 high)
{
    tcg_gen_deposit_i64(dest, low, high, 32, 32);
}

static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 c1, TCGv_i32 c2,
                                       TCGv_i32 v1, TCGv_i32 v2)
{
    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 c1, TCGv_i64 c2,
                                       TCGv_i64 v1, TCGv_i64 v2)
{
#if TCG_TARGET_REG_BITS == 32
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                     TCGV_LOW(c1), TCGV_HIGH(c1),
                     TCGV_LOW(c2), TCGV_HIGH(c2), cond);

    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_movi_i32(t1, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));
    } else {
        tcg_gen_neg_i32(t0, t0);

        tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

        tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
#else
    if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
#endif
}
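
/* Added note (not part of the original header): the movcond fallbacks above
 * stay branch-free by turning the 0/1 setcond result into an all-zeros or
 * all-ones mask with neg, then selecting
 *   ret = (v1 & mask) | (v2 & ~mask)
 * which yields v1 when the condition holds and v2 otherwise.
 */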

/***************************************/
/* QEMU specific operations.  Their type depends on the QEMU CPU type.  */

#ifndef TARGET_LONG_BITS
#error must include QEMU headers
#endif

#if TARGET_LONG_BITS == 32
#define TCGv TCGv_i32
#define tcg_temp_new() tcg_temp_new_i32()
#define tcg_global_reg_new tcg_global_reg_new_i32
#define tcg_global_mem_new tcg_global_mem_new_i32
#define tcg_temp_local_new() tcg_temp_local_new_i32()
#define tcg_temp_free tcg_temp_free_i32
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
#else
#define TCGv TCGv_i64
#define tcg_temp_new() tcg_temp_new_i64()
#define tcg_global_reg_new tcg_global_reg_new_i64
#define tcg_global_mem_new tcg_global_mem_new_i64
#define tcg_temp_local_new() tcg_temp_local_new_i64()
#define tcg_temp_free tcg_temp_free_i64
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
#endif

/* debug info: write the PC of the corresponding QEMU CPU instruction */
static inline void tcg_gen_debug_insn_start(uint64_t pc)
{
    /* XXX: must really use a 32 bit size for TCGArg in all cases */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    tcg_gen_op2ii(INDEX_op_debug_insn_start,
                  (uint32_t)(pc), (uint32_t)(pc >> 32));
#else
    tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
#endif
}

static inline void tcg_gen_exit_tb(tcg_target_long val)
{
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

static inline void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
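
/* Added illustrative sketch (not part of the original header): a front end
 * typically chains translation blocks with a sequence along these lines,
 * where 'tb' is the current TranslationBlock and 'n' is 0 or 1:
 *
 *   tcg_gen_goto_tb(n);
 *   ... update guest state for the destination, e.g. the PC ...
 *   tcg_gen_exit_tb((tcg_target_long)tb + n);
 *
 * The value returned through exit_tb identifies the block and exit slot so
 * that the execution loop can later patch the goto_tb jump.
 */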

#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
                     addr, mem_index);
#else
    tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
                     mem_index);
#else
    tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}

#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))

#else /* TCG_TARGET_REG_BITS == 32 */
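
/* Added note (not part of the original header): on 64-bit hosts the guest
 * address and 64-bit data each fit in a single TCG register, so the
 * qemu_ld/st helpers below simply forward to tcg_gen_qemu_ldst_op with the
 * right opcode instead of splitting values into low/high halves.
 */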

static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
#endif
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
#endif
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
}

#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))

#endif /* TCG_TARGET_REG_BITS != 32 */

#if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_neg_tl tcg_gen_neg_i64
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_not_tl tcg_gen_not_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
#define tcg_gen_mul_tl tcg_gen_mul_i64
#define tcg_gen_muli_tl tcg_gen_muli_i64
#define tcg_gen_div_tl tcg_gen_div_i64
#define tcg_gen_rem_tl tcg_gen_rem_i64
#define tcg_gen_divu_tl tcg_gen_divu_i64
#define tcg_gen_remu_tl tcg_gen_remu_i64
#define tcg_gen_discard_tl tcg_gen_discard_i64
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i64
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
#define tcg_gen_nand_tl tcg_gen_nand_i64
#define tcg_gen_nor_tl tcg_gen_nor_i64
#define tcg_gen_orc_tl tcg_gen_orc_i64
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
#define tcg_const_tl tcg_const_i64
#define tcg_const_local_tl tcg_const_local_i64
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
#else
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_neg_tl tcg_gen_neg_i32
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_not_tl tcg_gen_not_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
#define tcg_gen_mul_tl tcg_gen_mul_i32
#define tcg_gen_muli_tl tcg_gen_muli_i32
#define tcg_gen_div_tl tcg_gen_div_i32
#define tcg_gen_rem_tl tcg_gen_rem_i32
#define tcg_gen_divu_tl tcg_gen_divu_i32
#define tcg_gen_remu_tl tcg_gen_remu_i32
#define tcg_gen_discard_tl tcg_gen_discard_i32
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i32
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
#define tcg_gen_nand_tl tcg_gen_nand_i32
#define tcg_gen_nor_tl tcg_gen_nor_i32
#define tcg_gen_orc_tl tcg_gen_orc_i32
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
#define tcg_const_tl tcg_const_i32
#define tcg_const_local_tl tcg_const_local_i32
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
#endif
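
/* Added illustrative sketch (hypothetical helper, not part of this header):
 * the _tl aliases above let front-end code be written once for 32-bit and
 * 64-bit guests.  Something like the following expands to the _i32 or _i64
 * generators depending on TARGET_LONG_BITS:
 *
 *   static inline void gen_add_imm_example(TCGv dst, TCGv src)
 *   {
 *       TCGv t = tcg_temp_new();
 *       tcg_gen_movi_tl(t, 0x10);
 *       tcg_gen_add_tl(dst, src, t);
 *       tcg_temp_free(t);
 *   }
 */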

#if TCG_TARGET_REG_BITS == 32
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \
                                                 TCGV_PTR_TO_NAT(A), \
                                                 TCGV_PTR_TO_NAT(B))
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \
                                                   TCGV_PTR_TO_NAT(A), (B))
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#else /* TCG_TARGET_REG_BITS == 32 */
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \
                                                 TCGV_PTR_TO_NAT(A), \
                                                 TCGV_PTR_TO_NAT(B))
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), \
                                                   TCGV_PTR_TO_NAT(A), (B))
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* TCG_TARGET_REG_BITS != 32 */