/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "tcg.h"

int gen_new_label(void);

static inline void tcg_gen_op0(TCGOpcode opc)
{
    *gen_opc_ptr++ = opc;
}

static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}

static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}

static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = arg1;
}

static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}

static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
}

static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = arg1;
    *gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}

static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
}

static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
                                    TCGv_i32 arg2, TCGArg arg3)
{
    *gen_opc_ptr++ = opc;
    *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *gen_opparam_ptr++ = arg3;
}
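/* Each tcg_gen_opN* helper above appends one opcode word through gen_opc_ptr
   and its operands, flattened to TCGArg words, through gen_opparam_ptr.
   For instance, tcg_gen_op3_i32(INDEX_op_add_i32, r, a, b) stores the single
   opcode INDEX_op_add_i32 followed by three parameter words for r, a and b;
   later passes read the opcode and parameter streams back in lockstep. */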
112 113 static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1, 114 TCGv_i64 arg2, TCGArg arg3) 115 { 116 *gen_opc_ptr++ = opc; 117 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 118 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 119 *gen_opparam_ptr++ = arg3; 120 } 121 122 static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val, 123 TCGv_ptr base, TCGArg offset) 124 { 125 *gen_opc_ptr++ = opc; 126 *gen_opparam_ptr++ = GET_TCGV_I32(val); 127 *gen_opparam_ptr++ = GET_TCGV_PTR(base); 128 *gen_opparam_ptr++ = offset; 129 } 130 131 static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val, 132 TCGv_ptr base, TCGArg offset) 133 { 134 *gen_opc_ptr++ = opc; 135 *gen_opparam_ptr++ = GET_TCGV_I64(val); 136 *gen_opparam_ptr++ = GET_TCGV_PTR(base); 137 *gen_opparam_ptr++ = offset; 138 } 139 140 static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc, TCGv_i64 val, 141 TCGv_i32 addr, TCGArg mem_index) 142 { 143 *gen_opc_ptr++ = opc; 144 *gen_opparam_ptr++ = GET_TCGV_I64(val); 145 *gen_opparam_ptr++ = GET_TCGV_I32(addr); 146 *gen_opparam_ptr++ = mem_index; 147 } 148 149 static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc, TCGv_i64 val, 150 TCGv_i64 addr, TCGArg mem_index) 151 { 152 *gen_opc_ptr++ = opc; 153 *gen_opparam_ptr++ = GET_TCGV_I64(val); 154 *gen_opparam_ptr++ = GET_TCGV_I64(addr); 155 *gen_opparam_ptr++ = mem_index; 156 } 157 158 static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 159 TCGv_i32 arg3, TCGv_i32 arg4) 160 { 161 *gen_opc_ptr++ = opc; 162 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 163 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 164 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 165 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 166 } 167 168 static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 169 TCGv_i64 arg3, TCGv_i64 arg4) 170 { 171 *gen_opc_ptr++ = opc; 172 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 173 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 174 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 175 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 176 } 177 178 static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 179 TCGv_i32 arg3, TCGArg arg4) 180 { 181 *gen_opc_ptr++ = opc; 182 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 183 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 184 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 185 *gen_opparam_ptr++ = arg4; 186 } 187 188 static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 189 TCGv_i64 arg3, TCGArg arg4) 190 { 191 *gen_opc_ptr++ = opc; 192 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 193 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 194 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 195 *gen_opparam_ptr++ = arg4; 196 } 197 198 static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 199 TCGArg arg3, TCGArg arg4) 200 { 201 *gen_opc_ptr++ = opc; 202 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 203 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 204 *gen_opparam_ptr++ = arg3; 205 *gen_opparam_ptr++ = arg4; 206 } 207 208 static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 209 TCGArg arg3, TCGArg arg4) 210 { 211 *gen_opc_ptr++ = opc; 212 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 213 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 214 *gen_opparam_ptr++ = arg3; 215 *gen_opparam_ptr++ = arg4; 216 } 217 218 static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 219 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5) 220 { 221 *gen_opc_ptr++ = opc; 222 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 223 
*gen_opparam_ptr++ = GET_TCGV_I32(arg2); 224 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 225 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 226 *gen_opparam_ptr++ = GET_TCGV_I32(arg5); 227 } 228 229 static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 230 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5) 231 { 232 *gen_opc_ptr++ = opc; 233 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 234 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 235 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 236 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 237 *gen_opparam_ptr++ = GET_TCGV_I64(arg5); 238 } 239 240 static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 241 TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5) 242 { 243 *gen_opc_ptr++ = opc; 244 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 245 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 246 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 247 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 248 *gen_opparam_ptr++ = arg5; 249 } 250 251 static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 252 TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5) 253 { 254 *gen_opc_ptr++ = opc; 255 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 256 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 257 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 258 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 259 *gen_opparam_ptr++ = arg5; 260 } 261 262 static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1, 263 TCGv_i32 arg2, TCGv_i32 arg3, 264 TCGArg arg4, TCGArg arg5) 265 { 266 *gen_opc_ptr++ = opc; 267 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 268 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 269 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 270 *gen_opparam_ptr++ = arg4; 271 *gen_opparam_ptr++ = arg5; 272 } 273 274 static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1, 275 TCGv_i64 arg2, TCGv_i64 arg3, 276 TCGArg arg4, TCGArg arg5) 277 { 278 *gen_opc_ptr++ = opc; 279 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 280 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 281 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 282 *gen_opparam_ptr++ = arg4; 283 *gen_opparam_ptr++ = arg5; 284 } 285 286 static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 287 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5, 288 TCGv_i32 arg6) 289 { 290 *gen_opc_ptr++ = opc; 291 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 292 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 293 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 294 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 295 *gen_opparam_ptr++ = GET_TCGV_I32(arg5); 296 *gen_opparam_ptr++ = GET_TCGV_I32(arg6); 297 } 298 299 static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2, 300 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5, 301 TCGv_i64 arg6) 302 { 303 *gen_opc_ptr++ = opc; 304 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 305 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 306 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 307 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 308 *gen_opparam_ptr++ = GET_TCGV_I64(arg5); 309 *gen_opparam_ptr++ = GET_TCGV_I64(arg6); 310 } 311 312 static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2, 313 TCGv_i32 arg3, TCGv_i32 arg4, 314 TCGv_i32 arg5, TCGArg arg6) 315 { 316 *gen_opc_ptr++ = opc; 317 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 318 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 319 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 320 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 321 *gen_opparam_ptr++ = GET_TCGV_I32(arg5); 322 *gen_opparam_ptr++ = arg6; 323 } 324 325 static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, 
TCGv_i64 arg2, 326 TCGv_i64 arg3, TCGv_i64 arg4, 327 TCGv_i64 arg5, TCGArg arg6) 328 { 329 *gen_opc_ptr++ = opc; 330 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 331 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 332 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 333 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 334 *gen_opparam_ptr++ = GET_TCGV_I64(arg5); 335 *gen_opparam_ptr++ = arg6; 336 } 337 338 static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1, 339 TCGv_i32 arg2, TCGv_i32 arg3, 340 TCGv_i32 arg4, TCGArg arg5, TCGArg arg6) 341 { 342 *gen_opc_ptr++ = opc; 343 *gen_opparam_ptr++ = GET_TCGV_I32(arg1); 344 *gen_opparam_ptr++ = GET_TCGV_I32(arg2); 345 *gen_opparam_ptr++ = GET_TCGV_I32(arg3); 346 *gen_opparam_ptr++ = GET_TCGV_I32(arg4); 347 *gen_opparam_ptr++ = arg5; 348 *gen_opparam_ptr++ = arg6; 349 } 350 351 static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1, 352 TCGv_i64 arg2, TCGv_i64 arg3, 353 TCGv_i64 arg4, TCGArg arg5, TCGArg arg6) 354 { 355 *gen_opc_ptr++ = opc; 356 *gen_opparam_ptr++ = GET_TCGV_I64(arg1); 357 *gen_opparam_ptr++ = GET_TCGV_I64(arg2); 358 *gen_opparam_ptr++ = GET_TCGV_I64(arg3); 359 *gen_opparam_ptr++ = GET_TCGV_I64(arg4); 360 *gen_opparam_ptr++ = arg5; 361 *gen_opparam_ptr++ = arg6; 362 } 363 364 static inline void gen_set_label(int n) 365 { 366 tcg_gen_op1i(INDEX_op_set_label, n); 367 } 368 369 static inline void tcg_gen_br(int label) 370 { 371 tcg_gen_op1i(INDEX_op_br, label); 372 } 373 374 static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg) 375 { 376 if (!TCGV_EQUAL_I32(ret, arg)) 377 tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg); 378 } 379 380 static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg) 381 { 382 tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg); 383 } 384 385 /* A version of dh_sizemask from def-helper.h that doesn't rely on 386 preprocessor magic. */ 387 static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed) 388 { 389 return (is_64bit << n*2) | (is_signed << (n*2 + 1)); 390 } 391 392 /* helper calls */ 393 static inline void tcg_gen_helperN(void *func, int flags, int sizemask, 394 TCGArg ret, int nargs, TCGArg *args) 395 { 396 TCGv_ptr fn; 397 fn = tcg_const_ptr(func); 398 tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret, 399 nargs, args); 400 tcg_temp_free_ptr(fn); 401 } 402 403 /* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently 404 reserved for helpers in tcg-runtime.c. These helpers all do not read 405 globals and do not have side effects, hence the call to tcg_gen_callN() 406 with TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS. This may need 407 to be adjusted if these functions start to be used with other helpers. 
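   As an illustration, a helper that returns a signed 32-bit value and takes
   two signed 32-bit arguments is described by
       sizemask = tcg_gen_sizemask(0, 0, 1)
                | tcg_gen_sizemask(1, 0, 1)
                | tcg_gen_sizemask(2, 0, 1);
   which is exactly how tcg_gen_div_i32() below builds the mask before
   calling tcg_gen_helper32().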
*/ 408 static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret, 409 TCGv_i32 a, TCGv_i32 b) 410 { 411 TCGv_ptr fn; 412 TCGArg args[2]; 413 fn = tcg_const_ptr(func); 414 args[0] = GET_TCGV_I32(a); 415 args[1] = GET_TCGV_I32(b); 416 tcg_gen_callN(&tcg_ctx, fn, 417 TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS, 418 sizemask, GET_TCGV_I32(ret), 2, args); 419 tcg_temp_free_ptr(fn); 420 } 421 422 static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret, 423 TCGv_i64 a, TCGv_i64 b) 424 { 425 TCGv_ptr fn; 426 TCGArg args[2]; 427 fn = tcg_const_ptr(func); 428 args[0] = GET_TCGV_I64(a); 429 args[1] = GET_TCGV_I64(b); 430 tcg_gen_callN(&tcg_ctx, fn, 431 TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS, 432 sizemask, GET_TCGV_I64(ret), 2, args); 433 tcg_temp_free_ptr(fn); 434 } 435 436 /* 32 bit ops */ 437 438 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset) 439 { 440 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset); 441 } 442 443 static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset) 444 { 445 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset); 446 } 447 448 static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset) 449 { 450 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset); 451 } 452 453 static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset) 454 { 455 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset); 456 } 457 458 static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset) 459 { 460 tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset); 461 } 462 463 static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset) 464 { 465 tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset); 466 } 467 468 static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset) 469 { 470 tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset); 471 } 472 473 static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset) 474 { 475 tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset); 476 } 477 478 static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 479 { 480 tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2); 481 } 482 483 static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 484 { 485 /* some cases can be optimized here */ 486 if (arg2 == 0) { 487 tcg_gen_mov_i32(ret, arg1); 488 } else { 489 TCGv_i32 t0 = tcg_const_i32(arg2); 490 tcg_gen_add_i32(ret, arg1, t0); 491 tcg_temp_free_i32(t0); 492 } 493 } 494 495 static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 496 { 497 tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2); 498 } 499 500 static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2) 501 { 502 TCGv_i32 t0 = tcg_const_i32(arg1); 503 tcg_gen_sub_i32(ret, t0, arg2); 504 tcg_temp_free_i32(t0); 505 } 506 507 static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 508 { 509 /* some cases can be optimized here */ 510 if (arg2 == 0) { 511 tcg_gen_mov_i32(ret, arg1); 512 } else { 513 TCGv_i32 t0 = tcg_const_i32(arg2); 514 tcg_gen_sub_i32(ret, arg1, t0); 515 tcg_temp_free_i32(t0); 516 } 517 } 518 519 static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 520 { 521 if (TCGV_EQUAL_I32(arg1, arg2)) { 522 tcg_gen_mov_i32(ret, arg1); 523 } else { 524 
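        /* arg1 & arg1 == arg1 was handled above; here the operands are
           distinct, so emit the generic three-operand and_i32 op. */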
tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2); 525 } 526 } 527 528 static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2) 529 { 530 TCGv_i32 t0; 531 /* Some cases can be optimized here. */ 532 switch (arg2) { 533 case 0: 534 tcg_gen_movi_i32(ret, 0); 535 return; 536 case 0xffffffffu: 537 tcg_gen_mov_i32(ret, arg1); 538 return; 539 case 0xffu: 540 /* Don't recurse with tcg_gen_ext8u_i32. */ 541 if (TCG_TARGET_HAS_ext8u_i32) { 542 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1); 543 return; 544 } 545 break; 546 case 0xffffu: 547 if (TCG_TARGET_HAS_ext16u_i32) { 548 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1); 549 return; 550 } 551 break; 552 } 553 t0 = tcg_const_i32(arg2); 554 tcg_gen_and_i32(ret, arg1, t0); 555 tcg_temp_free_i32(t0); 556 } 557 558 static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 559 { 560 if (TCGV_EQUAL_I32(arg1, arg2)) { 561 tcg_gen_mov_i32(ret, arg1); 562 } else { 563 tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2); 564 } 565 } 566 567 static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 568 { 569 /* Some cases can be optimized here. */ 570 if (arg2 == -1) { 571 tcg_gen_movi_i32(ret, -1); 572 } else if (arg2 == 0) { 573 tcg_gen_mov_i32(ret, arg1); 574 } else { 575 TCGv_i32 t0 = tcg_const_i32(arg2); 576 tcg_gen_or_i32(ret, arg1, t0); 577 tcg_temp_free_i32(t0); 578 } 579 } 580 581 static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 582 { 583 if (TCGV_EQUAL_I32(arg1, arg2)) { 584 tcg_gen_movi_i32(ret, 0); 585 } else { 586 tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2); 587 } 588 } 589 590 static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 591 { 592 /* Some cases can be optimized here. */ 593 if (arg2 == 0) { 594 tcg_gen_mov_i32(ret, arg1); 595 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) { 596 /* Don't recurse with tcg_gen_not_i32. 
*/ 597 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1); 598 } else { 599 TCGv_i32 t0 = tcg_const_i32(arg2); 600 tcg_gen_xor_i32(ret, arg1, t0); 601 tcg_temp_free_i32(t0); 602 } 603 } 604 605 static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 606 { 607 tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2); 608 } 609 610 static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 611 { 612 if (arg2 == 0) { 613 tcg_gen_mov_i32(ret, arg1); 614 } else { 615 TCGv_i32 t0 = tcg_const_i32(arg2); 616 tcg_gen_shl_i32(ret, arg1, t0); 617 tcg_temp_free_i32(t0); 618 } 619 } 620 621 static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 622 { 623 tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2); 624 } 625 626 static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 627 { 628 if (arg2 == 0) { 629 tcg_gen_mov_i32(ret, arg1); 630 } else { 631 TCGv_i32 t0 = tcg_const_i32(arg2); 632 tcg_gen_shr_i32(ret, arg1, t0); 633 tcg_temp_free_i32(t0); 634 } 635 } 636 637 static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 638 { 639 tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2); 640 } 641 642 static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 643 { 644 if (arg2 == 0) { 645 tcg_gen_mov_i32(ret, arg1); 646 } else { 647 TCGv_i32 t0 = tcg_const_i32(arg2); 648 tcg_gen_sar_i32(ret, arg1, t0); 649 tcg_temp_free_i32(t0); 650 } 651 } 652 653 static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, 654 TCGv_i32 arg2, int label_index) 655 { 656 if (cond == TCG_COND_ALWAYS) { 657 tcg_gen_br(label_index); 658 } else if (cond != TCG_COND_NEVER) { 659 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index); 660 } 661 } 662 663 static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, 664 int32_t arg2, int label_index) 665 { 666 if (cond == TCG_COND_ALWAYS) { 667 tcg_gen_br(label_index); 668 } else if (cond != TCG_COND_NEVER) { 669 TCGv_i32 t0 = tcg_const_i32(arg2); 670 tcg_gen_brcond_i32(cond, arg1, t0, label_index); 671 tcg_temp_free_i32(t0); 672 } 673 } 674 675 static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret, 676 TCGv_i32 arg1, TCGv_i32 arg2) 677 { 678 if (cond == TCG_COND_ALWAYS) { 679 tcg_gen_movi_i32(ret, 1); 680 } else if (cond == TCG_COND_NEVER) { 681 tcg_gen_movi_i32(ret, 0); 682 } else { 683 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond); 684 } 685 } 686 687 static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret, 688 TCGv_i32 arg1, int32_t arg2) 689 { 690 if (cond == TCG_COND_ALWAYS) { 691 tcg_gen_movi_i32(ret, 1); 692 } else if (cond == TCG_COND_NEVER) { 693 tcg_gen_movi_i32(ret, 0); 694 } else { 695 TCGv_i32 t0 = tcg_const_i32(arg2); 696 tcg_gen_setcond_i32(cond, ret, arg1, t0); 697 tcg_temp_free_i32(t0); 698 } 699 } 700 701 static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 702 { 703 tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2); 704 } 705 706 static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2) 707 { 708 TCGv_i32 t0 = tcg_const_i32(arg2); 709 tcg_gen_mul_i32(ret, arg1, t0); 710 tcg_temp_free_i32(t0); 711 } 712 713 static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 714 { 715 if (TCG_TARGET_HAS_div_i32) { 716 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2); 717 } else if (TCG_TARGET_HAS_div2_i32) { 718 TCGv_i32 t0 = tcg_temp_new_i32(); 719 tcg_gen_sari_i32(t0, arg1, 31); 720 tcg_gen_op5_i32(INDEX_op_div2_i32, 
ret, t0, arg1, t0, arg2); 721 tcg_temp_free_i32(t0); 722 } else { 723 int sizemask = 0; 724 /* Return value and both arguments are 32-bit and signed. */ 725 sizemask |= tcg_gen_sizemask(0, 0, 1); 726 sizemask |= tcg_gen_sizemask(1, 0, 1); 727 sizemask |= tcg_gen_sizemask(2, 0, 1); 728 tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2); 729 } 730 } 731 732 static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 733 { 734 if (TCG_TARGET_HAS_div_i32) { 735 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2); 736 } else if (TCG_TARGET_HAS_div2_i32) { 737 TCGv_i32 t0 = tcg_temp_new_i32(); 738 tcg_gen_sari_i32(t0, arg1, 31); 739 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2); 740 tcg_temp_free_i32(t0); 741 } else { 742 int sizemask = 0; 743 /* Return value and both arguments are 32-bit and signed. */ 744 sizemask |= tcg_gen_sizemask(0, 0, 1); 745 sizemask |= tcg_gen_sizemask(1, 0, 1); 746 sizemask |= tcg_gen_sizemask(2, 0, 1); 747 tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2); 748 } 749 } 750 751 static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 752 { 753 if (TCG_TARGET_HAS_div_i32) { 754 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2); 755 } else if (TCG_TARGET_HAS_div2_i32) { 756 TCGv_i32 t0 = tcg_temp_new_i32(); 757 tcg_gen_movi_i32(t0, 0); 758 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2); 759 tcg_temp_free_i32(t0); 760 } else { 761 int sizemask = 0; 762 /* Return value and both arguments are 32-bit and unsigned. */ 763 sizemask |= tcg_gen_sizemask(0, 0, 0); 764 sizemask |= tcg_gen_sizemask(1, 0, 0); 765 sizemask |= tcg_gen_sizemask(2, 0, 0); 766 tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2); 767 } 768 } 769 770 static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 771 { 772 if (TCG_TARGET_HAS_div_i32) { 773 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2); 774 } else if (TCG_TARGET_HAS_div2_i32) { 775 TCGv_i32 t0 = tcg_temp_new_i32(); 776 tcg_gen_movi_i32(t0, 0); 777 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2); 778 tcg_temp_free_i32(t0); 779 } else { 780 int sizemask = 0; 781 /* Return value and both arguments are 32-bit and unsigned. 
 */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
    }
}

#if TCG_TARGET_REG_BITS == 32

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg)) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* Replicate the sign bit of the loaded value into the high half. */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
    /* since arg2 and ret have different types, they cannot be the
       same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to
replace add2 with two moves. */ 897 tcg_gen_op0(INDEX_op_nop); 898 } 899 900 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 901 { 902 tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret), 903 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2), 904 TCGV_HIGH(arg2)); 905 /* Allow the optimizer room to replace sub2 with two moves. */ 906 tcg_gen_op0(INDEX_op_nop); 907 } 908 909 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 910 { 911 tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2)); 912 tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2)); 913 } 914 915 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 916 { 917 tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2); 918 tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32); 919 } 920 921 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 922 { 923 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2)); 924 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2)); 925 } 926 927 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 928 { 929 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2); 930 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32); 931 } 932 933 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 934 { 935 tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2)); 936 tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2)); 937 } 938 939 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 940 { 941 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2); 942 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32); 943 } 944 945 /* XXX: use generic code when basic block handling is OK or CPU 946 specific code (x86) */ 947 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 948 { 949 int sizemask = 0; 950 /* Return value and both arguments are 64-bit and signed. */ 951 sizemask |= tcg_gen_sizemask(0, 1, 1); 952 sizemask |= tcg_gen_sizemask(1, 1, 1); 953 sizemask |= tcg_gen_sizemask(2, 1, 1); 954 955 tcg_gen_helper64(tcg_helper_shl_i64, sizemask, ret, arg1, arg2); 956 } 957 958 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 959 { 960 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0); 961 } 962 963 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 964 { 965 int sizemask = 0; 966 /* Return value and both arguments are 64-bit and signed. */ 967 sizemask |= tcg_gen_sizemask(0, 1, 1); 968 sizemask |= tcg_gen_sizemask(1, 1, 1); 969 sizemask |= tcg_gen_sizemask(2, 1, 1); 970 971 tcg_gen_helper64(tcg_helper_shr_i64, sizemask, ret, arg1, arg2); 972 } 973 974 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 975 { 976 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0); 977 } 978 979 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 980 { 981 int sizemask = 0; 982 /* Return value and both arguments are 64-bit and signed. 
*/ 983 sizemask |= tcg_gen_sizemask(0, 1, 1); 984 sizemask |= tcg_gen_sizemask(1, 1, 1); 985 sizemask |= tcg_gen_sizemask(2, 1, 1); 986 987 tcg_gen_helper64(tcg_helper_sar_i64, sizemask, ret, arg1, arg2); 988 } 989 990 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 991 { 992 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1); 993 } 994 995 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, 996 TCGv_i64 arg2, int label_index) 997 { 998 if (cond == TCG_COND_ALWAYS) { 999 tcg_gen_br(label_index); 1000 } else if (cond != TCG_COND_NEVER) { 1001 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, 1002 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2), 1003 TCGV_HIGH(arg2), cond, label_index); 1004 } 1005 } 1006 1007 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret, 1008 TCGv_i64 arg1, TCGv_i64 arg2) 1009 { 1010 if (cond == TCG_COND_ALWAYS) { 1011 tcg_gen_movi_i32(TCGV_LOW(ret), 1); 1012 } else if (cond == TCG_COND_NEVER) { 1013 tcg_gen_movi_i32(TCGV_LOW(ret), 0); 1014 } else { 1015 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret), 1016 TCGV_LOW(arg1), TCGV_HIGH(arg1), 1017 TCGV_LOW(arg2), TCGV_HIGH(arg2), cond); 1018 } 1019 tcg_gen_movi_i32(TCGV_HIGH(ret), 0); 1020 } 1021 1022 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1023 { 1024 TCGv_i64 t0; 1025 TCGv_i32 t1; 1026 1027 t0 = tcg_temp_new_i64(); 1028 t1 = tcg_temp_new_i32(); 1029 1030 tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0), 1031 TCGV_LOW(arg1), TCGV_LOW(arg2)); 1032 /* Allow the optimizer room to replace mulu2 with two moves. */ 1033 tcg_gen_op0(INDEX_op_nop); 1034 1035 tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2)); 1036 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1); 1037 tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2)); 1038 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1); 1039 1040 tcg_gen_mov_i64(ret, t0); 1041 tcg_temp_free_i64(t0); 1042 tcg_temp_free_i32(t1); 1043 } 1044 1045 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1046 { 1047 int sizemask = 0; 1048 /* Return value and both arguments are 64-bit and signed. */ 1049 sizemask |= tcg_gen_sizemask(0, 1, 1); 1050 sizemask |= tcg_gen_sizemask(1, 1, 1); 1051 sizemask |= tcg_gen_sizemask(2, 1, 1); 1052 1053 tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2); 1054 } 1055 1056 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1057 { 1058 int sizemask = 0; 1059 /* Return value and both arguments are 64-bit and signed. */ 1060 sizemask |= tcg_gen_sizemask(0, 1, 1); 1061 sizemask |= tcg_gen_sizemask(1, 1, 1); 1062 sizemask |= tcg_gen_sizemask(2, 1, 1); 1063 1064 tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2); 1065 } 1066 1067 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1068 { 1069 int sizemask = 0; 1070 /* Return value and both arguments are 64-bit and unsigned. */ 1071 sizemask |= tcg_gen_sizemask(0, 1, 0); 1072 sizemask |= tcg_gen_sizemask(1, 1, 0); 1073 sizemask |= tcg_gen_sizemask(2, 1, 0); 1074 1075 tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2); 1076 } 1077 1078 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1079 { 1080 int sizemask = 0; 1081 /* Return value and both arguments are 64-bit and unsigned. 
*/ 1082 sizemask |= tcg_gen_sizemask(0, 1, 0); 1083 sizemask |= tcg_gen_sizemask(1, 1, 0); 1084 sizemask |= tcg_gen_sizemask(2, 1, 0); 1085 1086 tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2); 1087 } 1088 1089 #else 1090 1091 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg) 1092 { 1093 if (!TCGV_EQUAL_I64(ret, arg)) 1094 tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg); 1095 } 1096 1097 static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg) 1098 { 1099 tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg); 1100 } 1101 1102 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, 1103 tcg_target_long offset) 1104 { 1105 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset); 1106 } 1107 1108 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, 1109 tcg_target_long offset) 1110 { 1111 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset); 1112 } 1113 1114 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, 1115 tcg_target_long offset) 1116 { 1117 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset); 1118 } 1119 1120 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, 1121 tcg_target_long offset) 1122 { 1123 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset); 1124 } 1125 1126 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, 1127 tcg_target_long offset) 1128 { 1129 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset); 1130 } 1131 1132 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, 1133 tcg_target_long offset) 1134 { 1135 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset); 1136 } 1137 1138 static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset) 1139 { 1140 tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset); 1141 } 1142 1143 static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, 1144 tcg_target_long offset) 1145 { 1146 tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset); 1147 } 1148 1149 static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, 1150 tcg_target_long offset) 1151 { 1152 tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset); 1153 } 1154 1155 static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, 1156 tcg_target_long offset) 1157 { 1158 tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset); 1159 } 1160 1161 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset) 1162 { 1163 tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset); 1164 } 1165 1166 static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1167 { 1168 tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2); 1169 } 1170 1171 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1172 { 1173 tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2); 1174 } 1175 1176 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1177 { 1178 if (TCGV_EQUAL_I64(arg1, arg2)) { 1179 tcg_gen_mov_i64(ret, arg1); 1180 } else { 1181 tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2); 1182 } 1183 } 1184 1185 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2) 1186 { 1187 TCGv_i64 t0; 1188 /* Some cases can be optimized here. */ 1189 switch (arg2) { 1190 case 0: 1191 tcg_gen_movi_i64(ret, 0); 1192 return; 1193 case 0xffffffffffffffffull: 1194 tcg_gen_mov_i64(ret, arg1); 1195 return; 1196 case 0xffull: 1197 /* Don't recurse with tcg_gen_ext8u_i32. 
*/ 1198 if (TCG_TARGET_HAS_ext8u_i64) { 1199 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1); 1200 return; 1201 } 1202 break; 1203 case 0xffffu: 1204 if (TCG_TARGET_HAS_ext16u_i64) { 1205 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1); 1206 return; 1207 } 1208 break; 1209 case 0xffffffffull: 1210 if (TCG_TARGET_HAS_ext32u_i64) { 1211 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1); 1212 return; 1213 } 1214 break; 1215 } 1216 t0 = tcg_const_i64(arg2); 1217 tcg_gen_and_i64(ret, arg1, t0); 1218 tcg_temp_free_i64(t0); 1219 } 1220 1221 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1222 { 1223 if (TCGV_EQUAL_I64(arg1, arg2)) { 1224 tcg_gen_mov_i64(ret, arg1); 1225 } else { 1226 tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2); 1227 } 1228 } 1229 1230 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1231 { 1232 /* Some cases can be optimized here. */ 1233 if (arg2 == -1) { 1234 tcg_gen_movi_i64(ret, -1); 1235 } else if (arg2 == 0) { 1236 tcg_gen_mov_i64(ret, arg1); 1237 } else { 1238 TCGv_i64 t0 = tcg_const_i64(arg2); 1239 tcg_gen_or_i64(ret, arg1, t0); 1240 tcg_temp_free_i64(t0); 1241 } 1242 } 1243 1244 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1245 { 1246 if (TCGV_EQUAL_I64(arg1, arg2)) { 1247 tcg_gen_movi_i64(ret, 0); 1248 } else { 1249 tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2); 1250 } 1251 } 1252 1253 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1254 { 1255 /* Some cases can be optimized here. */ 1256 if (arg2 == 0) { 1257 tcg_gen_mov_i64(ret, arg1); 1258 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) { 1259 /* Don't recurse with tcg_gen_not_i64. */ 1260 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1); 1261 } else { 1262 TCGv_i64 t0 = tcg_const_i64(arg2); 1263 tcg_gen_xor_i64(ret, arg1, t0); 1264 tcg_temp_free_i64(t0); 1265 } 1266 } 1267 1268 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1269 { 1270 tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2); 1271 } 1272 1273 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1274 { 1275 if (arg2 == 0) { 1276 tcg_gen_mov_i64(ret, arg1); 1277 } else { 1278 TCGv_i64 t0 = tcg_const_i64(arg2); 1279 tcg_gen_shl_i64(ret, arg1, t0); 1280 tcg_temp_free_i64(t0); 1281 } 1282 } 1283 1284 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1285 { 1286 tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2); 1287 } 1288 1289 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1290 { 1291 if (arg2 == 0) { 1292 tcg_gen_mov_i64(ret, arg1); 1293 } else { 1294 TCGv_i64 t0 = tcg_const_i64(arg2); 1295 tcg_gen_shr_i64(ret, arg1, t0); 1296 tcg_temp_free_i64(t0); 1297 } 1298 } 1299 1300 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1301 { 1302 tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2); 1303 } 1304 1305 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1306 { 1307 if (arg2 == 0) { 1308 tcg_gen_mov_i64(ret, arg1); 1309 } else { 1310 TCGv_i64 t0 = tcg_const_i64(arg2); 1311 tcg_gen_sar_i64(ret, arg1, t0); 1312 tcg_temp_free_i64(t0); 1313 } 1314 } 1315 1316 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, 1317 TCGv_i64 arg2, int label_index) 1318 { 1319 if (cond == TCG_COND_ALWAYS) { 1320 tcg_gen_br(label_index); 1321 } else if (cond != TCG_COND_NEVER) { 1322 tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index); 1323 } 1324 } 
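/* A typical front-end use of the branch helpers above, sketched with a
   hypothetical label and temporary:

       int l = gen_new_label();
       tcg_gen_brcondi_i64(TCG_COND_EQ, val, 0, l);
       ... code for the val != 0 path ...
       gen_set_label(l);

   brcond* emits nothing for TCG_COND_NEVER and an unconditional br for
   TCG_COND_ALWAYS, as the implementations above show. */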
1325 1326 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret, 1327 TCGv_i64 arg1, TCGv_i64 arg2) 1328 { 1329 if (cond == TCG_COND_ALWAYS) { 1330 tcg_gen_movi_i64(ret, 1); 1331 } else if (cond == TCG_COND_NEVER) { 1332 tcg_gen_movi_i64(ret, 0); 1333 } else { 1334 tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond); 1335 } 1336 } 1337 1338 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1339 { 1340 tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2); 1341 } 1342 1343 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1344 { 1345 if (TCG_TARGET_HAS_div_i64) { 1346 tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2); 1347 } else if (TCG_TARGET_HAS_div2_i64) { 1348 TCGv_i64 t0 = tcg_temp_new_i64(); 1349 tcg_gen_sari_i64(t0, arg1, 63); 1350 tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2); 1351 tcg_temp_free_i64(t0); 1352 } else { 1353 int sizemask = 0; 1354 /* Return value and both arguments are 64-bit and signed. */ 1355 sizemask |= tcg_gen_sizemask(0, 1, 1); 1356 sizemask |= tcg_gen_sizemask(1, 1, 1); 1357 sizemask |= tcg_gen_sizemask(2, 1, 1); 1358 tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2); 1359 } 1360 } 1361 1362 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1363 { 1364 if (TCG_TARGET_HAS_div_i64) { 1365 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2); 1366 } else if (TCG_TARGET_HAS_div2_i64) { 1367 TCGv_i64 t0 = tcg_temp_new_i64(); 1368 tcg_gen_sari_i64(t0, arg1, 63); 1369 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2); 1370 tcg_temp_free_i64(t0); 1371 } else { 1372 int sizemask = 0; 1373 /* Return value and both arguments are 64-bit and signed. */ 1374 sizemask |= tcg_gen_sizemask(0, 1, 1); 1375 sizemask |= tcg_gen_sizemask(1, 1, 1); 1376 sizemask |= tcg_gen_sizemask(2, 1, 1); 1377 tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2); 1378 } 1379 } 1380 1381 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1382 { 1383 if (TCG_TARGET_HAS_div_i64) { 1384 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2); 1385 } else if (TCG_TARGET_HAS_div2_i64) { 1386 TCGv_i64 t0 = tcg_temp_new_i64(); 1387 tcg_gen_movi_i64(t0, 0); 1388 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2); 1389 tcg_temp_free_i64(t0); 1390 } else { 1391 int sizemask = 0; 1392 /* Return value and both arguments are 64-bit and unsigned. */ 1393 sizemask |= tcg_gen_sizemask(0, 1, 0); 1394 sizemask |= tcg_gen_sizemask(1, 1, 0); 1395 sizemask |= tcg_gen_sizemask(2, 1, 0); 1396 tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2); 1397 } 1398 } 1399 1400 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1401 { 1402 if (TCG_TARGET_HAS_div_i64) { 1403 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2); 1404 } else if (TCG_TARGET_HAS_div2_i64) { 1405 TCGv_i64 t0 = tcg_temp_new_i64(); 1406 tcg_gen_movi_i64(t0, 0); 1407 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2); 1408 tcg_temp_free_i64(t0); 1409 } else { 1410 int sizemask = 0; 1411 /* Return value and both arguments are 64-bit and unsigned. 
*/ 1412 sizemask |= tcg_gen_sizemask(0, 1, 0); 1413 sizemask |= tcg_gen_sizemask(1, 1, 0); 1414 sizemask |= tcg_gen_sizemask(2, 1, 0); 1415 tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2); 1416 } 1417 } 1418 #endif /* TCG_TARGET_REG_BITS == 32 */ 1419 1420 static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1421 { 1422 /* some cases can be optimized here */ 1423 if (arg2 == 0) { 1424 tcg_gen_mov_i64(ret, arg1); 1425 } else { 1426 TCGv_i64 t0 = tcg_const_i64(arg2); 1427 tcg_gen_add_i64(ret, arg1, t0); 1428 tcg_temp_free_i64(t0); 1429 } 1430 } 1431 1432 static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2) 1433 { 1434 TCGv_i64 t0 = tcg_const_i64(arg1); 1435 tcg_gen_sub_i64(ret, t0, arg2); 1436 tcg_temp_free_i64(t0); 1437 } 1438 1439 static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1440 { 1441 /* some cases can be optimized here */ 1442 if (arg2 == 0) { 1443 tcg_gen_mov_i64(ret, arg1); 1444 } else { 1445 TCGv_i64 t0 = tcg_const_i64(arg2); 1446 tcg_gen_sub_i64(ret, arg1, t0); 1447 tcg_temp_free_i64(t0); 1448 } 1449 } 1450 static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, 1451 int64_t arg2, int label_index) 1452 { 1453 if (cond == TCG_COND_ALWAYS) { 1454 tcg_gen_br(label_index); 1455 } else if (cond != TCG_COND_NEVER) { 1456 TCGv_i64 t0 = tcg_const_i64(arg2); 1457 tcg_gen_brcond_i64(cond, arg1, t0, label_index); 1458 tcg_temp_free_i64(t0); 1459 } 1460 } 1461 1462 static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret, 1463 TCGv_i64 arg1, int64_t arg2) 1464 { 1465 TCGv_i64 t0 = tcg_const_i64(arg2); 1466 tcg_gen_setcond_i64(cond, ret, arg1, t0); 1467 tcg_temp_free_i64(t0); 1468 } 1469 1470 static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2) 1471 { 1472 TCGv_i64 t0 = tcg_const_i64(arg2); 1473 tcg_gen_mul_i64(ret, arg1, t0); 1474 tcg_temp_free_i64(t0); 1475 } 1476 1477 1478 /***************************************/ 1479 /* optional operations */ 1480 1481 static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg) 1482 { 1483 if (TCG_TARGET_HAS_ext8s_i32) { 1484 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg); 1485 } else { 1486 tcg_gen_shli_i32(ret, arg, 24); 1487 tcg_gen_sari_i32(ret, ret, 24); 1488 } 1489 } 1490 1491 static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg) 1492 { 1493 if (TCG_TARGET_HAS_ext16s_i32) { 1494 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg); 1495 } else { 1496 tcg_gen_shli_i32(ret, arg, 16); 1497 tcg_gen_sari_i32(ret, ret, 16); 1498 } 1499 } 1500 1501 static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg) 1502 { 1503 if (TCG_TARGET_HAS_ext8u_i32) { 1504 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg); 1505 } else { 1506 tcg_gen_andi_i32(ret, arg, 0xffu); 1507 } 1508 } 1509 1510 static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg) 1511 { 1512 if (TCG_TARGET_HAS_ext16u_i32) { 1513 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg); 1514 } else { 1515 tcg_gen_andi_i32(ret, arg, 0xffffu); 1516 } 1517 } 1518 1519 /* Note: we assume the two high bytes are set to zero */ 1520 static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg) 1521 { 1522 if (TCG_TARGET_HAS_bswap16_i32) { 1523 tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg); 1524 } else { 1525 TCGv_i32 t0 = tcg_temp_new_i32(); 1526 1527 tcg_gen_ext8u_i32(t0, arg); 1528 tcg_gen_shli_i32(t0, t0, 8); 1529 tcg_gen_shri_i32(ret, arg, 8); 1530 tcg_gen_or_i32(ret, ret, t0); 1531 tcg_temp_free_i32(t0); 1532 } 1533 } 1534 1535 static 
inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg) 1536 { 1537 if (TCG_TARGET_HAS_bswap32_i32) { 1538 tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg); 1539 } else { 1540 TCGv_i32 t0, t1; 1541 t0 = tcg_temp_new_i32(); 1542 t1 = tcg_temp_new_i32(); 1543 1544 tcg_gen_shli_i32(t0, arg, 24); 1545 1546 tcg_gen_andi_i32(t1, arg, 0x0000ff00); 1547 tcg_gen_shli_i32(t1, t1, 8); 1548 tcg_gen_or_i32(t0, t0, t1); 1549 1550 tcg_gen_shri_i32(t1, arg, 8); 1551 tcg_gen_andi_i32(t1, t1, 0x0000ff00); 1552 tcg_gen_or_i32(t0, t0, t1); 1553 1554 tcg_gen_shri_i32(t1, arg, 24); 1555 tcg_gen_or_i32(ret, t0, t1); 1556 tcg_temp_free_i32(t0); 1557 tcg_temp_free_i32(t1); 1558 } 1559 } 1560 1561 #if TCG_TARGET_REG_BITS == 32 1562 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg) 1563 { 1564 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1565 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31); 1566 } 1567 1568 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg) 1569 { 1570 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1571 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31); 1572 } 1573 1574 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg) 1575 { 1576 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1577 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31); 1578 } 1579 1580 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg) 1581 { 1582 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1583 tcg_gen_movi_i32(TCGV_HIGH(ret), 0); 1584 } 1585 1586 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg) 1587 { 1588 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1589 tcg_gen_movi_i32(TCGV_HIGH(ret), 0); 1590 } 1591 1592 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg) 1593 { 1594 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1595 tcg_gen_movi_i32(TCGV_HIGH(ret), 0); 1596 } 1597 1598 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg) 1599 { 1600 tcg_gen_mov_i32(ret, TCGV_LOW(arg)); 1601 } 1602 1603 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg) 1604 { 1605 tcg_gen_mov_i32(TCGV_LOW(ret), arg); 1606 tcg_gen_movi_i32(TCGV_HIGH(ret), 0); 1607 } 1608 1609 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg) 1610 { 1611 tcg_gen_mov_i32(TCGV_LOW(ret), arg); 1612 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31); 1613 } 1614 1615 /* Note: we assume the six high bytes are set to zero */ 1616 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg) 1617 { 1618 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg)); 1619 tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1620 } 1621 1622 /* Note: we assume the four high bytes are set to zero */ 1623 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg) 1624 { 1625 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg)); 1626 tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1627 } 1628 1629 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg) 1630 { 1631 TCGv_i32 t0, t1; 1632 t0 = tcg_temp_new_i32(); 1633 t1 = tcg_temp_new_i32(); 1634 1635 tcg_gen_bswap32_i32(t0, TCGV_LOW(arg)); 1636 tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg)); 1637 tcg_gen_mov_i32(TCGV_LOW(ret), t1); 1638 tcg_gen_mov_i32(TCGV_HIGH(ret), t0); 1639 tcg_temp_free_i32(t0); 1640 tcg_temp_free_i32(t1); 1641 } 1642 #else 1643 1644 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg) 1645 { 1646 if (TCG_TARGET_HAS_ext8s_i64) { 1647 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg); 1648 } else { 1649 tcg_gen_shli_i64(ret, arg, 56); 1650 
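        /* Shift the byte into bits 63..56; the arithmetic shift right below
           brings it back with the sign bit replicated across bits 63..8. */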
tcg_gen_sari_i64(ret, ret, 56); 1651 } 1652 } 1653 1654 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg) 1655 { 1656 if (TCG_TARGET_HAS_ext16s_i64) { 1657 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg); 1658 } else { 1659 tcg_gen_shli_i64(ret, arg, 48); 1660 tcg_gen_sari_i64(ret, ret, 48); 1661 } 1662 } 1663 1664 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg) 1665 { 1666 if (TCG_TARGET_HAS_ext32s_i64) { 1667 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg); 1668 } else { 1669 tcg_gen_shli_i64(ret, arg, 32); 1670 tcg_gen_sari_i64(ret, ret, 32); 1671 } 1672 } 1673 1674 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg) 1675 { 1676 if (TCG_TARGET_HAS_ext8u_i64) { 1677 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg); 1678 } else { 1679 tcg_gen_andi_i64(ret, arg, 0xffu); 1680 } 1681 } 1682 1683 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg) 1684 { 1685 if (TCG_TARGET_HAS_ext16u_i64) { 1686 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg); 1687 } else { 1688 tcg_gen_andi_i64(ret, arg, 0xffffu); 1689 } 1690 } 1691 1692 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg) 1693 { 1694 if (TCG_TARGET_HAS_ext32u_i64) { 1695 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg); 1696 } else { 1697 tcg_gen_andi_i64(ret, arg, 0xffffffffu); 1698 } 1699 } 1700 1701 /* Note: we assume the target supports move between 32 and 64 bit 1702 registers. This will probably break MIPS64 targets. */ 1703 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg) 1704 { 1705 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg))); 1706 } 1707 1708 /* Note: we assume the target supports move between 32 and 64 bit 1709 registers */ 1710 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg) 1711 { 1712 tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg))); 1713 } 1714 1715 /* Note: we assume the target supports move between 32 and 64 bit 1716 registers */ 1717 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg) 1718 { 1719 tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg))); 1720 } 1721 1722 /* Note: we assume the six high bytes are set to zero */ 1723 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg) 1724 { 1725 if (TCG_TARGET_HAS_bswap16_i64) { 1726 tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg); 1727 } else { 1728 TCGv_i64 t0 = tcg_temp_new_i64(); 1729 1730 tcg_gen_ext8u_i64(t0, arg); 1731 tcg_gen_shli_i64(t0, t0, 8); 1732 tcg_gen_shri_i64(ret, arg, 8); 1733 tcg_gen_or_i64(ret, ret, t0); 1734 tcg_temp_free_i64(t0); 1735 } 1736 } 1737 1738 /* Note: we assume the four high bytes are set to zero */ 1739 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg) 1740 { 1741 if (TCG_TARGET_HAS_bswap32_i64) { 1742 tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg); 1743 } else { 1744 TCGv_i64 t0, t1; 1745 t0 = tcg_temp_new_i64(); 1746 t1 = tcg_temp_new_i64(); 1747 1748 tcg_gen_shli_i64(t0, arg, 24); 1749 tcg_gen_ext32u_i64(t0, t0); 1750 1751 tcg_gen_andi_i64(t1, arg, 0x0000ff00); 1752 tcg_gen_shli_i64(t1, t1, 8); 1753 tcg_gen_or_i64(t0, t0, t1); 1754 1755 tcg_gen_shri_i64(t1, arg, 8); 1756 tcg_gen_andi_i64(t1, t1, 0x0000ff00); 1757 tcg_gen_or_i64(t0, t0, t1); 1758 1759 tcg_gen_shri_i64(t1, arg, 24); 1760 tcg_gen_or_i64(ret, t0, t1); 1761 tcg_temp_free_i64(t0); 1762 tcg_temp_free_i64(t1); 1763 } 1764 } 1765 1766 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg) 1767 { 1768 if (TCG_TARGET_HAS_bswap64_i64) { 1769 tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg); 1770 } 
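    /* Fallback: no bswap64 op, so the swap below is built from shifts,
       byte masks and ORs using the two 64-bit temporaries t0 and t1. */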
else { 1771 TCGv_i64 t0 = tcg_temp_new_i64(); 1772 TCGv_i64 t1 = tcg_temp_new_i64(); 1773 1774 tcg_gen_shli_i64(t0, arg, 56); 1775 1776 tcg_gen_andi_i64(t1, arg, 0x0000ff00); 1777 tcg_gen_shli_i64(t1, t1, 40); 1778 tcg_gen_or_i64(t0, t0, t1); 1779 1780 tcg_gen_andi_i64(t1, arg, 0x00ff0000); 1781 tcg_gen_shli_i64(t1, t1, 24); 1782 tcg_gen_or_i64(t0, t0, t1); 1783 1784 tcg_gen_andi_i64(t1, arg, 0xff000000); 1785 tcg_gen_shli_i64(t1, t1, 8); 1786 tcg_gen_or_i64(t0, t0, t1); 1787 1788 tcg_gen_shri_i64(t1, arg, 8); 1789 tcg_gen_andi_i64(t1, t1, 0xff000000); 1790 tcg_gen_or_i64(t0, t0, t1); 1791 1792 tcg_gen_shri_i64(t1, arg, 24); 1793 tcg_gen_andi_i64(t1, t1, 0x00ff0000); 1794 tcg_gen_or_i64(t0, t0, t1); 1795 1796 tcg_gen_shri_i64(t1, arg, 40); 1797 tcg_gen_andi_i64(t1, t1, 0x0000ff00); 1798 tcg_gen_or_i64(t0, t0, t1); 1799 1800 tcg_gen_shri_i64(t1, arg, 56); 1801 tcg_gen_or_i64(ret, t0, t1); 1802 tcg_temp_free_i64(t0); 1803 tcg_temp_free_i64(t1); 1804 } 1805 } 1806 1807 #endif 1808 1809 static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg) 1810 { 1811 if (TCG_TARGET_HAS_neg_i32) { 1812 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg); 1813 } else { 1814 TCGv_i32 t0 = tcg_const_i32(0); 1815 tcg_gen_sub_i32(ret, t0, arg); 1816 tcg_temp_free_i32(t0); 1817 } 1818 } 1819 1820 static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg) 1821 { 1822 if (TCG_TARGET_HAS_neg_i64) { 1823 tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg); 1824 } else { 1825 TCGv_i64 t0 = tcg_const_i64(0); 1826 tcg_gen_sub_i64(ret, t0, arg); 1827 tcg_temp_free_i64(t0); 1828 } 1829 } 1830 1831 static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg) 1832 { 1833 if (TCG_TARGET_HAS_not_i32) { 1834 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg); 1835 } else { 1836 tcg_gen_xori_i32(ret, arg, -1); 1837 } 1838 } 1839 1840 static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg) 1841 { 1842 #if TCG_TARGET_REG_BITS == 64 1843 if (TCG_TARGET_HAS_not_i64) { 1844 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg); 1845 } else { 1846 tcg_gen_xori_i64(ret, arg, -1); 1847 } 1848 #else 1849 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg)); 1850 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg)); 1851 #endif 1852 } 1853 1854 static inline void tcg_gen_discard_i32(TCGv_i32 arg) 1855 { 1856 tcg_gen_op1_i32(INDEX_op_discard, arg); 1857 } 1858 1859 static inline void tcg_gen_discard_i64(TCGv_i64 arg) 1860 { 1861 #if TCG_TARGET_REG_BITS == 32 1862 tcg_gen_discard_i32(TCGV_LOW(arg)); 1863 tcg_gen_discard_i32(TCGV_HIGH(arg)); 1864 #else 1865 tcg_gen_op1_i64(INDEX_op_discard, arg); 1866 #endif 1867 } 1868 1869 static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) 1870 { 1871 if (TCG_TARGET_HAS_andc_i32) { 1872 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2); 1873 } else { 1874 TCGv_i32 t0 = tcg_temp_new_i32(); 1875 tcg_gen_not_i32(t0, arg2); 1876 tcg_gen_and_i32(ret, arg1, t0); 1877 tcg_temp_free_i32(t0); 1878 } 1879 } 1880 1881 static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2) 1882 { 1883 #if TCG_TARGET_REG_BITS == 64 1884 if (TCG_TARGET_HAS_andc_i64) { 1885 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2); 1886 } else { 1887 TCGv_i64 t0 = tcg_temp_new_i64(); 1888 tcg_gen_not_i64(t0, arg2); 1889 tcg_gen_and_i64(ret, arg1, t0); 1890 tcg_temp_free_i64(t0); 1891 } 1892 #else 1893 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2)); 1894 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2)); 1895 #endif 1896 } 1897 1898 static inline void tcg_gen_eqv_i32(TCGv_i32 ret, 

static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}
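
/* Rotates are optional in the backend.  When TCG_TARGET_HAS_rot_* is false,
   a rotate by a variable amount is expanded from shifts, e.g. for 32 bits:
       rotl(x, n) = (x << n) | (x >> (32 - n))
   and rotr(x, n) = (x >> n) | (x << (32 - n)).  */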

static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
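
/* tcg_gen_deposit_* inserts the low 'len' bits of arg2 into arg1 at bit
   offset 'ofs' and writes the result to ret.  When the backend has no
   usable deposit opcode, the helpers below fall back to explicit masking,
   e.g. tcg_gen_deposit_i32(ret, a, b, 8, 8) computes
       ret = (a & ~(0xff << 8)) | ((b & 0xff) << 8).  */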

static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
                                       TCGv_i32 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                       TCGv_i64 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

#if TCG_TARGET_REG_BITS == 32
    if (ofs >= 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                            TCGV_LOW(arg2), ofs - 32, len);
        return;
    }
    if (ofs + len <= 32) {
        tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                            TCGV_LOW(arg2), ofs, len);
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        return;
    }
#endif

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
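
/* tcg_gen_concat_i32_i64 builds a 64-bit value from two 32-bit halves:
       dest = (uint64_t)low | ((uint64_t)high << 32).
   On a 32-bit host this is just two register moves; on a 64-bit host it is
   a zero-extension of each half followed by a deposit (or shift + or).  */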

static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
                                          TCGv_i32 high)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(dest), low);
    tcg_gen_mov_i32(TCGV_HIGH(dest), high);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
#endif
}

static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
                                        TCGv_i64 high)
{
    tcg_gen_deposit_i64(dest, low, high, 32, 32);
}

static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 c1, TCGv_i32 c2,
                                       TCGv_i32 v1, TCGv_i32 v2)
{
    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 c1, TCGv_i64 c2,
                                       TCGv_i64 v1, TCGv_i64 v2)
{
#if TCG_TARGET_REG_BITS == 32
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                     TCGV_LOW(c1), TCGV_HIGH(c1),
                     TCGV_LOW(c2), TCGV_HIGH(c2), cond);

    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_movi_i32(t1, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));
    } else {
        tcg_gen_neg_i32(t0, t0);

        tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

        tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
#else
    if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
#endif
}
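
/* When the backend lacks movcond, the helpers above synthesize
   "ret = cond(c1, c2) ? v1 : v2" from setcond: the 0/1 condition result is
   negated into an all-zeroes/all-ones mask m, and then
       ret = (v1 & m) | (v2 & ~m).  */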

/***************************************/
/* QEMU specific operations.  Their types depend on the QEMU CPU
   type.  */
#ifndef TARGET_LONG_BITS
#error must include QEMU headers
#endif

#if TARGET_LONG_BITS == 32
#define TCGv TCGv_i32
#define tcg_temp_new() tcg_temp_new_i32()
#define tcg_global_reg_new tcg_global_reg_new_i32
#define tcg_global_mem_new tcg_global_mem_new_i32
#define tcg_temp_local_new() tcg_temp_local_new_i32()
#define tcg_temp_free tcg_temp_free_i32
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
#else
#define TCGv TCGv_i64
#define tcg_temp_new() tcg_temp_new_i64()
#define tcg_global_reg_new tcg_global_reg_new_i64
#define tcg_global_mem_new tcg_global_mem_new_i64
#define tcg_temp_local_new() tcg_temp_local_new_i64()
#define tcg_temp_free tcg_temp_free_i64
#define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
#define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
#endif

/* debug info: write the PC of the corresponding QEMU CPU instruction */
static inline void tcg_gen_debug_insn_start(uint64_t pc)
{
    /* XXX: must really use a 32 bit size for TCGArg in all cases */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    tcg_gen_op2ii(INDEX_op_debug_insn_start,
                  (uint32_t)(pc), (uint32_t)(pc >> 32));
#else
    tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
#endif
}

static inline void tcg_gen_exit_tb(tcg_target_long val)
{
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
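
/* Typical use (in a target's translate.c, outside this header): a block
   that ends in a direct jump emits tcg_gen_goto_tb(n), stores the next
   guest PC, and then calls tcg_gen_exit_tb((tcg_target_long)tb + n) so the
   two exits can later be chained; an indirect or computed jump just calls
   tcg_gen_exit_tb(0).  */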

static inline void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

#if TCG_TARGET_REG_BITS == 32
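/* With 32-bit TCG target registers, a 64-bit guest address has to be
   passed as an explicit TCGV_LOW/TCGV_HIGH pair, and 64-bit data values
   are likewise split into their two 32-bit halves.  */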
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
#endif
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
#endif
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret), addr, mem_index);
#else
    tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
#else
    tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
                     TCGV_HIGH(addr), mem_index);
#endif
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
                     mem_index);
#else
    tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
                     TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
#endif
}

#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))

#else /* TCG_TARGET_REG_BITS == 32 */

static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
#endif
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
#else
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
#endif
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
}

#define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
#define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))

#endif /* TCG_TARGET_REG_BITS != 32 */
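
/* The *_tl aliases below operate on target-long-sized values (TCGv):
   they resolve to the _i64 generators when TARGET_LONG_BITS is 64 and to
   the _i32 generators otherwise, so target front ends can emit, e.g.,
   tcg_gen_add_tl() without caring about the guest word size.  */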

#if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_neg_tl tcg_gen_neg_i64
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_not_tl tcg_gen_not_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
#define tcg_gen_mul_tl tcg_gen_mul_i64
#define tcg_gen_muli_tl tcg_gen_muli_i64
#define tcg_gen_div_tl tcg_gen_div_i64
#define tcg_gen_rem_tl tcg_gen_rem_i64
#define tcg_gen_divu_tl tcg_gen_divu_i64
#define tcg_gen_remu_tl tcg_gen_remu_i64
#define tcg_gen_discard_tl tcg_gen_discard_i64
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i64
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
#define tcg_gen_nand_tl tcg_gen_nand_i64
#define tcg_gen_nor_tl tcg_gen_nor_i64
#define tcg_gen_orc_tl tcg_gen_orc_i64
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
#define tcg_const_tl tcg_const_i64
#define tcg_const_local_tl tcg_const_local_i64
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
#else
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_neg_tl tcg_gen_neg_i32
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_not_tl tcg_gen_not_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
#define tcg_gen_mul_tl tcg_gen_mul_i32
#define tcg_gen_muli_tl tcg_gen_muli_i32
#define tcg_gen_div_tl tcg_gen_div_i32
#define tcg_gen_rem_tl tcg_gen_rem_i32
#define tcg_gen_divu_tl tcg_gen_divu_i32
#define tcg_gen_remu_tl tcg_gen_remu_i32
#define tcg_gen_discard_tl tcg_gen_discard_i32
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i32
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
#define tcg_gen_nand_tl tcg_gen_nand_i32
#define tcg_gen_nor_tl tcg_gen_nor_i32
#define tcg_gen_orc_tl tcg_gen_orc_i32
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
#define tcg_const_tl tcg_const_i32
#define tcg_const_local_tl tcg_const_local_i32
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
#endif
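
/* Pointer arithmetic on TCGv_ptr values maps onto the host word size:
   the macros below use the i32 ops when the TCG target registers are
   32 bits wide and the i64 ops otherwise, with TCGV_PTR_TO_NAT converting
   a TCGv_ptr to the matching native-width variable.  */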

#if TCG_TARGET_REG_BITS == 32
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \
                                                 TCGV_PTR_TO_NAT(A), \
                                                 TCGV_PTR_TO_NAT(B))
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \
                                                   TCGV_PTR_TO_NAT(A), (B))
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#else /* TCG_TARGET_REG_BITS == 32 */
#define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \
                                                 TCGV_PTR_TO_NAT(A), \
                                                 TCGV_PTR_TO_NAT(B))
#define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), \
                                                   TCGV_PTR_TO_NAT(A), (B))
#define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* TCG_TARGET_REG_BITS != 32 */
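
/* Hypothetical usage sketch (names below are local to a target front end,
   not part of this header): a translate.c that declares its guest
   registers as TCGv globals, e.g.
       static TCGv cpu_regs[NB_REGS];
   can translate a guest "add rd, rn, rm" as
       tcg_gen_add_tl(cpu_regs[rd], cpu_regs[rn], cpu_regs[rm]);
   and a guest 32-bit load as
       tcg_gen_qemu_ld32u(cpu_regs[rd], addr, mem_index);
   where rd/rn/rm, addr and mem_index come from the decoded instruction.  */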