author     RajithaY <rajithax.yerrumsetty@intel.com>  2017-04-25 03:31:15 -0700
committer  Rajitha Yerrumchetty <rajithax.yerrumsetty@intel.com>  2017-05-22 06:48:08 +0000
commit     bb756eebdac6fd24e8919e2c43f7d2c8c4091f59 (patch)
tree       ca11e03542edf2d8f631efeca5e1626d211107e3 /qemu/tcg/tci
parent     a14b48d18a9ed03ec191cf16b162206998a895ce (diff)
Adding qemu as a submodule of KVMFORNFV
This patch adds qemu as a submodule of the kvmfornfv repo so that the latest qemu is used for the execution of all test cases.

Change-Id: I1280af507a857675c7f81d30c95255635667bdd7
Signed-off-by: RajithaY <rajithax.yerrumsetty@intel.com>
Diffstat (limited to 'qemu/tcg/tci')
-rw-r--r--  qemu/tcg/tci/README             130
-rw-r--r--  qemu/tcg/tci/tcg-target.h       185
-rw-r--r--  qemu/tcg/tci/tcg-target.inc.c   879
3 files changed, 0 insertions, 1194 deletions
diff --git a/qemu/tcg/tci/README b/qemu/tcg/tci/README
deleted file mode 100644
index 3786b0915..000000000
--- a/qemu/tcg/tci/README
+++ /dev/null
@@ -1,130 +0,0 @@
-TCG Interpreter (TCI) - Copyright (c) 2011 Stefan Weil.
-
-This file is released under the BSD license.
-
-1) Introduction
-
-TCG (Tiny Code Generator) is a code generator which translates
-code fragments ("basic blocks") from target code (any of the
-targets supported by QEMU) to a code representation which
-can be run on a host.
-
-QEMU can create native code for some hosts (arm, hppa, i386, ia64, ppc, ppc64,
-s390, sparc, x86_64). For others, unofficial host support was written.
-
-By adding a code generator for a virtual machine and using an
-interpreter for the generated bytecode, it is possible to
-support (almost) any host.
-
-This is what TCI (Tiny Code Interpreter) does.
-
-2) Implementation
-
-Like each TCG host frontend, TCI implements the code generator in
-tcg-target.inc.c, tcg-target.h. Both files are in directory tcg/tci.
-
-The additional file tcg/tci.c adds the interpreter.
-
-The bytecode consists of opcodes (with the same numeric values as those used
-by TCG), a command length, and arguments of variable size and number.
-
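As a side note (not part of the original README): judging from the generator
in tcg-target.inc.c below, each bytecode instruction is emitted as one opcode
byte, one length byte holding the total size of the instruction (patched in
after the arguments are written), and then the operand bytes. A minimal C
sketch of walking such a stream, using hypothetical names, could look like:

  #include <stdint.h>

  typedef struct {
      uint8_t opcode;       /* same numeric value as the TCG opcode */
      uint8_t length;       /* total instruction size in bytes */
      const uint8_t *args;  /* variable-sized operands */
  } TciInsnView;

  /* Decode one instruction header and return a pointer to the next one. */
  static const uint8_t *tci_next_insn(const uint8_t *tb_ptr, TciInsnView *view)
  {
      view->opcode = tb_ptr[0];
      view->length = tb_ptr[1];
      view->args   = tb_ptr + 2;
      return tb_ptr + view->length;
  }
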
-3) Usage
-
-For hosts without native TCG, the interpreter TCI must be enabled by
-
- configure --enable-tcg-interpreter
-
-If configure is called without --enable-tcg-interpreter, it will
-suggest using this option. Setting it automatically would need
-additional code in configure which would have to be updated whenever a
-new native TCG implementation is added.
-
-System emulation should work on any 32 or 64 bit host.
-User mode emulation might work; a new linker script (*.ld) may be
-needed, and the byte order might be wrong on big endian hosts,
-requiring fixes in configure.
-
-For hosts with native TCG, the interpreter TCI can be enabled by
-
- configure --enable-tcg-interpreter
-
-The only difference between running QEMU with TCI and running without TCI
-should be speed. Especially during development of TCI, it was very
-useful to compare runs with and without TCI. Create /tmp/qemu.log by
-
- qemu-system-i386 -d in_asm,op_opt,cpu -D /tmp/qemu.log -singlestep
-
-once with the interpreter and once without it, and compare the resulting
-qemu.log files. This is also useful to see the effects of additional
-registers or additional opcodes (it is easy to modify the virtual machine).
-It can also be used to verify native TCGs.
-
-Hosts with native TCG can also enable TCI by claiming to be unsupported:
-
- configure --cpu=unknown --enable-tcg-interpreter
-
-configure then no longer uses the native linker script (*.ld) for
-user mode emulation.
-
-
-4) Status
-
-TCI needs special implementations for 32 and 64 bit hosts, 32 and 64 bit
-targets, and hosts and targets with the same or different endianness.
-
- | host (le) host (be)
- | 32 64 32 64
-------------+------------------------------------------------------------
-target (le) | s0, u0 s1, u1 s?, u? s?, u?
-32 bit |
- |
-target (le) | sc, uc s1, u1 s?, u? s?, u?
-64 bit |
- |
-target (be) | sc, u0 sc, uc s?, u? s?, u?
-32 bit |
- |
-target (be) | sc, uc sc, uc s?, u? s?, u?
-64 bit |
- |
-
-System emulation
-s? = untested
-sc = compiles
-s0 = bios works
-s1 = grub works
-s2 = Linux boots
-
-Linux user mode emulation
-u? = untested
-uc = compiles
-u0 = static hello works
-u1 = linux-user-test works
-
-5) Todo list
-
-* TCI is not widely tested. It was written and tested on an x86_64 host
- running i386 and x86_64 system emulation and Linux user mode.
- A cross compiled QEMU for i386 host also works with the same basic tests.
- A cross compiled QEMU for mipsel host works, too. It is terribly slow
- because I run it in a mips malta emulation, so it is an interpreted
- emulation in an emulation.
- A cross compiled QEMU for arm host works (tested with pc bios).
- A cross compiled QEMU for ppc host works at least partially:
- i386-linux-user/qemu-i386 can run a simple hello-world program
- (tested in a ppc emulation).
-
-* Some TCG opcodes are missing in the code generator and/or
- in the interpreter. These opcodes raise a runtime exception, so it is
- possible to see where code must be added.
-
-* The pseudo code is not optimized and still ugly. For hosts with special
- alignment requirements, it needs some fixes (maybe aligned bytecode
- would also improve speed for hosts which support byte alignment).
-
-* A better disassembler for the pseudo code would be nice (a very primitive
- disassembler is included in tcg-target.inc.c).
-
-* It might be useful to have a runtime option which selects the native TCG
- or TCI, so QEMU would have to include two TCGs. Today, selecting TCI
- is a configure option, so you need two compilations of QEMU.
diff --git a/qemu/tcg/tci/tcg-target.h b/qemu/tcg/tci/tcg-target.h
deleted file mode 100644
index 3942f9ccc..000000000
--- a/qemu/tcg/tci/tcg-target.h
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Tiny Code Generator for QEMU
- *
- * Copyright (c) 2009, 2011 Stefan Weil
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
- * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-/*
- * This code implements a TCG which does not generate machine code for some
- * real target machine but which generates virtual machine code for an
- * interpreter. Interpreted pseudo code is slow, but it works on any host.
- *
- * Some remarks might help in understanding the code:
- *
- * "target" or "TCG target" is the machine which runs the generated code.
- * This is different from the usual meaning in QEMU, where "target" is the
- * emulated machine. So normally the QEMU host is identical to the TCG target.
- * Here the TCG target is a virtual machine, but this virtual machine must
- * use the same word size as the real machine.
- * Therefore, we need both 32 and 64 bit virtual machines (interpreters).
- */
-
-#if !defined(TCG_TARGET_H)
-#define TCG_TARGET_H
-
-
-#define TCG_TARGET_INTERPRETER 1
-#define TCG_TARGET_INSN_UNIT_SIZE 1
-#define TCG_TARGET_TLB_DISPLACEMENT_BITS 32
-
-#if UINTPTR_MAX == UINT32_MAX
-# define TCG_TARGET_REG_BITS 32
-#elif UINTPTR_MAX == UINT64_MAX
-# define TCG_TARGET_REG_BITS 64
-#else
-# error Unknown pointer size for tci target
-#endif
-
-#ifdef CONFIG_DEBUG_TCG
-/* Enable debug output. */
-#define CONFIG_DEBUG_TCG_INTERPRETER
-#endif
-
-/* Optional instructions. */
-
-#define TCG_TARGET_HAS_bswap16_i32 1
-#define TCG_TARGET_HAS_bswap32_i32 1
-#define TCG_TARGET_HAS_div_i32 1
-#define TCG_TARGET_HAS_rem_i32 1
-#define TCG_TARGET_HAS_ext8s_i32 1
-#define TCG_TARGET_HAS_ext16s_i32 1
-#define TCG_TARGET_HAS_ext8u_i32 1
-#define TCG_TARGET_HAS_ext16u_i32 1
-#define TCG_TARGET_HAS_andc_i32 0
-#define TCG_TARGET_HAS_deposit_i32 1
-#define TCG_TARGET_HAS_eqv_i32 0
-#define TCG_TARGET_HAS_nand_i32 0
-#define TCG_TARGET_HAS_nor_i32 0
-#define TCG_TARGET_HAS_neg_i32 1
-#define TCG_TARGET_HAS_not_i32 1
-#define TCG_TARGET_HAS_orc_i32 0
-#define TCG_TARGET_HAS_rot_i32 1
-#define TCG_TARGET_HAS_movcond_i32 0
-#define TCG_TARGET_HAS_muls2_i32 0
-#define TCG_TARGET_HAS_muluh_i32 0
-#define TCG_TARGET_HAS_mulsh_i32 0
-
-#if TCG_TARGET_REG_BITS == 64
-#define TCG_TARGET_HAS_extrl_i64_i32 0
-#define TCG_TARGET_HAS_extrh_i64_i32 0
-#define TCG_TARGET_HAS_bswap16_i64 1
-#define TCG_TARGET_HAS_bswap32_i64 1
-#define TCG_TARGET_HAS_bswap64_i64 1
-#define TCG_TARGET_HAS_deposit_i64 1
-#define TCG_TARGET_HAS_div_i64 0
-#define TCG_TARGET_HAS_rem_i64 0
-#define TCG_TARGET_HAS_ext8s_i64 1
-#define TCG_TARGET_HAS_ext16s_i64 1
-#define TCG_TARGET_HAS_ext32s_i64 1
-#define TCG_TARGET_HAS_ext8u_i64 1
-#define TCG_TARGET_HAS_ext16u_i64 1
-#define TCG_TARGET_HAS_ext32u_i64 1
-#define TCG_TARGET_HAS_andc_i64 0
-#define TCG_TARGET_HAS_eqv_i64 0
-#define TCG_TARGET_HAS_nand_i64 0
-#define TCG_TARGET_HAS_nor_i64 0
-#define TCG_TARGET_HAS_neg_i64 1
-#define TCG_TARGET_HAS_not_i64 1
-#define TCG_TARGET_HAS_orc_i64 0
-#define TCG_TARGET_HAS_rot_i64 1
-#define TCG_TARGET_HAS_movcond_i64 0
-#define TCG_TARGET_HAS_muls2_i64 0
-#define TCG_TARGET_HAS_add2_i32 0
-#define TCG_TARGET_HAS_sub2_i32 0
-#define TCG_TARGET_HAS_mulu2_i32 0
-#define TCG_TARGET_HAS_add2_i64 0
-#define TCG_TARGET_HAS_sub2_i64 0
-#define TCG_TARGET_HAS_mulu2_i64 0
-#define TCG_TARGET_HAS_muluh_i64 0
-#define TCG_TARGET_HAS_mulsh_i64 0
-#else
-#define TCG_TARGET_HAS_mulu2_i32 1
-#endif /* TCG_TARGET_REG_BITS == 64 */
-
-/* Number of registers available.
- For 32 bit hosts, we need more than 8 registers (call arguments). */
-/* #define TCG_TARGET_NB_REGS 8 */
-#define TCG_TARGET_NB_REGS 16
-/* #define TCG_TARGET_NB_REGS 32 */
-
-/* List of registers which are used by TCG. */
-typedef enum {
- TCG_REG_R0 = 0,
- TCG_REG_R1,
- TCG_REG_R2,
- TCG_REG_R3,
- TCG_REG_R4,
- TCG_REG_R5,
- TCG_REG_R6,
- TCG_REG_R7,
-#if TCG_TARGET_NB_REGS >= 16
- TCG_REG_R8,
- TCG_REG_R9,
- TCG_REG_R10,
- TCG_REG_R11,
- TCG_REG_R12,
- TCG_REG_R13,
- TCG_REG_R14,
- TCG_REG_R15,
-#if TCG_TARGET_NB_REGS >= 32
- TCG_REG_R16,
- TCG_REG_R17,
- TCG_REG_R18,
- TCG_REG_R19,
- TCG_REG_R20,
- TCG_REG_R21,
- TCG_REG_R22,
- TCG_REG_R23,
- TCG_REG_R24,
- TCG_REG_R25,
- TCG_REG_R26,
- TCG_REG_R27,
- TCG_REG_R28,
- TCG_REG_R29,
- TCG_REG_R30,
- TCG_REG_R31,
-#endif
-#endif
- /* Special value UINT8_MAX is used by TCI to encode constant values. */
- TCG_CONST = UINT8_MAX
-} TCGReg;
-
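
/*
 * Editor's sketch (not part of the original header): tcg_out_ri32() in
 * tcg-target.inc.c encodes an operand either as a single register byte
 * (a value below TCG_TARGET_NB_REGS) or as the marker byte TCG_CONST
 * (UINT8_MAX) followed by an inline 32-bit immediate.  The hypothetical
 * decoder below, with an illustrative name and signature, shows how such
 * an operand could be read back; it is not QEMU's actual tci.c code.
 */
#include <stdint.h>
#include <string.h>

static uint32_t tci_read_ri32_sketch(const uint8_t **tb_ptr,
                                     const uint32_t *regs)
{
    uint8_t b = *(*tb_ptr)++;                /* register index or marker */
    if (b == UINT8_MAX) {                    /* TCG_CONST: inline immediate */
        uint32_t value;
        memcpy(&value, *tb_ptr, sizeof(value));
        *tb_ptr += sizeof(value);
        return value;
    }
    return regs[b];                          /* plain register operand */
}
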
-#define TCG_AREG0 (TCG_TARGET_NB_REGS - 2)
-
-/* Used for function call generation. */
-#define TCG_REG_CALL_STACK (TCG_TARGET_NB_REGS - 1)
-#define TCG_TARGET_CALL_STACK_OFFSET 0
-#define TCG_TARGET_STACK_ALIGN 16
-
-void tci_disas(uint8_t opc);
-
-#define HAVE_TCG_QEMU_TB_EXEC
-
-static inline void flush_icache_range(uintptr_t start, uintptr_t stop)
-{
-}
-
-#endif /* TCG_TARGET_H */
diff --git a/qemu/tcg/tci/tcg-target.inc.c b/qemu/tcg/tci/tcg-target.inc.c
deleted file mode 100644
index e2fc52a16..000000000
--- a/qemu/tcg/tci/tcg-target.inc.c
+++ /dev/null
@@ -1,879 +0,0 @@
-/*
- * Tiny Code Generator for QEMU
- *
- * Copyright (c) 2009, 2011 Stefan Weil
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
- * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-#include "tcg-be-null.h"
-
-/* TODO list:
- * - See TODO comments in code.
- */
-
-/* Marker for missing code. */
-#define TODO() \
- do { \
- fprintf(stderr, "TODO %s:%u: %s()\n", \
- __FILE__, __LINE__, __func__); \
- tcg_abort(); \
- } while (0)
-
-/* Bitfield n...m (in 32 bit value). */
-#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
-
-/* Macros used in tcg_target_op_defs. */
-#define R "r"
-#define RI "ri"
-#if TCG_TARGET_REG_BITS == 32
-# define R64 "r", "r"
-#else
-# define R64 "r"
-#endif
-#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
-# define L "L", "L"
-# define S "S", "S"
-#else
-# define L "L"
-# define S "S"
-#endif
-
-/* TODO: documentation. */
-static const TCGTargetOpDef tcg_target_op_defs[] = {
- { INDEX_op_exit_tb, { NULL } },
- { INDEX_op_goto_tb, { NULL } },
- { INDEX_op_br, { NULL } },
-
- { INDEX_op_ld8u_i32, { R, R } },
- { INDEX_op_ld8s_i32, { R, R } },
- { INDEX_op_ld16u_i32, { R, R } },
- { INDEX_op_ld16s_i32, { R, R } },
- { INDEX_op_ld_i32, { R, R } },
- { INDEX_op_st8_i32, { R, R } },
- { INDEX_op_st16_i32, { R, R } },
- { INDEX_op_st_i32, { R, R } },
-
- { INDEX_op_add_i32, { R, RI, RI } },
- { INDEX_op_sub_i32, { R, RI, RI } },
- { INDEX_op_mul_i32, { R, RI, RI } },
-#if TCG_TARGET_HAS_div_i32
- { INDEX_op_div_i32, { R, R, R } },
- { INDEX_op_divu_i32, { R, R, R } },
- { INDEX_op_rem_i32, { R, R, R } },
- { INDEX_op_remu_i32, { R, R, R } },
-#elif TCG_TARGET_HAS_div2_i32
- { INDEX_op_div2_i32, { R, R, "0", "1", R } },
- { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
-#endif
- /* TODO: Does R, RI, RI result in faster code than R, R, RI?
- If both operands are constants, we can optimize. */
- { INDEX_op_and_i32, { R, RI, RI } },
-#if TCG_TARGET_HAS_andc_i32
- { INDEX_op_andc_i32, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_eqv_i32
- { INDEX_op_eqv_i32, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_nand_i32
- { INDEX_op_nand_i32, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_nor_i32
- { INDEX_op_nor_i32, { R, RI, RI } },
-#endif
- { INDEX_op_or_i32, { R, RI, RI } },
-#if TCG_TARGET_HAS_orc_i32
- { INDEX_op_orc_i32, { R, RI, RI } },
-#endif
- { INDEX_op_xor_i32, { R, RI, RI } },
- { INDEX_op_shl_i32, { R, RI, RI } },
- { INDEX_op_shr_i32, { R, RI, RI } },
- { INDEX_op_sar_i32, { R, RI, RI } },
-#if TCG_TARGET_HAS_rot_i32
- { INDEX_op_rotl_i32, { R, RI, RI } },
- { INDEX_op_rotr_i32, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_deposit_i32
- { INDEX_op_deposit_i32, { R, "0", R } },
-#endif
-
- { INDEX_op_brcond_i32, { R, RI } },
-
- { INDEX_op_setcond_i32, { R, R, RI } },
-#if TCG_TARGET_REG_BITS == 64
- { INDEX_op_setcond_i64, { R, R, RI } },
-#endif /* TCG_TARGET_REG_BITS == 64 */
-
-#if TCG_TARGET_REG_BITS == 32
- /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
- { INDEX_op_add2_i32, { R, R, R, R, R, R } },
- { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
- { INDEX_op_brcond2_i32, { R, R, RI, RI } },
- { INDEX_op_mulu2_i32, { R, R, R, R } },
- { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
-#endif
-
-#if TCG_TARGET_HAS_not_i32
- { INDEX_op_not_i32, { R, R } },
-#endif
-#if TCG_TARGET_HAS_neg_i32
- { INDEX_op_neg_i32, { R, R } },
-#endif
-
-#if TCG_TARGET_REG_BITS == 64
- { INDEX_op_ld8u_i64, { R, R } },
- { INDEX_op_ld8s_i64, { R, R } },
- { INDEX_op_ld16u_i64, { R, R } },
- { INDEX_op_ld16s_i64, { R, R } },
- { INDEX_op_ld32u_i64, { R, R } },
- { INDEX_op_ld32s_i64, { R, R } },
- { INDEX_op_ld_i64, { R, R } },
-
- { INDEX_op_st8_i64, { R, R } },
- { INDEX_op_st16_i64, { R, R } },
- { INDEX_op_st32_i64, { R, R } },
- { INDEX_op_st_i64, { R, R } },
-
- { INDEX_op_add_i64, { R, RI, RI } },
- { INDEX_op_sub_i64, { R, RI, RI } },
- { INDEX_op_mul_i64, { R, RI, RI } },
-#if TCG_TARGET_HAS_div_i64
- { INDEX_op_div_i64, { R, R, R } },
- { INDEX_op_divu_i64, { R, R, R } },
- { INDEX_op_rem_i64, { R, R, R } },
- { INDEX_op_remu_i64, { R, R, R } },
-#elif TCG_TARGET_HAS_div2_i64
- { INDEX_op_div2_i64, { R, R, "0", "1", R } },
- { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
-#endif
- { INDEX_op_and_i64, { R, RI, RI } },
-#if TCG_TARGET_HAS_andc_i64
- { INDEX_op_andc_i64, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_eqv_i64
- { INDEX_op_eqv_i64, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_nand_i64
- { INDEX_op_nand_i64, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_nor_i64
- { INDEX_op_nor_i64, { R, RI, RI } },
-#endif
- { INDEX_op_or_i64, { R, RI, RI } },
-#if TCG_TARGET_HAS_orc_i64
- { INDEX_op_orc_i64, { R, RI, RI } },
-#endif
- { INDEX_op_xor_i64, { R, RI, RI } },
- { INDEX_op_shl_i64, { R, RI, RI } },
- { INDEX_op_shr_i64, { R, RI, RI } },
- { INDEX_op_sar_i64, { R, RI, RI } },
-#if TCG_TARGET_HAS_rot_i64
- { INDEX_op_rotl_i64, { R, RI, RI } },
- { INDEX_op_rotr_i64, { R, RI, RI } },
-#endif
-#if TCG_TARGET_HAS_deposit_i64
- { INDEX_op_deposit_i64, { R, "0", R } },
-#endif
- { INDEX_op_brcond_i64, { R, RI } },
-
-#if TCG_TARGET_HAS_ext8s_i64
- { INDEX_op_ext8s_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext16s_i64
- { INDEX_op_ext16s_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext32s_i64
- { INDEX_op_ext32s_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext8u_i64
- { INDEX_op_ext8u_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext16u_i64
- { INDEX_op_ext16u_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext32u_i64
- { INDEX_op_ext32u_i64, { R, R } },
-#endif
- { INDEX_op_ext_i32_i64, { R, R } },
- { INDEX_op_extu_i32_i64, { R, R } },
-#if TCG_TARGET_HAS_bswap16_i64
- { INDEX_op_bswap16_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_bswap32_i64
- { INDEX_op_bswap32_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_bswap64_i64
- { INDEX_op_bswap64_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_not_i64
- { INDEX_op_not_i64, { R, R } },
-#endif
-#if TCG_TARGET_HAS_neg_i64
- { INDEX_op_neg_i64, { R, R } },
-#endif
-#endif /* TCG_TARGET_REG_BITS == 64 */
-
- { INDEX_op_qemu_ld_i32, { R, L } },
- { INDEX_op_qemu_ld_i64, { R64, L } },
-
- { INDEX_op_qemu_st_i32, { R, S } },
- { INDEX_op_qemu_st_i64, { R64, S } },
-
-#if TCG_TARGET_HAS_ext8s_i32
- { INDEX_op_ext8s_i32, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext16s_i32
- { INDEX_op_ext16s_i32, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext8u_i32
- { INDEX_op_ext8u_i32, { R, R } },
-#endif
-#if TCG_TARGET_HAS_ext16u_i32
- { INDEX_op_ext16u_i32, { R, R } },
-#endif
-
-#if TCG_TARGET_HAS_bswap16_i32
- { INDEX_op_bswap16_i32, { R, R } },
-#endif
-#if TCG_TARGET_HAS_bswap32_i32
- { INDEX_op_bswap32_i32, { R, R } },
-#endif
-
- { -1 },
-};
-
-static const int tcg_target_reg_alloc_order[] = {
- TCG_REG_R0,
- TCG_REG_R1,
- TCG_REG_R2,
- TCG_REG_R3,
-#if 0 /* used for TCG_REG_CALL_STACK */
- TCG_REG_R4,
-#endif
- TCG_REG_R5,
- TCG_REG_R6,
- TCG_REG_R7,
-#if TCG_TARGET_NB_REGS >= 16
- TCG_REG_R8,
- TCG_REG_R9,
- TCG_REG_R10,
- TCG_REG_R11,
- TCG_REG_R12,
- TCG_REG_R13,
- TCG_REG_R14,
- TCG_REG_R15,
-#endif
-};
-
-#if MAX_OPC_PARAM_IARGS != 5
-# error Fix needed, number of supported input arguments changed!
-#endif
-
-static const int tcg_target_call_iarg_regs[] = {
- TCG_REG_R0,
- TCG_REG_R1,
- TCG_REG_R2,
- TCG_REG_R3,
-#if 0 /* used for TCG_REG_CALL_STACK */
- TCG_REG_R4,
-#endif
- TCG_REG_R5,
-#if TCG_TARGET_REG_BITS == 32
- /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
- TCG_REG_R6,
- TCG_REG_R7,
-#if TCG_TARGET_NB_REGS >= 16
- TCG_REG_R8,
- TCG_REG_R9,
- TCG_REG_R10,
-#else
-# error Too few input registers available
-#endif
-#endif
-};
-
-static const int tcg_target_call_oarg_regs[] = {
- TCG_REG_R0,
-#if TCG_TARGET_REG_BITS == 32
- TCG_REG_R1
-#endif
-};
-
-#ifdef CONFIG_DEBUG_TCG
-static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
- "r00",
- "r01",
- "r02",
- "r03",
- "r04",
- "r05",
- "r06",
- "r07",
-#if TCG_TARGET_NB_REGS >= 16
- "r08",
- "r09",
- "r10",
- "r11",
- "r12",
- "r13",
- "r14",
- "r15",
-#if TCG_TARGET_NB_REGS >= 32
- "r16",
- "r17",
- "r18",
- "r19",
- "r20",
- "r21",
- "r22",
- "r23",
- "r24",
- "r25",
- "r26",
- "r27",
- "r28",
- "r29",
- "r30",
- "r31"
-#endif
-#endif
-};
-#endif
-
-static void patch_reloc(tcg_insn_unit *code_ptr, int type,
- intptr_t value, intptr_t addend)
-{
- /* tcg_out_reloc always uses the same type, addend. */
- tcg_debug_assert(type == sizeof(tcg_target_long));
- tcg_debug_assert(addend == 0);
- tcg_debug_assert(value != 0);
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_patch32(code_ptr, value);
- } else {
- tcg_patch64(code_ptr, value);
- }
-}
-
-/* Parse target specific constraints. */
-static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
-{
- const char *ct_str = *pct_str;
- switch (ct_str[0]) {
- case 'r':
- case 'L': /* qemu_ld constraint */
- case 'S': /* qemu_st constraint */
- ct->ct |= TCG_CT_REG;
- tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
- break;
- default:
- return -1;
- }
- ct_str++;
- *pct_str = ct_str;
- return 0;
-}
-
-#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
-/* Show current bytecode. Used by tcg interpreter. */
-void tci_disas(uint8_t opc)
-{
- const TCGOpDef *def = &tcg_op_defs[opc];
- fprintf(stderr, "TCG %s %u, %u, %u\n",
- def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
-}
-#endif
-
-/* Write value (native size). */
-static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
-{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_out32(s, v);
- } else {
- tcg_out64(s, v);
- }
-}
-
-/* Write opcode. */
-static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
-{
- tcg_out8(s, op);
- tcg_out8(s, 0);
-}
-
-/* Write register. */
-static void tcg_out_r(TCGContext *s, TCGArg t0)
-{
- tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
- tcg_out8(s, t0);
-}
-
-/* Write register or constant (native size). */
-static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
-{
- if (const_arg) {
- tcg_debug_assert(const_arg == 1);
- tcg_out8(s, TCG_CONST);
- tcg_out_i(s, arg);
- } else {
- tcg_out_r(s, arg);
- }
-}
-
-/* Write register or constant (32 bit). */
-static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
-{
- if (const_arg) {
- tcg_debug_assert(const_arg == 1);
- tcg_out8(s, TCG_CONST);
- tcg_out32(s, arg);
- } else {
- tcg_out_r(s, arg);
- }
-}
-
-#if TCG_TARGET_REG_BITS == 64
-/* Write register or constant (64 bit). */
-static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
-{
- if (const_arg) {
- tcg_debug_assert(const_arg == 1);
- tcg_out8(s, TCG_CONST);
- tcg_out64(s, arg);
- } else {
- tcg_out_r(s, arg);
- }
-}
-#endif
-
-/* Write label. */
-static void tci_out_label(TCGContext *s, TCGLabel *label)
-{
- if (label->has_value) {
- tcg_out_i(s, label->u.value);
- tcg_debug_assert(label->u.value);
- } else {
- tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
- s->code_ptr += sizeof(tcg_target_ulong);
- }
-}
-
-static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
- intptr_t arg2)
-{
- uint8_t *old_code_ptr = s->code_ptr;
- if (type == TCG_TYPE_I32) {
- tcg_out_op_t(s, INDEX_op_ld_i32);
- tcg_out_r(s, ret);
- tcg_out_r(s, arg1);
- tcg_out32(s, arg2);
- } else {
- tcg_debug_assert(type == TCG_TYPE_I64);
-#if TCG_TARGET_REG_BITS == 64
- tcg_out_op_t(s, INDEX_op_ld_i64);
- tcg_out_r(s, ret);
- tcg_out_r(s, arg1);
- tcg_debug_assert(arg2 == (int32_t)arg2);
- tcg_out32(s, arg2);
-#else
- TODO();
-#endif
- }
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
-{
- uint8_t *old_code_ptr = s->code_ptr;
- tcg_debug_assert(ret != arg);
-#if TCG_TARGET_REG_BITS == 32
- tcg_out_op_t(s, INDEX_op_mov_i32);
-#else
- tcg_out_op_t(s, INDEX_op_mov_i64);
-#endif
- tcg_out_r(s, ret);
- tcg_out_r(s, arg);
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-static void tcg_out_movi(TCGContext *s, TCGType type,
- TCGReg t0, tcg_target_long arg)
-{
- uint8_t *old_code_ptr = s->code_ptr;
- uint32_t arg32 = arg;
- if (type == TCG_TYPE_I32 || arg == arg32) {
- tcg_out_op_t(s, INDEX_op_movi_i32);
- tcg_out_r(s, t0);
- tcg_out32(s, arg32);
- } else {
- tcg_debug_assert(type == TCG_TYPE_I64);
-#if TCG_TARGET_REG_BITS == 64
- tcg_out_op_t(s, INDEX_op_movi_i64);
- tcg_out_r(s, t0);
- tcg_out64(s, arg);
-#else
- TODO();
-#endif
- }
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
-{
- uint8_t *old_code_ptr = s->code_ptr;
- tcg_out_op_t(s, INDEX_op_call);
- tcg_out_ri(s, 1, (uintptr_t)arg);
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
- const int *const_args)
-{
- uint8_t *old_code_ptr = s->code_ptr;
-
- tcg_out_op_t(s, opc);
-
- switch (opc) {
- case INDEX_op_exit_tb:
- tcg_out64(s, args[0]);
- break;
- case INDEX_op_goto_tb:
- if (s->tb_jmp_offset) {
- /* Direct jump method. */
- tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
- s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
- tcg_out32(s, 0);
- } else {
- /* Indirect jump method. */
- TODO();
- }
- tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
- s->tb_next_offset[args[0]] = tcg_current_code_size(s);
- break;
- case INDEX_op_br:
- tci_out_label(s, arg_label(args[0]));
- break;
- case INDEX_op_setcond_i32:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_ri32(s, const_args[2], args[2]);
- tcg_out8(s, args[3]); /* condition */
- break;
-#if TCG_TARGET_REG_BITS == 32
- case INDEX_op_setcond2_i32:
- /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_r(s, args[2]);
- tcg_out_ri32(s, const_args[3], args[3]);
- tcg_out_ri32(s, const_args[4], args[4]);
- tcg_out8(s, args[5]); /* condition */
- break;
-#elif TCG_TARGET_REG_BITS == 64
- case INDEX_op_setcond_i64:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_ri64(s, const_args[2], args[2]);
- tcg_out8(s, args[3]); /* condition */
- break;
-#endif
- case INDEX_op_ld8u_i32:
- case INDEX_op_ld8s_i32:
- case INDEX_op_ld16u_i32:
- case INDEX_op_ld16s_i32:
- case INDEX_op_ld_i32:
- case INDEX_op_st8_i32:
- case INDEX_op_st16_i32:
- case INDEX_op_st_i32:
- case INDEX_op_ld8u_i64:
- case INDEX_op_ld8s_i64:
- case INDEX_op_ld16u_i64:
- case INDEX_op_ld16s_i64:
- case INDEX_op_ld32u_i64:
- case INDEX_op_ld32s_i64:
- case INDEX_op_ld_i64:
- case INDEX_op_st8_i64:
- case INDEX_op_st16_i64:
- case INDEX_op_st32_i64:
- case INDEX_op_st_i64:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_debug_assert(args[2] == (int32_t)args[2]);
- tcg_out32(s, args[2]);
- break;
- case INDEX_op_add_i32:
- case INDEX_op_sub_i32:
- case INDEX_op_mul_i32:
- case INDEX_op_and_i32:
- case INDEX_op_andc_i32: /* Optional (TCG_TARGET_HAS_andc_i32). */
- case INDEX_op_eqv_i32: /* Optional (TCG_TARGET_HAS_eqv_i32). */
- case INDEX_op_nand_i32: /* Optional (TCG_TARGET_HAS_nand_i32). */
- case INDEX_op_nor_i32: /* Optional (TCG_TARGET_HAS_nor_i32). */
- case INDEX_op_or_i32:
- case INDEX_op_orc_i32: /* Optional (TCG_TARGET_HAS_orc_i32). */
- case INDEX_op_xor_i32:
- case INDEX_op_shl_i32:
- case INDEX_op_shr_i32:
- case INDEX_op_sar_i32:
- case INDEX_op_rotl_i32: /* Optional (TCG_TARGET_HAS_rot_i32). */
- case INDEX_op_rotr_i32: /* Optional (TCG_TARGET_HAS_rot_i32). */
- tcg_out_r(s, args[0]);
- tcg_out_ri32(s, const_args[1], args[1]);
- tcg_out_ri32(s, const_args[2], args[2]);
- break;
- case INDEX_op_deposit_i32: /* Optional (TCG_TARGET_HAS_deposit_i32). */
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_r(s, args[2]);
- tcg_debug_assert(args[3] <= UINT8_MAX);
- tcg_out8(s, args[3]);
- tcg_debug_assert(args[4] <= UINT8_MAX);
- tcg_out8(s, args[4]);
- break;
-
-#if TCG_TARGET_REG_BITS == 64
- case INDEX_op_add_i64:
- case INDEX_op_sub_i64:
- case INDEX_op_mul_i64:
- case INDEX_op_and_i64:
- case INDEX_op_andc_i64: /* Optional (TCG_TARGET_HAS_andc_i64). */
- case INDEX_op_eqv_i64: /* Optional (TCG_TARGET_HAS_eqv_i64). */
- case INDEX_op_nand_i64: /* Optional (TCG_TARGET_HAS_nand_i64). */
- case INDEX_op_nor_i64: /* Optional (TCG_TARGET_HAS_nor_i64). */
- case INDEX_op_or_i64:
- case INDEX_op_orc_i64: /* Optional (TCG_TARGET_HAS_orc_i64). */
- case INDEX_op_xor_i64:
- case INDEX_op_shl_i64:
- case INDEX_op_shr_i64:
- case INDEX_op_sar_i64:
- case INDEX_op_rotl_i64: /* Optional (TCG_TARGET_HAS_rot_i64). */
- case INDEX_op_rotr_i64: /* Optional (TCG_TARGET_HAS_rot_i64). */
- tcg_out_r(s, args[0]);
- tcg_out_ri64(s, const_args[1], args[1]);
- tcg_out_ri64(s, const_args[2], args[2]);
- break;
- case INDEX_op_deposit_i64: /* Optional (TCG_TARGET_HAS_deposit_i64). */
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_r(s, args[2]);
- tcg_debug_assert(args[3] <= UINT8_MAX);
- tcg_out8(s, args[3]);
- tcg_debug_assert(args[4] <= UINT8_MAX);
- tcg_out8(s, args[4]);
- break;
- case INDEX_op_div_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
- case INDEX_op_divu_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
- case INDEX_op_rem_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
- case INDEX_op_remu_i64: /* Optional (TCG_TARGET_HAS_div_i64). */
- TODO();
- break;
- case INDEX_op_div2_i64: /* Optional (TCG_TARGET_HAS_div2_i64). */
- case INDEX_op_divu2_i64: /* Optional (TCG_TARGET_HAS_div2_i64). */
- TODO();
- break;
- case INDEX_op_brcond_i64:
- tcg_out_r(s, args[0]);
- tcg_out_ri64(s, const_args[1], args[1]);
- tcg_out8(s, args[2]); /* condition */
- tci_out_label(s, arg_label(args[3]));
- break;
- case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
- case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
- case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
- case INDEX_op_not_i64: /* Optional (TCG_TARGET_HAS_not_i64). */
- case INDEX_op_neg_i64: /* Optional (TCG_TARGET_HAS_neg_i64). */
- case INDEX_op_ext8s_i64: /* Optional (TCG_TARGET_HAS_ext8s_i64). */
- case INDEX_op_ext8u_i64: /* Optional (TCG_TARGET_HAS_ext8u_i64). */
- case INDEX_op_ext16s_i64: /* Optional (TCG_TARGET_HAS_ext16s_i64). */
- case INDEX_op_ext16u_i64: /* Optional (TCG_TARGET_HAS_ext16u_i64). */
- case INDEX_op_ext32s_i64: /* Optional (TCG_TARGET_HAS_ext32s_i64). */
- case INDEX_op_ext32u_i64: /* Optional (TCG_TARGET_HAS_ext32u_i64). */
- case INDEX_op_ext_i32_i64:
- case INDEX_op_extu_i32_i64:
-#endif /* TCG_TARGET_REG_BITS == 64 */
- case INDEX_op_neg_i32: /* Optional (TCG_TARGET_HAS_neg_i32). */
- case INDEX_op_not_i32: /* Optional (TCG_TARGET_HAS_not_i32). */
- case INDEX_op_ext8s_i32: /* Optional (TCG_TARGET_HAS_ext8s_i32). */
- case INDEX_op_ext16s_i32: /* Optional (TCG_TARGET_HAS_ext16s_i32). */
- case INDEX_op_ext8u_i32: /* Optional (TCG_TARGET_HAS_ext8u_i32). */
- case INDEX_op_ext16u_i32: /* Optional (TCG_TARGET_HAS_ext16u_i32). */
- case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
- case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- break;
- case INDEX_op_div_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
- case INDEX_op_divu_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
- case INDEX_op_rem_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
- case INDEX_op_remu_i32: /* Optional (TCG_TARGET_HAS_div_i32). */
- tcg_out_r(s, args[0]);
- tcg_out_ri32(s, const_args[1], args[1]);
- tcg_out_ri32(s, const_args[2], args[2]);
- break;
- case INDEX_op_div2_i32: /* Optional (TCG_TARGET_HAS_div2_i32). */
- case INDEX_op_divu2_i32: /* Optional (TCG_TARGET_HAS_div2_i32). */
- TODO();
- break;
-#if TCG_TARGET_REG_BITS == 32
- case INDEX_op_add2_i32:
- case INDEX_op_sub2_i32:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_r(s, args[2]);
- tcg_out_r(s, args[3]);
- tcg_out_r(s, args[4]);
- tcg_out_r(s, args[5]);
- break;
- case INDEX_op_brcond2_i32:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_ri32(s, const_args[2], args[2]);
- tcg_out_ri32(s, const_args[3], args[3]);
- tcg_out8(s, args[4]); /* condition */
- tci_out_label(s, arg_label(args[5]));
- break;
- case INDEX_op_mulu2_i32:
- tcg_out_r(s, args[0]);
- tcg_out_r(s, args[1]);
- tcg_out_r(s, args[2]);
- tcg_out_r(s, args[3]);
- break;
-#endif
- case INDEX_op_brcond_i32:
- tcg_out_r(s, args[0]);
- tcg_out_ri32(s, const_args[1], args[1]);
- tcg_out8(s, args[2]); /* condition */
- tci_out_label(s, arg_label(args[3]));
- break;
- case INDEX_op_qemu_ld_i32:
- tcg_out_r(s, *args++);
- tcg_out_r(s, *args++);
- if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
- tcg_out_r(s, *args++);
- }
- tcg_out_i(s, *args++);
- break;
- case INDEX_op_qemu_ld_i64:
- tcg_out_r(s, *args++);
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_out_r(s, *args++);
- }
- tcg_out_r(s, *args++);
- if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
- tcg_out_r(s, *args++);
- }
- tcg_out_i(s, *args++);
- break;
- case INDEX_op_qemu_st_i32:
- tcg_out_r(s, *args++);
- tcg_out_r(s, *args++);
- if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
- tcg_out_r(s, *args++);
- }
- tcg_out_i(s, *args++);
- break;
- case INDEX_op_qemu_st_i64:
- tcg_out_r(s, *args++);
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_out_r(s, *args++);
- }
- tcg_out_r(s, *args++);
- if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
- tcg_out_r(s, *args++);
- }
- tcg_out_i(s, *args++);
- break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
- case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
- case INDEX_op_movi_i64:
- case INDEX_op_call: /* Always emitted via tcg_out_call. */
- default:
- tcg_abort();
- }
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
- intptr_t arg2)
-{
- uint8_t *old_code_ptr = s->code_ptr;
- if (type == TCG_TYPE_I32) {
- tcg_out_op_t(s, INDEX_op_st_i32);
- tcg_out_r(s, arg);
- tcg_out_r(s, arg1);
- tcg_out32(s, arg2);
- } else {
- tcg_debug_assert(type == TCG_TYPE_I64);
-#if TCG_TARGET_REG_BITS == 64
- tcg_out_op_t(s, INDEX_op_st_i64);
- tcg_out_r(s, arg);
- tcg_out_r(s, arg1);
- tcg_out32(s, arg2);
-#else
- TODO();
-#endif
- }
- old_code_ptr[1] = s->code_ptr - old_code_ptr;
-}
-
-/* Test if a constant matches the constraint. */
-static int tcg_target_const_match(tcg_target_long val, TCGType type,
- const TCGArgConstraint *arg_ct)
-{
- /* No need to return 0 or 1, 0 or != 0 is good enough. */
- return arg_ct->ct & TCG_CT_CONST;
-}
-
-static void tcg_target_init(TCGContext *s)
-{
-#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
- const char *envval = getenv("DEBUG_TCG");
- if (envval) {
- qemu_set_log(strtol(envval, NULL, 0));
- }
-#endif
-
- /* The current code uses uint8_t for tcg operations. */
- tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);
-
- /* Registers available for 32 bit operations. */
- tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
- BIT(TCG_TARGET_NB_REGS) - 1);
- /* Registers available for 64 bit operations. */
- tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
- BIT(TCG_TARGET_NB_REGS) - 1);
- /* TODO: Which registers should be set here? */
- tcg_regset_set32(tcg_target_call_clobber_regs, 0,
- BIT(TCG_TARGET_NB_REGS) - 1);
-
- tcg_regset_clear(s->reserved_regs);
- tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
- tcg_add_target_add_op_defs(tcg_target_op_defs);
-
- /* We use negative offsets from "sp" so that we can distinguish
- stores that might pretend to be call arguments. */
- tcg_set_frame(s, TCG_REG_CALL_STACK,
- -CPU_TEMP_BUF_NLONGS * sizeof(long),
- CPU_TEMP_BUF_NLONGS * sizeof(long));
-}
-
-/* Generate global QEMU prologue and epilogue code. */
-static inline void tcg_target_qemu_prologue(TCGContext *s)
-{
-}