github.com/goccy/go-jit@v0.0.0-20200514131505-ff78d45cf6af/internal/ccall/jit-gen-x86.h (about)

     1  /*
     2   * jit-gen-x86.h: Macros for generating x86 code
     3   *
     4   * Authors:
     5   *   Paolo Molaro (lupus@ximian.com)
     6   *   Intel Corporation (ORP Project)
     7   *   Sergey Chaban (serge@wildwestsoftware.com)
     8   *   Dietmar Maurer (dietmar@ximian.com)
     9   *   Patrik Torstensson
    10   * 
    11   * Copyright (C)  2000 Intel Corporation.  All rights reserved.
    12   * Copyright (C)  2001, 2002 Ximian, Inc.
    13   *
    14   * This file originated with the Mono project (www.go-mono.com), and may
    15   * be redistributed under the terms of the Lesser General Public License.
    16   */
    17  
#ifndef JIT_GEN_X86_H
#define JIT_GEN_X86_H
/* Soft assertion used inside the emitter macros below: on failure it
 * executes `break`, which exits the enclosing do { ... } while (0)
 * wrapper and silently abandons emission of the current instruction.
 * It is therefore only meaningful inside such a wrapper. */
#define	jit_assert(x)	if (!(x)) break
    21  /*
    22  // x86 register numbers
    23  */
/* General-purpose register numbers as encoded in ModR/M reg/rm fields
 * and in short-form opcodes (e.g. inc/dec reg, push/pop reg). */
typedef enum {
	X86_EAX = 0,
	X86_ECX = 1,
	X86_EDX = 2,
	X86_EBX = 3,
	X86_ESP = 4,
	X86_EBP = 5,
	X86_ESI = 6,
	X86_EDI = 7,
	X86_NREG	/* count of general-purpose registers */
} X86_Reg_No;
    35  /*
    36  // opcodes for alu instructions
    37  */
/* ALU operation selectors.  Each value is both the /digit ModR/M
 * extension for the 0x80/0x81/0x83 immediate forms and the opcode-row
 * index for the register forms (opcode byte = (opc << 3) + variant). */
typedef enum {
	X86_ADD = 0,
	X86_OR  = 1,
	X86_ADC = 2,
	X86_SBB = 3,
	X86_AND = 4,
	X86_SUB = 5,
	X86_XOR = 6,
	X86_CMP = 7,
	X86_NALU	/* count of ALU opcodes */
} X86_ALU_Opcode;
    49  /*
    50  // opcodes for shift instructions
    51  */
/* Shift/rotate selectors: the /digit ModR/M extension for the
 * 0xC1/0xD1/0xD3 opcode group.  NOTE: X86_SHLD and X86_SHLR are listed
 * without explicit values and so deliberately alias 0 and 1 (same as
 * X86_ROL / X86_ROR); value 6 is unassigned (undocumented alias of SHL
 * in the hardware group). */
typedef enum {
	X86_SHLD,
	X86_SHLR,
	X86_ROL = 0,
	X86_ROR = 1,
	X86_RCL = 2,
	X86_RCR = 3,
	X86_SHL = 4,
	X86_SHR = 5,
	X86_SAR = 7,
	X86_NSHIFT = 8	/* count of shift opcode slots */
} X86_Shift_Opcode;
    64  /*
    65  // opcodes for floating-point instructions
    66  */
/* x87 arithmetic selectors: the /digit extension for the 0xD8/0xDC
 * floating-point opcode groups. */
typedef enum {
	X86_FADD  = 0,
	X86_FMUL  = 1,
	X86_FCOM  = 2,
	X86_FCOMP = 3,
	X86_FSUB  = 4,
	X86_FSUBR = 5,
	X86_FDIV  = 6,
	X86_FDIVR = 7,
	X86_NFP   = 8	/* count of FP opcodes */
} X86_FP_Opcode;
    78  /*
    79  // integer conditions codes
    80  */
/* Abstract condition codes.  These are NOT the hardware cc nibbles;
 * they index x86_cc_unsigned_map / x86_cc_signed_map below, which
 * translate each condition to the proper short-form Jcc opcode for
 * unsigned or signed comparisons.  Aliases share one slot (e.g.
 * EQ/E/Z all map to index 0). */
typedef enum {
	X86_CC_EQ = 0, X86_CC_E = 0, X86_CC_Z = 0,
	X86_CC_NE = 1, X86_CC_NZ = 1,
	X86_CC_LT = 2, X86_CC_B = 2, X86_CC_C = 2, X86_CC_NAE = 2,
	X86_CC_LE = 3, X86_CC_BE = 3, X86_CC_NA = 3,
	X86_CC_GT = 4, X86_CC_A = 4, X86_CC_NBE = 4,
	X86_CC_GE = 5, X86_CC_AE = 5, X86_CC_NB = 5, X86_CC_NC = 5,
	X86_CC_LZ = 6, X86_CC_S = 6,
	X86_CC_GEZ = 7, X86_CC_NS = 7,
	X86_CC_P = 8, X86_CC_PE = 8,
	X86_CC_NP = 9, X86_CC_PO = 9,
	X86_CC_O = 10,
	X86_CC_NO = 11,
	X86_NCC	/* count of condition codes / size of the cc maps */
} X86_CC;
    96  
    97  /* FP status */
/* FP status word condition bits (x87 FSW).  C0/C1/C2 are bits 8-10;
 * C3 is bit 14, hence the non-contiguous 0x4000 value and the mask
 * 0x4500 = C0|C2|C3 (C1 is intentionally excluded from the compare
 * mask). */
enum {
	X86_FP_C0 = 0x100,
	X86_FP_C1 = 0x200,
	X86_FP_C2 = 0x400,
	X86_FP_C3 = 0x4000,
	X86_FP_CC_MASK = 0x4500
};
   105  
   106  /* FP control word */
/* FP control word (x87 FCW) bit masks and field values. */
enum {
	/* exception mask bits (1 = exception masked) */
	X86_FPCW_INVOPEX_MASK = 0x1,
	X86_FPCW_DENOPEX_MASK = 0x2,
	X86_FPCW_ZERODIV_MASK = 0x4,
	X86_FPCW_OVFEX_MASK   = 0x8,
	X86_FPCW_UNDFEX_MASK  = 0x10,
	X86_FPCW_PRECEX_MASK  = 0x20,
	/* two-bit control fields */
	X86_FPCW_PRECC_MASK   = 0x300,
	X86_FPCW_ROUNDC_MASK  = 0xc00,

	/* values for precision control */
	X86_FPCW_PREC_SINGLE    = 0,
	X86_FPCW_PREC_DOUBLE    = 0x200,
	X86_FPCW_PREC_EXTENDED  = 0x300,

	/* values for rounding control */
	X86_FPCW_ROUND_NEAREST  = 0,
	X86_FPCW_ROUND_DOWN     = 0x400,
	X86_FPCW_ROUND_UP       = 0x800,
	X86_FPCW_ROUND_TOZERO   = 0xc00
};
   128  
   129  /*
   130  // prefix code
   131  */
/* Instruction prefix bytes.  Note the deliberate aliases: REPZ and REP
 * share 0xF3, and the UNLIKELY/LIKELY branch hints reuse the CS/DS
 * segment-override bytes (their meaning on conditional jumps). */
typedef enum {
	X86_LOCK_PREFIX = 0xF0,
	X86_REPNZ_PREFIX = 0xF2,
	X86_REPZ_PREFIX = 0xF3, 
	X86_REP_PREFIX = 0xF3,
	X86_CS_PREFIX = 0x2E,
	X86_SS_PREFIX = 0x36,
	X86_DS_PREFIX = 0x3E,
	X86_ES_PREFIX = 0x26,
	X86_FS_PREFIX = 0x64,
	X86_GS_PREFIX = 0x65,
	X86_UNLIKELY_PREFIX = 0x2E,	/* branch-hint use of CS override */
	X86_LIKELY_PREFIX = 0x3E,	/* branch-hint use of DS override */
	X86_OPERAND_PREFIX = 0x66,	/* 16-bit operand size */
	X86_ADDRESS_PREFIX = 0x67	/* 16-bit address size */
} X86_Prefix;
   148  
/* Map X86_CC index -> short-form (rel8) Jcc opcode for UNSIGNED
 * comparisons.  The 32-bit-displacement form is 0x0F followed by
 * (opcode + 0x10); SETcc is 0x0F followed by (opcode + 0x20). */
static const unsigned char 
x86_cc_unsigned_map [X86_NCC] = {
	0x74, /* eq  */
	0x75, /* ne  */
	0x72, /* lt  */
	0x76, /* le  */
	0x77, /* gt  */
	0x73, /* ge  */
	0x78, /* lz  */
	0x79, /* gez */
	0x7a, /* p   */
	0x7b, /* np  */
	0x70, /* o  */
	0x71, /* no  */
};

/* Same mapping for SIGNED comparisons; only the four ordering
 * conditions (lt/le/gt/ge) differ from the unsigned table. */
static const unsigned char 
x86_cc_signed_map [X86_NCC] = {
	0x74, /* eq  */
	0x75, /* ne  */
	0x7c, /* lt  */
	0x7e, /* le  */
	0x7f, /* gt  */
	0x7d, /* ge  */
	0x78, /* lz  */
	0x79, /* gez */
	0x7a, /* p   */
	0x7b, /* np  */
	0x70, /* o  */
	0x71, /* no  */
};
   180  
/* Type-punning union used by x86_imm_emit32 to split a 32-bit
 * immediate into its four bytes.  Byte order follows the host, which
 * is assumed little-endian (this header only targets x86). */
typedef union {
	int val;
	unsigned char b [4];
} x86_imm_buf;
   185  
   186  #define X86_NOBASEREG (-1)
   187  
   188  /*
   189  // bitvector mask for callee-saved registers
   190  */
   191  #define X86_ESI_MASK (1<<X86_ESI)
   192  #define X86_EDI_MASK (1<<X86_EDI)
   193  #define X86_EBX_MASK (1<<X86_EBX)
   194  #define X86_EBP_MASK (1<<X86_EBP)
   195  
   196  #define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
   197  #define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
   198  #define X86_BYTE_REGS   ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))
   199  
   200  #define X86_IS_SCRATCH(reg) (X86_CALLER_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
   201  #define X86_IS_CALLEE(reg)  (X86_CALLEE_REGS & (1 << (reg))) 	/* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
   202  
   203  #define X86_IS_BYTE_REG(reg) ((reg) < 4)
   204  
   205  /*
   206  // Frame structure:
   207  //
   208  //      +--------------------------------+
   209  //      | in_arg[0]       = var[0]	     |
   210  //      | in_arg[1]	      = var[1]	     |
   211  //      |	      . . .			         |
   212  //      | in_arg[n_arg-1] = var[n_arg-1] |
   213  //      +--------------------------------+
   214  //      |       return IP                |
   215  //      +--------------------------------+
   216  //      |       saved EBP                | <-- frame pointer (EBP)
   217  //      +--------------------------------+
   218  //      |            ...                 |  n_extra
   219  //      +--------------------------------+
   220  //      |	    var[n_arg]	             |
   221  //      |	    var[n_arg+1]             |  local variables area
   222  //      |          . . .                 |
   223  //      |	    var[n_var-1]             | 
   224  //      +--------------------------------+
   225  //      |			                     |
   226  //      |			                     |  
   227  //      |		spill area               | area for spilling mimic stack
   228  //      |			                     |
   229  //      +--------------------------------|
   230  //      |          ebx                   |
   231  //      |          ebp [ESP_Frame only]  |
   232  //      |	       esi                   |  0..3 callee-saved regs
   233  //      |          edi                   | <-- stack pointer (ESP)
   234  //      +--------------------------------+
   235  //      |	stk0	                     |
   236  //      |	stk1	                     |  operand stack area/
   237  //      |	. . .	                     |  out args
   238  //      |	stkn-1	                     |
   239  //      +--------------------------------|
   240  //
   241  //
   242  */
   243  
   244  
   245  /*
   246   * useful building blocks
   247   */
   248  #define x86_modrm_mod(modrm) ((modrm) >> 6)
   249  #define x86_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
   250  #define x86_modrm_rm(modrm) ((modrm) & 0x7)
   251  
   252  #define x86_address_byte(inst,m,o,r) do { *(inst)++ = ((((m)&0x03)<<6)|(((o)&0x07)<<3)|(((r)&0x07))); } while (0)
   253  #define x86_imm_emit32(inst,imm)     \
   254  	do {	\
   255  			x86_imm_buf imb; imb.val = (int) (imm);	\
   256  			*(inst)++ = imb.b [0];	\
   257  			*(inst)++ = imb.b [1];	\
   258  			*(inst)++ = imb.b [2];	\
   259  			*(inst)++ = imb.b [3];	\
   260  	} while (0)
   261  #define x86_imm_emit16(inst,imm)     do { *(short*)(inst) = (imm); (inst) += 2; } while (0)
   262  #define x86_imm_emit8(inst,imm)      do { *(inst) = (unsigned char)((imm) & 0xff); ++(inst); } while (0)
   263  #define x86_is_imm8(imm)             (((jit_nint)(imm) >= -128 && (jit_nint)(imm) <= 127))
   264  #define x86_is_imm16(imm)            (((jit_nint)(imm) >= -(1<<16) && (jit_nint)(imm) <= ((1<<16)-1)))
   265  
   266  #define x86_reg_emit(inst,r,regno)   do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
   267  #define x86_reg8_emit(inst,r,regno,is_rh,is_rnoh)   do {x86_address_byte ((inst), 3, (is_rh)?((r)|4):(r), (is_rnoh)?((regno)|4):(regno));} while (0)
   268  #define x86_regp_emit(inst,r,regno)  do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
   269  #define x86_mem_emit(inst,r,disp)    do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
   270  
/* Emit the ModR/M (and SIB, if needed) bytes for a [basereg + disp]
 * operand, choosing the shortest encoding:
 *  - ESP as base always requires a SIB byte (rm=4 escapes to SIB);
 *  - disp==0 uses mod=0 with no displacement, except for EBP, whose
 *    mod=0 encoding means "absolute disp32" and so must carry a disp8;
 *  - otherwise disp8 (mod=1) or disp32 (mod=2).
 * The bare `break`s rely on the enclosing do/while(0). */
#define x86_membase_emit(inst,r,basereg,disp)	do {\
	if ((basereg) == X86_ESP) {	\
		if ((disp) == 0) {	\
			x86_address_byte ((inst), 0, (r), X86_ESP);	\
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP);	\
		} else if (x86_is_imm8((disp))) {	\
			x86_address_byte ((inst), 1, (r), X86_ESP);	\
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP);	\
			x86_imm_emit8 ((inst), (disp));	\
		} else {	\
			x86_address_byte ((inst), 2, (r), X86_ESP);	\
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP);	\
			x86_imm_emit32 ((inst), (disp));	\
		}	\
		break;	\
	}	\
	if ((disp) == 0 && (basereg) != X86_EBP) {	\
		x86_address_byte ((inst), 0, (r), (basereg));	\
		break;	\
	}	\
	if (x86_is_imm8((disp))) {	\
		x86_address_byte ((inst), 1, (r), (basereg));	\
		x86_imm_emit8 ((inst), (disp));	\
	} else {	\
		x86_address_byte ((inst), 2, (r), (basereg));	\
		x86_imm_emit32 ((inst), (disp));	\
	}	\
	} while (0)
   299  
/* Emit ModR/M + SIB for a [basereg + indexreg<<shift + disp] operand.
 * X86_NOBASEREG selects the SIB base=5/mod=0 form (disp32, no base);
 * otherwise disp width is chosen as in x86_membase_emit (EBP base
 * cannot use the no-displacement form). */
#define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift)	\
	do {	\
		if ((basereg) == X86_NOBASEREG) {	\
			x86_address_byte ((inst), 0, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), 5);	\
			x86_imm_emit32 ((inst), (disp));	\
		} else if ((disp) == 0 && (basereg) != X86_EBP) {	\
			x86_address_byte ((inst), 0, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
		} else if (x86_is_imm8((disp))) {	\
			x86_address_byte ((inst), 1, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
			x86_imm_emit8 ((inst), (disp));	\
		} else {	\
			x86_address_byte ((inst), 2, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
			x86_imm_emit32 ((inst), (disp));	\
		}	\
	} while (0)
   319  
   320  /*
   321   * target is the position in the code where to jump to:
   322   * target = code;
   323   * .. output loop code...
   324   * x86_mov_reg_imm (code, X86_EAX, 0);
   325   * loop = code;
   326   * x86_loop (code, -1);
   327   * ... finish method
   328   *
   329   * patch displacement
   330   * x86_patch (loop, target);
   331   *
   332   * ins should point at the start of the instruction that encodes a target.
   333   * the instruction is inspected for validity and the correct displacement
   334   * is inserted.
   335   */
/* Back-patch the displacement of a branch/call at `ins` to reach
 * `target`.  The opcode is inspected to learn the displacement width:
 * call/jmp32 (0xe8/0xe9) and the 0x0f jcc32 prefix take a 4-byte
 * displacement; loop/jmp8/jcc8 take 1 byte.  `pos` points at the
 * displacement field; the -4/-1 corrections account for the
 * displacement being relative to the END of the instruction.
 * Invalid opcodes fall through to jit_assert, which abandons the
 * patch (see jit_assert above). */
#define x86_patch(ins,target)	\
	do {	\
		unsigned char* pos = (ins) + 1;	\
		int disp, size = 0;	\
		switch (*(unsigned char*)(ins)) {	\
		case 0xe8: case 0xe9: ++size; break; /* call, jump32 */	\
		case 0x0f: if (!(*pos >= 0x70 && *pos <= 0x8f)) jit_assert (0);	\
		   ++size; ++pos; break; /* prefix for 32-bit disp */	\
		case 0xe0: case 0xe1: case 0xe2: /* loop */	\
		case 0xeb: /* jump8 */	\
		/* conditional jump opcodes */	\
		case 0x70: case 0x71: case 0x72: case 0x73:	\
		case 0x74: case 0x75: case 0x76: case 0x77:	\
		case 0x78: case 0x79: case 0x7a: case 0x7b:	\
		case 0x7c: case 0x7d: case 0x7e: case 0x7f:	\
			break;	\
		default: jit_assert (0);	\
		}	\
		disp = (target) - pos;	\
		if (size) x86_imm_emit32 (pos, disp - 4);	\
		else if (x86_is_imm8 (disp - 1)) x86_imm_emit8 (pos, disp - 1);	\
		else jit_assert (0);	\
	} while (0)
   359  
/* int3 software breakpoint. */
#define x86_breakpoint(inst) \
	do {	\
		*(inst)++ = 0xcc;	\
	} while (0)

/* Single-byte string/flag instructions (cld, stos, movs). */
#define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
#define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
#define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
#define x86_stosd(inst) x86_stosl((inst))
#define x86_movsb(inst) do { *(inst)++ =(unsigned char)0xa4; } while (0)
#define x86_movsl(inst) do { *(inst)++ =(unsigned char)0xa5; } while (0)
#define x86_movsd(inst) x86_movsl((inst))

/* Emit a raw prefix byte (see X86_Prefix). */
#define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)

/* rdtsc: read time-stamp counter into EDX:EAX. */
#define x86_rdtsc(inst) \
	do {	\
		*(inst)++ = 0x0f;	\
		*(inst)++ = 0x31;	\
	} while (0)
   380  
/* cmpxchg rm, reg (0F B1): compare EAX with the destination operand,
 * store `reg` there on match.  `reg` goes in the ModR/M reg field. */
#define x86_cmpxchg_reg_reg(inst,dreg,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		*(inst)++ = (unsigned char)0xb1;	\
		x86_reg_emit ((inst), (reg), (dreg));	\
	} while (0)
	
#define x86_cmpxchg_mem_reg(inst,mem,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		*(inst)++ = (unsigned char)0xb1;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)
	
#define x86_cmpxchg_membase_reg(inst,basereg,disp,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		*(inst)++ = (unsigned char)0xb1;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)

/* xchg: 0x86 for byte operands (size==1), 0x87 for word/dword. */
#define x86_xchg_reg_reg(inst,dreg,reg,size)	\
	do {	\
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0x86;	\
		else	\
			*(inst)++ = (unsigned char)0x87;	\
		x86_reg_emit ((inst), (reg), (dreg));	\
	} while (0)

#define x86_xchg_mem_reg(inst,mem,reg,size)	\
	do {	\
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0x86;	\
		else	\
			*(inst)++ = (unsigned char)0x87;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

#define x86_xchg_membase_reg(inst,basereg,disp,reg,size)	\
	do {	\
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0x86;	\
		else	\
			*(inst)++ = (unsigned char)0x87;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)

/* xadd (0F C0/C1): exchange-and-add; pair with x86_prefix(inst,
 * X86_LOCK_PREFIX) for an atomic version. */
#define x86_xadd_reg_reg(inst,dreg,reg,size)	\
	do {	\
		*(inst)++ = (unsigned char)0x0F;     \
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0xC0;	\
		else	\
			*(inst)++ = (unsigned char)0xC1;	\
		x86_reg_emit ((inst), (reg), (dreg));	\
	} while (0)

#define x86_xadd_mem_reg(inst,mem,reg,size)	\
	do {	\
		*(inst)++ = (unsigned char)0x0F;     \
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0xC0;	\
		else	\
			*(inst)++ = (unsigned char)0xC1;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

#define x86_xadd_membase_reg(inst,basereg,disp,reg,size)	\
	do {	\
		*(inst)++ = (unsigned char)0x0F;     \
		if ((size) == 1)	\
			*(inst)++ = (unsigned char)0xC0;	\
		else	\
			*(inst)++ = (unsigned char)0xC1;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)
   458  
/* inc/dec memory forms: opcode 0xFF with /0 (inc) or /1 (dec). */
#define x86_inc_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_mem_emit ((inst), 0, (mem)); 	\
	} while (0)

#define x86_inc_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
	} while (0)

/* Short-form inc reg: single byte 0x40+reg. */
#define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)

#define x86_dec_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_mem_emit ((inst), 1, (mem));	\
	} while (0)

#define x86_dec_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_membase_emit ((inst), 1, (basereg), (disp));	\
	} while (0)

/* Short-form dec reg: single byte 0x48+reg. */
#define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)

/* not/neg: opcode 0xF7 with /2 (not) or /3 (neg). */
#define x86_not_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_mem_emit ((inst), 2, (mem));	\
	} while (0)

#define x86_not_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_membase_emit ((inst), 2, (basereg), (disp));	\
	} while (0)

#define x86_not_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_reg_emit ((inst), 2, (reg));	\
	} while (0)

#define x86_neg_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_mem_emit ((inst), 3, (mem));	\
	} while (0)

#define x86_neg_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_membase_emit ((inst), 3, (basereg), (disp));	\
	} while (0)

#define x86_neg_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_reg_emit ((inst), 3, (reg));	\
	} while (0)

/* One-byte nop. */
#define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
   524  
/* ALU op with a 32-bit register destination and immediate source.
 * Uses the short EAX form ((opc<<3)+5, e.g. 0x05 = add eax,imm32),
 * the sign-extended imm8 form (0x83 /opc), or the imm32 form
 * (0x81 /opc).  `opc` is an X86_ALU_Opcode. */
#define x86_alu_reg_imm(inst,opc,reg,imm) 	\
	do {	\
		if ((reg) == X86_EAX) {	\
			*(inst)++ = (((unsigned char)(opc)) << 3) + 5;	\
			x86_imm_emit32 ((inst), (imm));	\
			break;	\
		}	\
		if (x86_is_imm8((imm))) {	\
			*(inst)++ = (unsigned char)0x83;	\
			x86_reg_emit ((inst), (opc), (reg));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x81;	\
			x86_reg_emit ((inst), (opc), (reg));	\
			x86_imm_emit32 ((inst), (imm));	\
		}	\
	} while (0)

/* Same as above for a 16-bit destination: 0x66 operand-size prefix,
 * then imm16 (or sign-extended imm8) forms. */
#define x86_alu_reg16_imm(inst,opc,reg,imm) 	\
	do {	\
		*(inst)++ = (unsigned char)0x66;	\
		if ((reg) == X86_EAX) {	\
			*(inst)++ = (((unsigned char)(opc)) << 3) + 5;	\
			x86_imm_emit16 ((inst), (imm));	\
			break;	\
		}	\
		if (x86_is_imm8((imm))) {	\
			*(inst)++ = (unsigned char)0x83;	\
			x86_reg_emit ((inst), (opc), (reg));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x81;	\
			x86_reg_emit ((inst), (opc), (reg));	\
			x86_imm_emit16 ((inst), (imm));	\
		}	\
	} while (0)

/* ALU op on an absolute memory operand with an immediate source. */
#define x86_alu_mem_imm(inst,opc,mem,imm) 	\
	do {	\
		if (x86_is_imm8((imm))) {	\
			*(inst)++ = (unsigned char)0x83;	\
			x86_mem_emit ((inst), (opc), (mem));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x81;	\
			x86_mem_emit ((inst), (opc), (mem));	\
			x86_imm_emit32 ((inst), (imm));	\
		}	\
	} while (0)

/* ALU op on a [basereg+disp] operand with an immediate source. */
#define x86_alu_membase_imm(inst,opc,basereg,disp,imm) 	\
	do {	\
		if (x86_is_imm8((imm))) {	\
			*(inst)++ = (unsigned char)0x83;	\
			x86_membase_emit ((inst), (opc), (basereg), (disp));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x81;	\
			x86_membase_emit ((inst), (opc), (basereg), (disp));	\
			x86_imm_emit32 ((inst), (imm));	\
		}	\
	} while (0)
	
/* Byte-sized ALU op (opcode 0x80) on [basereg+disp] with imm8. */
#define x86_alu_membase8_imm(inst,opc,basereg,disp,imm) 	\
	do {	\
		*(inst)++ = (unsigned char)0x80;	\
		x86_membase_emit ((inst), (opc), (basereg), (disp));	\
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* ALU op, memory destination, register source: opcode (opc<<3)+1. */
#define x86_alu_mem_reg(inst,opc,mem,reg)	\
	do {	\
		*(inst)++ = (((unsigned char)(opc)) << 3) + 1;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

#define x86_alu_membase_reg(inst,opc,basereg,disp,reg)	\
	do {	\
		*(inst)++ = (((unsigned char)(opc)) << 3) + 1;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)

/* ALU op, register destination, register source: opcode (opc<<3)+3
 * (the "reg, rm" direction). */
#define x86_alu_reg_reg(inst,opc,dreg,reg)	\
	do {	\
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3;	\
		x86_reg_emit ((inst), (dreg), (reg));	\
	} while (0)
   612  
   613  /**
   614   * @x86_alu_reg8_reg8:
   615   * Supports ALU operations between two 8-bit registers.
   616   * dreg := dreg opc reg
   617   * X86_Reg_No enum is used to specify the registers.
   618   * Additionally is_*_h flags are used to specify what part
   619   * of a given 32-bit register is used - high (TRUE) or low (FALSE).
   620   * For example: dreg = X86_EAX, is_dreg_h = TRUE -> use AH
   621   */
   622  #define x86_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h)	\
   623  	do {	\
   624  		*(inst)++ = (((unsigned char)(opc)) << 3) + 2;	\
   625  		x86_reg8_emit ((inst), (dreg), (reg), (is_dreg_h), (is_reg_h));	\
   626  	} while (0)
   627  
   628  #define x86_alu_reg_mem(inst,opc,reg,mem)	\
   629  	do {	\
   630  		*(inst)++ = (((unsigned char)(opc)) << 3) + 3;	\
   631  		x86_mem_emit ((inst), (reg), (mem));	\
   632  	} while (0)
   633  
   634  #define x86_alu_reg_membase(inst,opc,reg,basereg,disp)	\
   635  	do {	\
   636  		*(inst)++ = (((unsigned char)(opc)) << 3) + 3;	\
   637  		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
   638  	} while (0)
   639  
/* test reg, imm32: short EAX form 0xA9, otherwise 0xF7 /0. */
#define x86_test_reg_imm(inst,reg,imm)	\
	do {	\
		if ((reg) == X86_EAX) {	\
			*(inst)++ = (unsigned char)0xa9;	\
		} else {	\
			*(inst)++ = (unsigned char)0xf7;	\
			x86_reg_emit ((inst), 0, (reg));	\
		}	\
		x86_imm_emit32 ((inst), (imm));	\
	} while (0)

#define x86_test_mem_imm(inst,mem,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_mem_emit ((inst), 0, (mem));	\
		x86_imm_emit32 ((inst), (imm));	\
	} while (0)

#define x86_test_membase_imm(inst,basereg,disp,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xf7;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
		x86_imm_emit32 ((inst), (imm));	\
	} while (0)

/* test rm, reg (0x85): AND the operands, set flags, discard result. */
#define x86_test_reg_reg(inst,dreg,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x85;	\
		x86_reg_emit ((inst), (reg), (dreg));	\
	} while (0)

#define x86_test_mem_reg(inst,mem,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x85;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

#define x86_test_membase_reg(inst,basereg,disp,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0x85;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)
   682  
/* Shift/rotate by immediate: the shift-by-1 short form is 0xD1 /opc,
 * the general form 0xC1 /opc imm8.  `opc` is an X86_Shift_Opcode. */
#define x86_shift_reg_imm(inst,opc,reg,imm)	\
	do {	\
		if ((imm) == 1) {	\
			*(inst)++ = (unsigned char)0xd1;	\
			x86_reg_emit ((inst), (opc), (reg));	\
		} else {	\
			*(inst)++ = (unsigned char)0xc1;	\
			x86_reg_emit ((inst), (opc), (reg));	\
			x86_imm_emit8 ((inst), (imm));	\
		}	\
	} while (0)

#define x86_shift_mem_imm(inst,opc,mem,imm)	\
	do {	\
		if ((imm) == 1) {	\
			*(inst)++ = (unsigned char)0xd1;	\
			x86_mem_emit ((inst), (opc), (mem));	\
		} else {	\
			*(inst)++ = (unsigned char)0xc1;	\
			x86_mem_emit ((inst), (opc), (mem));	\
			x86_imm_emit8 ((inst), (imm));	\
		}	\
	} while (0)

#define x86_shift_membase_imm(inst,opc,basereg,disp,imm)	\
	do {	\
		if ((imm) == 1) {	\
			*(inst)++ = (unsigned char)0xd1;	\
			x86_membase_emit ((inst), (opc), (basereg), (disp));	\
		} else {	\
			*(inst)++ = (unsigned char)0xc1;	\
			x86_membase_emit ((inst), (opc), (basereg), (disp));	\
			x86_imm_emit8 ((inst), (imm));	\
		}	\
	} while (0)

/* Shift/rotate by CL: 0xD3 /opc. */
#define x86_shift_reg(inst,opc,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xd3;	\
		x86_reg_emit ((inst), (opc), (reg));	\
	} while (0)

#define x86_shift_mem(inst,opc,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xd3;	\
		x86_mem_emit ((inst), (opc), (mem));	\
	} while (0)

#define x86_shift_membase(inst,opc,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xd3;	\
		x86_membase_emit ((inst), (opc), (basereg), (disp));	\
	} while (0)
   736  
   737  /*
   738   * Multi op shift missing.
   739   */
   740  
   741  #define x86_shrd_reg(inst,dreg,reg)                     \
   742          do {                                            \
   743  		*(inst)++ = (unsigned char)0x0f;	\
   744  		*(inst)++ = (unsigned char)0xad;	\
   745  		x86_reg_emit ((inst), (reg), (dreg));	\
   746  	} while (0)
   747  
   748  #define x86_shrd_reg_imm(inst,dreg,reg,shamt)           \
   749          do {                                            \
   750  		*(inst)++ = (unsigned char)0x0f;	\
   751  		*(inst)++ = (unsigned char)0xac;	\
   752  		x86_reg_emit ((inst), (reg), (dreg));	\
   753  		x86_imm_emit8 ((inst), (shamt));	\
   754  	} while (0)
   755  
   756  #define x86_shld_reg(inst,dreg,reg)                     \
   757          do {                                            \
   758  		*(inst)++ = (unsigned char)0x0f;	\
   759  		*(inst)++ = (unsigned char)0xa5;	\
   760  		x86_reg_emit ((inst), (reg), (dreg));	\
   761  	} while (0)
   762  
   763  #define x86_shld_reg_imm(inst,dreg,reg,shamt)           \
   764          do {                                            \
   765  		*(inst)++ = (unsigned char)0x0f;	\
   766  		*(inst)++ = (unsigned char)0xa4;	\
   767  		x86_reg_emit ((inst), (reg), (dreg));	\
   768  		x86_imm_emit8 ((inst), (shamt));	\
   769  	} while (0)
   770  
   771  /*
   772   * EDX:EAX = EAX * rm
   773   */
   774  #define x86_mul_reg(inst,reg,is_signed)	\
   775  	do {	\
   776  		*(inst)++ = (unsigned char)0xf7;	\
   777  		x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg));	\
   778  	} while (0)
   779  
   780  #define x86_mul_mem(inst,mem,is_signed)	\
   781  	do {	\
   782  		*(inst)++ = (unsigned char)0xf7;	\
   783  		x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem));	\
   784  	} while (0)
   785  
   786  #define x86_mul_membase(inst,basereg,disp,is_signed)	\
   787  	do {	\
   788  		*(inst)++ = (unsigned char)0xf7;	\
   789  		x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp));	\
   790  	} while (0)
   791  
   792  /*
   793   * r *= rm
   794   */
   795  #define x86_imul_reg_reg(inst,dreg,reg)	\
   796  	do {	\
   797  		*(inst)++ = (unsigned char)0x0f;	\
   798  		*(inst)++ = (unsigned char)0xaf;	\
   799  		x86_reg_emit ((inst), (dreg), (reg));	\
   800  	} while (0)
   801  
   802  #define x86_imul_reg_mem(inst,reg,mem)	\
   803  	do {	\
   804  		*(inst)++ = (unsigned char)0x0f;	\
   805  		*(inst)++ = (unsigned char)0xaf;	\
   806  		x86_mem_emit ((inst), (reg), (mem));	\
   807  	} while (0)
   808  
   809  #define x86_imul_reg_membase(inst,reg,basereg,disp)	\
   810  	do {	\
   811  		*(inst)++ = (unsigned char)0x0f;	\
   812  		*(inst)++ = (unsigned char)0xaf;	\
   813  		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
   814  	} while (0)
   815  
   816  /*
   817   * dreg = rm * imm
   818   */
   819  #define x86_imul_reg_reg_imm(inst,dreg,reg,imm)	\
   820  	do {	\
   821  		if (x86_is_imm8 ((imm))) {	\
   822  			*(inst)++ = (unsigned char)0x6b;	\
   823  			x86_reg_emit ((inst), (dreg), (reg));	\
   824  			x86_imm_emit8 ((inst), (imm));	\
   825  		} else {	\
   826  			*(inst)++ = (unsigned char)0x69;	\
   827  			x86_reg_emit ((inst), (dreg), (reg));	\
   828  			x86_imm_emit32 ((inst), (imm));	\
   829  		}	\
   830  	} while (0)
   831  
   832  #define x86_imul_reg_mem_imm(inst,reg,mem,imm)	\
   833  	do {	\
   834  		if (x86_is_imm8 ((imm))) {	\
   835  			*(inst)++ = (unsigned char)0x6b;	\
   836  			x86_mem_emit ((inst), (reg), (mem));	\
   837  			x86_imm_emit8 ((inst), (imm));	\
   838  		} else {	\
   839  			*(inst)++ = (unsigned char)0x69;	\
   840  			x86_reg_emit ((inst), (reg), (mem));	\
   841  			x86_imm_emit32 ((inst), (imm));	\
   842  		}	\
   843  	} while (0)
   844  
/* reg = [basereg+disp] * imm: 0x6B (imm8) or 0x69 (imm32). */
#define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm)	\
	do {	\
		if (x86_is_imm8 ((imm))) {	\
			*(inst)++ = (unsigned char)0x6b;	\
			x86_membase_emit ((inst), (reg), (basereg), (disp));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x69;	\
			x86_membase_emit ((inst), (reg), (basereg), (disp));	\
			x86_imm_emit32 ((inst), (imm));	\
		}	\
	} while (0)
   857  
   858  /*
   859   * divide EDX:EAX by rm;
   860   * eax = quotient, edx = remainder
   861   */
   862  
   863  #define x86_div_reg(inst,reg,is_signed)	\
   864  	do {	\
   865  		*(inst)++ = (unsigned char)0xf7;	\
   866  		x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg));	\
   867  	} while (0)
   868  
   869  #define x86_div_mem(inst,mem,is_signed)	\
   870  	do {	\
   871  		*(inst)++ = (unsigned char)0xf7;	\
   872  		x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem));	\
   873  	} while (0)
   874  
   875  #define x86_div_membase(inst,basereg,disp,is_signed)	\
   876  	do {	\
   877  		*(inst)++ = (unsigned char)0xf7;	\
   878  		x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp));	\
   879  	} while (0)
   880  
/* mov store: 0x88 (byte), 0x66+0x89 (word, via fall-through), or
 * 0x89 (dword).  Unsupported sizes abandon emission via jit_assert. */
#define x86_mov_mem_reg(inst,mem,reg,size)	\
	do {	\
		switch ((size)) {	\
		case 1: *(inst)++ = (unsigned char)0x88; break;	\
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
		case 4: *(inst)++ = (unsigned char)0x89; break;	\
		default: jit_assert (0);	\
		}	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

/* mov [regp], reg */
#define x86_mov_regp_reg(inst,regp,reg,size)	\
	do {	\
		switch ((size)) {	\
		case 1: *(inst)++ = (unsigned char)0x88; break;	\
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
		case 4: *(inst)++ = (unsigned char)0x89; break;	\
		default: jit_assert (0);	\
		}	\
		x86_regp_emit ((inst), (reg), (regp));	\
	} while (0)

/* mov [basereg+disp], reg */
#define x86_mov_membase_reg(inst,basereg,disp,reg,size)	\
	do {	\
		switch ((size)) {	\
		case 1: *(inst)++ = (unsigned char)0x88; break;	\
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
		case 4: *(inst)++ = (unsigned char)0x89; break;	\
		default: jit_assert (0);	\
		}	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)

/* mov [basereg+indexreg<<shift+disp], reg */
#define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size)	\
	do {	\
		switch ((size)) {	\
		case 1: *(inst)++ = (unsigned char)0x88; break;	\
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
		case 4: *(inst)++ = (unsigned char)0x89; break;	\
		default: jit_assert (0);	\
		}	\
		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift));	\
	} while (0)

/* mov dreg, reg (load direction: 0x8A byte / 0x8B dword). */
#define x86_mov_reg_reg(inst,dreg,reg,size)	\
	do {	\
		switch ((size)) {	\
		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
		case 4: *(inst)++ = (unsigned char)0x8b; break;	\
		default: jit_assert (0);	\
		}	\
		x86_reg_emit ((inst), (dreg), (reg));	\
	} while (0)
   935  
   936  #define x86_mov_reg_mem(inst,reg,mem,size)	\
   937  	do {	\
   938  		switch ((size)) {	\
   939  		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
   940  		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
   941  		case 4: *(inst)++ = (unsigned char)0x8b; break;	\
   942  		default: jit_assert (0);	\
   943  		}	\
   944  		x86_mem_emit ((inst), (reg), (mem));	\
   945  	} while (0)
   946  
   947  #define x86_mov_reg_membase(inst,reg,basereg,disp,size)	\
   948  	do {	\
   949  		switch ((size)) {	\
   950  		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
   951  		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
   952  		case 4: *(inst)++ = (unsigned char)0x8b; break;	\
   953  		default: jit_assert (0);	\
   954  		}	\
   955  		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
   956  	} while (0)
   957  
   958  #define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size)	\
   959  	do {	\
   960  		switch ((size)) {	\
   961  		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
   962  		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */	\
   963  		case 4: *(inst)++ = (unsigned char)0x8b; break;	\
   964  		default: jit_assert (0);	\
   965  		}	\
   966  		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift));	\
   967  	} while (0)
   968  
   969  /*
   970   * Note: x86_clear_reg () chacnges the condition code!
   971   */
   972  #define x86_clear_reg(inst,reg) x86_alu_reg_reg((inst), X86_XOR, (reg), (reg))
   973  
   974  #define x86_mov_reg_imm(inst,reg,imm)	\
   975  	do {	\
   976  		*(inst)++ = (unsigned char)0xb8 + (reg);	\
   977  		x86_imm_emit32 ((inst), (imm));	\
   978  	} while (0)
   979  
   980  #define x86_mov_mem_imm(inst,mem,imm,size)	\
   981  	do {	\
   982  		if ((size) == 1) {	\
   983  			*(inst)++ = (unsigned char)0xc6;	\
   984  			x86_mem_emit ((inst), 0, (mem));	\
   985  			x86_imm_emit8 ((inst), (imm));	\
   986  		} else if ((size) == 2) {	\
   987  			*(inst)++ = (unsigned char)0x66;	\
   988  			*(inst)++ = (unsigned char)0xc7;	\
   989  			x86_mem_emit ((inst), 0, (mem));	\
   990  			x86_imm_emit16 ((inst), (imm));	\
   991  		} else {	\
   992  			*(inst)++ = (unsigned char)0xc7;	\
   993  			x86_mem_emit ((inst), 0, (mem));	\
   994  			x86_imm_emit32 ((inst), (imm));	\
   995  		}	\
   996  	} while (0)
   997  
   998  #define x86_mov_membase_imm(inst,basereg,disp,imm,size)	\
   999  	do {	\
  1000  		if ((size) == 1) {	\
  1001  			*(inst)++ = (unsigned char)0xc6;	\
  1002  			x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1003  			x86_imm_emit8 ((inst), (imm));	\
  1004  		} else if ((size) == 2) {	\
  1005  			*(inst)++ = (unsigned char)0x66;	\
  1006  			*(inst)++ = (unsigned char)0xc7;	\
  1007  			x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1008  			x86_imm_emit16 ((inst), (imm));	\
  1009  		} else {	\
  1010  			*(inst)++ = (unsigned char)0xc7;	\
  1011  			x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1012  			x86_imm_emit32 ((inst), (imm));	\
  1013  		}	\
  1014  	} while (0)
  1015  
  1016  #define x86_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size)	\
  1017  	do {	\
  1018  		if ((size) == 1) {	\
  1019  			*(inst)++ = (unsigned char)0xc6;	\
  1020  			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift));	\
  1021  			x86_imm_emit8 ((inst), (imm));	\
  1022  		} else if ((size) == 2) {	\
  1023  			*(inst)++ = (unsigned char)0x66;	\
  1024  			*(inst)++ = (unsigned char)0xc7;	\
  1025  			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift));	\
  1026  			x86_imm_emit16 ((inst), (imm));	\
  1027  		} else {	\
  1028  			*(inst)++ = (unsigned char)0xc7;	\
  1029  			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift));	\
  1030  			x86_imm_emit32 ((inst), (imm));	\
  1031  		}	\
  1032  	} while (0)
  1033  
  1034  #define x86_lea_mem(inst,reg,mem)	\
  1035  	do {	\
  1036  		*(inst)++ = (unsigned char)0x8d;	\
  1037  		x86_mem_emit ((inst), (reg), (mem));	\
  1038  	} while (0)
  1039  
  1040  #define x86_lea_membase(inst,reg,basereg,disp)	\
  1041  	do {	\
  1042  		*(inst)++ = (unsigned char)0x8d;	\
  1043  		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
  1044  	} while (0)
  1045  
  1046  #define x86_lea_memindex(inst,reg,basereg,disp,indexreg,shift)	\
  1047  	do {	\
  1048  		*(inst)++ = (unsigned char)0x8d;	\
  1049  		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift));	\
  1050  	} while (0)
  1051  
  1052  #define x86_widen_reg(inst,dreg,reg,is_signed,is_half)	\
  1053  	do {	\
  1054  		unsigned char op = 0xb6;	\
  1055                  jit_assert (is_half ||  X86_IS_BYTE_REG (reg)); \
  1056  		*(inst)++ = (unsigned char)0x0f;	\
  1057  		if ((is_signed)) op += 0x08;	\
  1058  		if ((is_half)) op += 0x01;	\
  1059  		*(inst)++ = op;	\
  1060  		x86_reg_emit ((inst), (dreg), (reg));	\
  1061  	} while (0)
  1062  
  1063  #define x86_widen_mem(inst,dreg,mem,is_signed,is_half)	\
  1064  	do {	\
  1065  		unsigned char op = 0xb6;	\
  1066  		*(inst)++ = (unsigned char)0x0f;	\
  1067  		if ((is_signed)) op += 0x08;	\
  1068  		if ((is_half)) op += 0x01;	\
  1069  		*(inst)++ = op;	\
  1070  		x86_mem_emit ((inst), (dreg), (mem));	\
  1071  	} while (0)
  1072  
  1073  #define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half)	\
  1074  	do {	\
  1075  		unsigned char op = 0xb6;	\
  1076  		*(inst)++ = (unsigned char)0x0f;	\
  1077  		if ((is_signed)) op += 0x08;	\
  1078  		if ((is_half)) op += 0x01;	\
  1079  		*(inst)++ = op;	\
  1080  		x86_membase_emit ((inst), (dreg), (basereg), (disp));	\
  1081  	} while (0)
  1082  
  1083  #define x86_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half)	\
  1084  	do {	\
  1085  		unsigned char op = 0xb6;	\
  1086  		*(inst)++ = (unsigned char)0x0f;	\
  1087  		if ((is_signed)) op += 0x08;	\
  1088  		if ((is_half)) op += 0x01;	\
  1089  		*(inst)++ = op;	\
  1090  		x86_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift));	\
  1091  	} while (0)
  1092  
  1093  #define x86_cdq(inst)  do { *(inst)++ = (unsigned char)0x99; } while (0)
  1094  #define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0)
  1095  
  1096  #define x86_fp_op_mem(inst,opc,mem,is_double)	\
  1097  	do {	\
  1098  		*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8;	\
  1099  		x86_mem_emit ((inst), (opc), (mem));	\
  1100  	} while (0)
  1101  
  1102  #define x86_fp_op_membase(inst,opc,basereg,disp,is_double)	\
  1103  	do {	\
  1104  		*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8;	\
  1105  		x86_membase_emit ((inst), (opc), (basereg), (disp));	\
  1106  	} while (0)
  1107  
  1108  #define x86_fp_op(inst,opc,index)	\
  1109  	do {	\
  1110  		*(inst)++ = (unsigned char)0xd8;	\
  1111  		*(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07);	\
  1112  	} while (0)
  1113  
  1114  #define x86_fp_op_reg(inst,opc,index,pop_stack)	\
  1115  	do {	\
  1116  		static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8};	\
  1117  		*(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc;	\
  1118  		*(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07);	\
  1119  	} while (0)
  1120  
  1121  /**
  1122   * @x86_fp_int_op_membase
  1123   * Supports FPU operations between ST(0) and integer operand in memory.
  1124   * Operation encoded using X86_FP_Opcode enum.
  1125   * Operand is addressed by [basereg + disp].
  1126   * is_int specifies whether operand is int32 (TRUE) or int16 (FALSE).
  1127   */
  1128  #define x86_fp_int_op_membase(inst,opc,basereg,disp,is_int)	\
  1129  	do {	\
  1130  		*(inst)++ = (is_int) ? (unsigned char)0xda : (unsigned char)0xde;	\
  1131  		x86_membase_emit ((inst), opc, (basereg), (disp));	\
  1132  	} while (0)
  1133  
  1134  #define x86_fstp(inst,index)	\
  1135  	do {	\
  1136  		*(inst)++ = (unsigned char)0xdd;	\
  1137  		*(inst)++ = (unsigned char)0xd8+(index);	\
  1138  	} while (0)
  1139  
  1140  #define x86_fcompp(inst)	\
  1141  	do {	\
  1142  		*(inst)++ = (unsigned char)0xde;	\
  1143  		*(inst)++ = (unsigned char)0xd9;	\
  1144  	} while (0)
  1145  
  1146  #define x86_fucompp(inst)	\
  1147  	do {	\
  1148  		*(inst)++ = (unsigned char)0xda;	\
  1149  		*(inst)++ = (unsigned char)0xe9;	\
  1150  	} while (0)
  1151  
  1152  #define x86_fnstsw(inst)	\
  1153  	do {	\
  1154  		*(inst)++ = (unsigned char)0xdf;	\
  1155  		*(inst)++ = (unsigned char)0xe0;	\
  1156  	} while (0)
  1157  
  1158  #define x86_fnstcw(inst,mem)	\
  1159  	do {	\
  1160  		*(inst)++ = (unsigned char)0xd9;	\
  1161  		x86_mem_emit ((inst), 7, (mem));	\
  1162  	} while (0)
  1163  
  1164  #define x86_fnstcw_membase(inst,basereg,disp)	\
  1165  	do {	\
  1166  		*(inst)++ = (unsigned char)0xd9;	\
  1167  		x86_membase_emit ((inst), 7, (basereg), (disp));	\
  1168  	} while (0)
  1169  
  1170  #define x86_fldcw(inst,mem)	\
  1171  	do {	\
  1172  		*(inst)++ = (unsigned char)0xd9;	\
  1173  		x86_mem_emit ((inst), 5, (mem));	\
  1174  	} while (0)
  1175  
  1176  #define x86_fldcw_membase(inst,basereg,disp)	\
  1177  	do {	\
  1178  		*(inst)++ = (unsigned char)0xd9;	\
  1179  		x86_membase_emit ((inst), 5, (basereg), (disp));	\
  1180  	} while (0)
  1181  
  1182  #define x86_fchs(inst)	\
  1183  	do {	\
  1184  		*(inst)++ = (unsigned char)0xd9;	\
  1185  		*(inst)++ = (unsigned char)0xe0;	\
  1186  	} while (0)
  1187  
  1188  #define x86_frem(inst)	\
  1189  	do {	\
  1190  		*(inst)++ = (unsigned char)0xd9;	\
  1191  		*(inst)++ = (unsigned char)0xf8;	\
  1192  	} while (0)
  1193  
  1194  #define x86_fxch(inst,index)	\
  1195  	do {	\
  1196  		*(inst)++ = (unsigned char)0xd9;	\
  1197  		*(inst)++ = (unsigned char)0xc8 + ((index) & 0x07);	\
  1198  	} while (0)
  1199  
  1200  #define x86_fcomi(inst,index)	\
  1201  	do {	\
  1202  		*(inst)++ = (unsigned char)0xdb;	\
  1203  		*(inst)++ = (unsigned char)0xf0 + ((index) & 0x07);	\
  1204  	} while (0)
  1205  
  1206  #define x86_fcomip(inst,index)	\
  1207  	do {	\
  1208  		*(inst)++ = (unsigned char)0xdf;	\
  1209  		*(inst)++ = (unsigned char)0xf0 + ((index) & 0x07);	\
  1210  	} while (0)
  1211  
  1212  #define x86_fucomi(inst,index)	\
  1213  	do {	\
  1214  		*(inst)++ = (unsigned char)0xdb;	\
  1215  		*(inst)++ = (unsigned char)0xe8 + ((index) & 0x07);	\
  1216  	} while (0)
  1217  
  1218  #define x86_fucomip(inst,index)	\
  1219  	do {	\
  1220  		*(inst)++ = (unsigned char)0xdf;	\
  1221  		*(inst)++ = (unsigned char)0xe8 + ((index) & 0x07);	\
  1222  	} while (0)
  1223  
  1224  #define x86_fld(inst,mem,is_double)	\
  1225  	do {	\
  1226  		*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
  1227  		x86_mem_emit ((inst), 0, (mem));	\
  1228  	} while (0)
  1229  
  1230  #define x86_fld_membase(inst,basereg,disp,is_double)	\
  1231  	do {	\
  1232  		*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
  1233  		x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1234  	} while (0)
  1235  
  1236  #define x86_fld_memindex(inst,basereg,disp,indexreg,shift,is_double)	\
  1237  	do {	\
  1238  		*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
  1239  		x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift));	\
  1240  	} while (0)
  1241  
  1242  #define x86_fld80_mem(inst,mem)	\
  1243  	do {	\
  1244  		*(inst)++ = (unsigned char)0xdb;	\
  1245  		x86_mem_emit ((inst), 5, (mem));	\
  1246  	} while (0)
  1247  
  1248  #define x86_fld80_membase(inst,basereg,disp)	\
  1249  	do {	\
  1250  		*(inst)++ = (unsigned char)0xdb;	\
  1251  		x86_membase_emit ((inst), 5, (basereg), (disp));	\
  1252  	} while (0)
  1253  
  1254  #define x86_fld80_memindex(inst,basereg,disp,indexreg,shift)	\
  1255  	do {	\
  1256  		*(inst)++ = (unsigned char)0xdb;	\
  1257  		x86_memindex_emit ((inst), 5, (basereg), (disp), (indexreg), (shift));	\
  1258  	} while (0)
  1259  
  1260  #define x86_fild(inst,mem,is_long)	\
  1261  	do {	\
  1262  		if ((is_long)) {	\
  1263  			*(inst)++ = (unsigned char)0xdf;	\
  1264  			x86_mem_emit ((inst), 5, (mem));	\
  1265  		} else {	\
  1266  			*(inst)++ = (unsigned char)0xdb;	\
  1267  			x86_mem_emit ((inst), 0, (mem));	\
  1268  		}	\
  1269  	} while (0)
  1270  
  1271  #define x86_fild_membase(inst,basereg,disp,is_long)	\
  1272  	do {	\
  1273  		if ((is_long)) {	\
  1274  			*(inst)++ = (unsigned char)0xdf;	\
  1275  			x86_membase_emit ((inst), 5, (basereg), (disp));	\
  1276  		} else {	\
  1277  			*(inst)++ = (unsigned char)0xdb;	\
  1278  			x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1279  		}	\
  1280  	} while (0)
  1281  
  1282  #define x86_fld_reg(inst,index)	\
  1283  	do {	\
  1284  		*(inst)++ = (unsigned char)0xd9;	\
  1285  		*(inst)++ = (unsigned char)0xc0 + ((index) & 0x07);	\
  1286  	} while (0)
  1287  
  1288  #define x86_fldz(inst)	\
  1289  	do {	\
  1290  		*(inst)++ = (unsigned char)0xd9;	\
  1291  		*(inst)++ = (unsigned char)0xee;	\
  1292  	} while (0)
  1293  
  1294  #define x86_fld1(inst)	\
  1295  	do {	\
  1296  		*(inst)++ = (unsigned char)0xd9;	\
  1297  		*(inst)++ = (unsigned char)0xe8;	\
  1298  	} while (0)
  1299  
  1300  #define x86_fldpi(inst)	\
  1301  	do {	\
  1302  		*(inst)++ = (unsigned char)0xd9;	\
  1303  		*(inst)++ = (unsigned char)0xeb;	\
  1304  	} while (0)
  1305  
  1306  #define x86_fst(inst,mem,is_double,pop_stack)	\
  1307  	do {	\
  1308  		*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9;	\
  1309  		x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem));	\
  1310  	} while (0)
  1311  
  1312  #define x86_fst_membase(inst,basereg,disp,is_double,pop_stack)	\
  1313  	do {	\
  1314  		*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9;	\
  1315  		x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp));	\
  1316  	} while (0)
  1317  
  1318  #define x86_fst_memindex(inst,basereg,disp,indexreg,shift,is_double,pop_stack)	\
  1319  	do {	\
  1320  		*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9;	\
  1321  		x86_memindex_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp), (indexreg), (shift));	\
  1322  	} while (0)
  1323  
  1324  #define x86_fst80_mem(inst,mem)	\
  1325  	do {	\
  1326  		*(inst)++ = (unsigned char)0xdb;	\
  1327  		x86_mem_emit ((inst), 7, (mem));	\
  1328  	} while (0)
  1329  
  1330  #define x86_fst80_membase(inst,basereg,disp)	\
  1331  	do {	\
  1332  		*(inst)++ = (unsigned char)0xdb;	\
  1333  		x86_membase_emit ((inst), 7, (basereg), (disp));	\
  1334  	} while (0)
  1335  
  1336  #define x86_fst80_memindex(inst,basereg,disp,indexreg,shift)	\
  1337  	do {	\
  1338  		*(inst)++ = (unsigned char)0xdb;	\
  1339  		x86_memindex_emit ((inst), 7, (basereg), (disp), (indexreg), (shift));	\
  1340  	} while (0)
  1341  
  1342  #define x86_fist_pop(inst,mem,is_long)	\
  1343  	do {	\
  1344  		if ((is_long)) {	\
  1345  			*(inst)++ = (unsigned char)0xdf;	\
  1346  			x86_mem_emit ((inst), 7, (mem));	\
  1347  		} else {	\
  1348  			*(inst)++ = (unsigned char)0xdb;	\
  1349  			x86_mem_emit ((inst), 3, (mem));	\
  1350  		}	\
  1351  	} while (0)
  1352  
  1353  #define x86_fist_pop_membase(inst,basereg,disp,is_long)	\
  1354  	do {	\
  1355  		if ((is_long)) {	\
  1356  			*(inst)++ = (unsigned char)0xdf;	\
  1357  			x86_membase_emit ((inst), 7, (basereg), (disp));	\
  1358  		} else {	\
  1359  			*(inst)++ = (unsigned char)0xdb;	\
  1360  			x86_membase_emit ((inst), 3, (basereg), (disp));	\
  1361  		}	\
  1362  	} while (0)
  1363  
  1364  #define x86_fstsw(inst)	\
  1365  	do {	\
  1366  			*(inst)++ = (unsigned char)0x9b;	\
  1367  			*(inst)++ = (unsigned char)0xdf;	\
  1368  			*(inst)++ = (unsigned char)0xe0;	\
  1369  	} while (0)
  1370  
  1371  /**
  1372   * @x86_fist_membase
  1373   * Converts content of ST(0) to integer and stores it at memory location
  1374   * addressed by [basereg + disp].
  1375   * is_int specifies whether destination is int32 (TRUE) or int16 (FALSE).
  1376   */
  1377  #define x86_fist_membase(inst,basereg,disp,is_int)	\
  1378  	do {	\
  1379  		if ((is_int)) {	\
  1380  			*(inst)++ = (unsigned char)0xdb;	\
  1381  			x86_membase_emit ((inst), 2, (basereg), (disp));	\
  1382  		} else {	\
  1383  			*(inst)++ = (unsigned char)0xdf;	\
  1384  			x86_membase_emit ((inst), 2, (basereg), (disp));	\
  1385  		}	\
  1386  	} while (0)
  1387  
  1388  
  1389  #define x86_push_reg(inst,reg)	\
  1390  	do {	\
  1391  		*(inst)++ = (unsigned char)0x50 + (reg);	\
  1392  	} while (0)
  1393  
  1394  #define x86_push_regp(inst,reg)	\
  1395  	do {	\
  1396  		*(inst)++ = (unsigned char)0xff;	\
  1397  		x86_regp_emit ((inst), 6, (reg));	\
  1398  	} while (0)
  1399  
  1400  #define x86_push_mem(inst,mem)	\
  1401  	do {	\
  1402  		*(inst)++ = (unsigned char)0xff;	\
  1403  		x86_mem_emit ((inst), 6, (mem));	\
  1404  	} while (0)
  1405  
  1406  #define x86_push_membase(inst,basereg,disp)	\
  1407  	do {	\
  1408  		*(inst)++ = (unsigned char)0xff;	\
  1409  		x86_membase_emit ((inst), 6, (basereg), (disp));	\
  1410  	} while (0)
  1411  
  1412  #define x86_push_memindex(inst,basereg,disp,indexreg,shift)	\
  1413  	do {	\
  1414  		*(inst)++ = (unsigned char)0xff;	\
  1415  		x86_memindex_emit ((inst), 6, (basereg), (disp), (indexreg), (shift));	\
  1416  	} while (0)
  1417  
  1418  #define x86_push_imm_template(inst) x86_push_imm (inst, 0xf0f0f0f0)
  1419  	
  1420  #define x86_push_imm(inst,imm)	\
  1421  	do {	\
  1422  		int _imm = (int) (imm);	\
  1423  		if (x86_is_imm8 (_imm)) {	\
  1424  			*(inst)++ = (unsigned char)0x6A;	\
  1425  			x86_imm_emit8 ((inst), (_imm));	\
  1426  		} else {	\
  1427  			*(inst)++ = (unsigned char)0x68;	\
  1428  			x86_imm_emit32 ((inst), (_imm));	\
  1429  		}	\
  1430  	} while (0)
  1431  
  1432  #define x86_pop_reg(inst,reg)	\
  1433  	do {	\
  1434  		*(inst)++ = (unsigned char)0x58 + (reg);	\
  1435  	} while (0)
  1436  
  1437  #define x86_pop_mem(inst,mem)	\
  1438  	do {	\
  1439  		*(inst)++ = (unsigned char)0x8f;	\
  1440  		x86_mem_emit ((inst), 0, (mem));	\
  1441  	} while (0)
  1442  
  1443  #define x86_pop_membase(inst,basereg,disp)	\
  1444  	do {	\
  1445  		*(inst)++ = (unsigned char)0x8f;	\
  1446  		x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1447  	} while (0)
  1448  
  1449  #define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0)
  1450  #define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0)
  1451  #define x86_popad(inst)  do { *(inst)++ = (unsigned char)0x61; } while (0)
  1452  #define x86_popfd(inst)  do { *(inst)++ = (unsigned char)0x9d; } while (0)
  1453  
  1454  #define x86_loop(inst,imm)	\
  1455  	do {	\
  1456  		*(inst)++ = (unsigned char)0xe2;	\
  1457  		x86_imm_emit8 ((inst), (imm));	\
  1458  	} while (0)
  1459  
  1460  #define x86_loope(inst,imm)	\
  1461  	do {	\
  1462  		*(inst)++ = (unsigned char)0xe1;	\
  1463  		x86_imm_emit8 ((inst), (imm));	\
  1464  	} while (0)
  1465  
  1466  #define x86_loopne(inst,imm)	\
  1467  	do {	\
  1468  		*(inst)++ = (unsigned char)0xe0;	\
  1469  		x86_imm_emit8 ((inst), (imm));	\
  1470  	} while (0)
  1471  
  1472  #define x86_jump32(inst,imm)	\
  1473  	do {	\
  1474  		*(inst)++ = (unsigned char)0xe9;	\
  1475  		x86_imm_emit32 ((inst), (imm));	\
  1476  	} while (0)
  1477  
  1478  #define x86_jump8(inst,imm)	\
  1479  	do {	\
  1480  		*(inst)++ = (unsigned char)0xeb;	\
  1481  		x86_imm_emit8 ((inst), (imm));	\
  1482  	} while (0)
  1483  
  1484  #define x86_jump_reg(inst,reg)	\
  1485  	do {	\
  1486  		*(inst)++ = (unsigned char)0xff;	\
  1487  		x86_reg_emit ((inst), 4, (reg));	\
  1488  	} while (0)
  1489  
  1490  #define x86_jump_mem(inst,mem)	\
  1491  	do {	\
  1492  		*(inst)++ = (unsigned char)0xff;	\
  1493  		x86_mem_emit ((inst), 4, (mem));	\
  1494  	} while (0)
  1495  
  1496  #define x86_jump_membase(inst,basereg,disp)	\
  1497  	do {	\
  1498  		*(inst)++ = (unsigned char)0xff;	\
  1499  		x86_membase_emit ((inst), 4, (basereg), (disp));	\
  1500  	} while (0)
  1501  
  1502  #define x86_jump_memindex(inst,basereg,disp,indexreg,shift)	\
  1503  	do {	\
  1504  		*(inst)++ = (unsigned char)0xff;	\
  1505  		x86_memindex_emit ((inst), 4, (basereg), (disp), (indexreg), (shift)); \
  1506  	} while (0)
  1507  /*
  1508   * target is a pointer in our buffer.
  1509   */
  1510  #define x86_jump_code(inst,target)	\
  1511  	do {	\
  1512  		int t = (unsigned char*)(target) - (inst) - 2;	\
  1513  		if (x86_is_imm8(t)) {	\
  1514  			x86_jump8 ((inst), t);	\
  1515  		} else {	\
  1516  			t -= 3;	\
  1517  			x86_jump32 ((inst), t);	\
  1518  		}	\
  1519  	} while (0)
  1520  
  1521  #define x86_jump_disp(inst,disp)	\
  1522  	do {	\
  1523  		int t = (disp) - 2;	\
  1524  		if (x86_is_imm8(t)) {	\
  1525  			x86_jump8 ((inst), t);	\
  1526  		} else {	\
  1527  			t -= 3;	\
  1528  			x86_jump32 ((inst), t);	\
  1529  		}	\
  1530  	} while (0)
  1531  
  1532  #define x86_branch8(inst,cond,imm,is_signed)	\
  1533  	do {	\
  1534  		if ((is_signed))	\
  1535  			*(inst)++ = x86_cc_signed_map [(cond)];	\
  1536  		else	\
  1537  			*(inst)++ = x86_cc_unsigned_map [(cond)];	\
  1538  		x86_imm_emit8 ((inst), (imm));	\
  1539  	} while (0)
  1540  
  1541  #define x86_branch32(inst,cond,imm,is_signed)	\
  1542  	do {	\
  1543  		*(inst)++ = (unsigned char)0x0f;	\
  1544  		if ((is_signed))	\
  1545  			*(inst)++ = x86_cc_signed_map [(cond)] + 0x10;	\
  1546  		else	\
  1547  			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10;	\
  1548  		x86_imm_emit32 ((inst), (imm));	\
  1549  	} while (0)
  1550  
  1551  #define x86_branch(inst,cond,target,is_signed)	\
  1552  	do {	\
  1553  		int offset = (target) - (inst) - 2;	\
  1554  		if (x86_is_imm8 ((offset)))	\
  1555  			x86_branch8 ((inst), (cond), offset, (is_signed));	\
  1556  		else {	\
  1557  			offset -= 4;	\
  1558  			x86_branch32 ((inst), (cond), offset, (is_signed));	\
  1559  		}	\
  1560  	} while (0)
  1561  
  1562  #define x86_branch_disp(inst,cond,disp,is_signed)	\
  1563  	do {	\
  1564  		int offset = (disp) - 2;	\
  1565  		if (x86_is_imm8 ((offset)))	\
  1566  			x86_branch8 ((inst), (cond), offset, (is_signed));	\
  1567  		else {	\
  1568  			offset -= 4;	\
  1569  			x86_branch32 ((inst), (cond), offset, (is_signed));	\
  1570  		}	\
  1571  	} while (0)
  1572  
  1573  #define x86_set_reg(inst,cond,reg,is_signed)	\
  1574  	do {	\
  1575                  jit_assert (X86_IS_BYTE_REG (reg)); \
  1576  		*(inst)++ = (unsigned char)0x0f;	\
  1577  		if ((is_signed))	\
  1578  			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
  1579  		else	\
  1580  			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
  1581  		x86_reg_emit ((inst), 0, (reg));	\
  1582  	} while (0)
  1583  
  1584  #define x86_set_mem(inst,cond,mem,is_signed)	\
  1585  	do {	\
  1586  		*(inst)++ = (unsigned char)0x0f;	\
  1587  		if ((is_signed))	\
  1588  			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
  1589  		else	\
  1590  			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
  1591  		x86_mem_emit ((inst), 0, (mem));	\
  1592  	} while (0)
  1593  
  1594  #define x86_set_membase(inst,cond,basereg,disp,is_signed)	\
  1595  	do {	\
  1596  		*(inst)++ = (unsigned char)0x0f;	\
  1597  		if ((is_signed))	\
  1598  			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
  1599  		else	\
  1600  			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
  1601  		x86_membase_emit ((inst), 0, (basereg), (disp));	\
  1602  	} while (0)
  1603  
  1604  #define x86_call_imm(inst,disp)	\
  1605  	do {	\
  1606  		*(inst)++ = (unsigned char)0xe8;	\
  1607  		x86_imm_emit32 ((inst), (int)(disp));	\
  1608  	} while (0)
  1609  
  1610  #define x86_call_reg(inst,reg)	\
  1611  	do {	\
  1612  		*(inst)++ = (unsigned char)0xff;	\
  1613  		x86_reg_emit ((inst), 2, (reg));	\
  1614  	} while (0)
  1615  
  1616  #define x86_call_mem(inst,mem)	\
  1617  	do {	\
  1618  		*(inst)++ = (unsigned char)0xff;	\
  1619  		x86_mem_emit ((inst), 2, (mem));	\
  1620  	} while (0)
  1621  
  1622  #define x86_call_membase(inst,basereg,disp)	\
  1623  	do {	\
  1624  		*(inst)++ = (unsigned char)0xff;	\
  1625  		x86_membase_emit ((inst), 2, (basereg), (disp));	\
  1626  	} while (0)
  1627  
  1628  #define x86_call_code(inst,target)	\
  1629  	do {	\
  1630  		int _x86_offset = (unsigned char*)(target) - (inst);	\
  1631  		_x86_offset -= 5;	\
  1632  		x86_call_imm ((inst), _x86_offset);	\
  1633  	} while (0)
  1634  
  1635  #define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
  1636  
  1637  #define x86_ret_imm(inst,imm)	\
  1638  	do {	\
  1639  		if ((imm) == 0) {	\
  1640  			x86_ret ((inst));	\
  1641  		} else {	\
  1642  			*(inst)++ = (unsigned char)0xc2;	\
  1643  			x86_imm_emit16 ((inst), (imm));	\
  1644  		}	\
  1645  	} while (0)
  1646  
  1647  #define x86_cmov_reg(inst,cond,is_signed,dreg,reg)	\
  1648  	do {	\
  1649  		*(inst)++ = (unsigned char) 0x0f;	\
  1650  		if ((is_signed))	\
  1651  			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
  1652  		else	\
  1653  			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
  1654  		x86_reg_emit ((inst), (dreg), (reg));	\
  1655  	} while (0)
  1656  
  1657  #define x86_cmov_mem(inst,cond,is_signed,reg,mem)	\
  1658  	do {	\
  1659  		*(inst)++ = (unsigned char) 0x0f;	\
  1660  		if ((is_signed))	\
  1661  			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
  1662  		else	\
  1663  			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
  1664  		x86_mem_emit ((inst), (reg), (mem));	\
  1665  	} while (0)
  1666  
  1667  #define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp)	\
  1668  	do {	\
  1669  		*(inst)++ = (unsigned char) 0x0f;	\
  1670  		if ((is_signed))	\
  1671  			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
  1672  		else	\
  1673  			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
  1674  		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
  1675  	} while (0)
  1676  
  1677  #define x86_enter(inst,framesize)	\
  1678  	do {	\
  1679  		*(inst)++ = (unsigned char)0xc8;	\
  1680  		x86_imm_emit16 ((inst), (framesize));	\
  1681  		*(inst)++ = 0;	\
  1682  	} while (0)
  1683  	
  1684  #define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
  1685  #define x86_sahf(inst)  do { *(inst)++ = (unsigned char)0x9e; } while (0)
  1686  
  1687  #define x86_fsin(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
  1688  #define x86_fcos(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
  1689  #define x86_fabs(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
  1690  #define x86_ftst(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe4; } while (0)
  1691  #define x86_fxam(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe5; } while (0)
  1692  #define x86_fpatan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0)
  1693  #define x86_fprem(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0)
  1694  #define x86_fprem1(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0)
  1695  #define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
  1696  #define x86_fsqrt(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
  1697  #define x86_fptan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
  1698  
  1699  #define x86_padding(inst,size)	\
  1700  	do {	\
  1701  		switch ((size)) {	\
  1702  		case 1: x86_nop ((inst)); break;	\
  1703  		case 2: *(inst)++ = 0x8b;	\
  1704  			*(inst)++ = 0xc0; break;	\
  1705  		case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d;	\
  1706  			*(inst)++ = 0x00; break;	\
  1707  		case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64;	\
  1708  			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
  1709  			break;	\
  1710  		case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64;	\
  1711  			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
  1712  			x86_nop ((inst)); break;	\
  1713  		case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad;	\
  1714  			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
  1715  			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
  1716  			break;	\
  1717  		case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4;	\
  1718  			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
  1719  			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
  1720  			*(inst)++ = 0x00; break;	\
  1721  		default: jit_assert (0);	\
  1722  		}	\
  1723  	} while (0)
  1724  
  1725  #define x86_prolog(inst,frame_size,reg_mask)	\
  1726  	do {	\
  1727  		unsigned i, m = 1;	\
  1728  		x86_enter ((inst), (frame_size));	\
  1729  		for (i = 0; i < X86_NREG; ++i, m <<= 1) {	\
  1730  			if ((reg_mask) & m)	\
  1731  				x86_push_reg ((inst), i);	\
  1732  		}	\
  1733  	} while (0)
  1734  
  1735  #define x86_epilog(inst,reg_mask)	\
  1736  	do {	\
  1737  		unsigned i, m = 1 << X86_EDI;	\
  1738  		for (i = X86_EDI; m != 0; i--, m=m>>1) {	\
  1739  			if ((reg_mask) & m)	\
  1740  				x86_pop_reg ((inst), i);	\
  1741  		}	\
  1742  		x86_leave ((inst));	\
  1743  		x86_ret ((inst));	\
  1744  	} while (0)
  1745  
  1746  #endif /* JIT_GEN_X86_H */