7990392006-04-27Tor Edvardsson /* * Machine code generator for AMD64. */ #include "operators.h" #include "constants.h" #include "object.h" #include "builtin_functions.h"
ddc1ba2014-08-07Per Hedbor /* These come from linux include files. */ #ifdef REG_RBX #undef REG_RAX #undef REG_RBX #undef REG_RCX #undef REG_RDX #undef REG_RSP #undef REG_RBP #undef REG_RSI #undef REG_RDI #undef REG_R8 #undef REG_R9 #undef REG_R10 #undef REG_R11 #undef REG_R12 #undef REG_R13 #undef REG_R14 #undef REG_R15 #endif #define REG_RAX MISSING_P_ #define REG_RBX MISSING_P_ #define REG_RCX MISSING_P_ #define REG_RDX MISSING_P_ #define REG_RSP MISSING_P_ #define REG_RBP MISSING_P_ #define REG_RSI MISSING_P_ #define REG_RDI MISSING_P_ #define REG_R8 MISSING_P_ #define REG_R9 MISSING_P_ #define REG_R10 MISSING_P_ #define REG_R11 MISSING_P_ #define REG_R12 MISSING_P_ #define REG_R13 MISSING_P_ #define REG_R14 MISSING_P_ #define REG_R15 MISSING_P_
85c1c12014-08-18Per Hedbor struct soff { int off; int type; int value; }; static struct soff SVAL(int i) { struct soff res; res.off = res.type = i*sizeof(struct svalue); res.value = res.type + 8; return res; }
7990392006-04-27Tor Edvardsson  /* Register encodings */
2fd8052013-10-08Per Hedbor enum amd64_reg {P_REG_RAX = 0, P_REG_RBX = 3, P_REG_RCX = 1, P_REG_RDX = 2, P_REG_RSP = 4, P_REG_RBP = 5, P_REG_RSI = 6, P_REG_RDI = 7, P_REG_R8 = 8, P_REG_R9 = 9, P_REG_R10 = 10, P_REG_R11 = 11, P_REG_R12 = 12, P_REG_R13 = 13, P_REG_R14 = 14, P_REG_R15 = 15, P_REG_INVALID = -1, P_REG_XMM0 = 0, P_REG_XMM1 = 1, P_REG_XMM2 = 2, P_REG_XMM3 = 3, P_REG_XMM4 = 4, P_REG_XMM5 = 5, P_REG_XMM6 = 6, P_REG_XMM7 = 7, P_REG_XMM8 = 8, P_REG_XMM9 = 9, P_REG_XMM10=10, P_REG_XMM11=11, P_REG_XMM12=12, P_REG_XMM13=13, P_REG_XMM14=14, P_REG_XMM15=15,
622ebc2012-06-25Per Hedbor  };
7990392006-04-27Tor Edvardsson 
2fd8052013-10-08Per Hedbor #define PIKE_MARK_SP_REG P_REG_R12 #define PIKE_SP_REG P_REG_R13 #define PIKE_FP_REG P_REG_R14 #define Pike_interpreter_reg P_REG_R15
d1fa802011-05-09Henrik Grubbström (Grubba)  #ifdef __NT__ /* From http://software.intel.com/en-us/articles/introduction-to-x64-assembly/ * * Note: Space for the arguments needs to be allocated on the stack as well. */
2fd8052013-10-08Per Hedbor #define ARG1_REG P_REG_RCX #define ARG2_REG P_REG_RDX #define ARG3_REG P_REG_R8 #define ARG4_REG P_REG_R9
d1fa802011-05-09Henrik Grubbström (Grubba) #else /* From SysV ABI for AMD64 draft 0.99.5. */
2fd8052013-10-08Per Hedbor #define ARG1_REG P_REG_RDI #define ARG2_REG P_REG_RSI #define ARG3_REG P_REG_RDX #define ARG4_REG P_REG_RCX #define ARG5_REG P_REG_R8 #define ARG6_REG P_REG_R9
d1fa802011-05-09Henrik Grubbström (Grubba) #endif
7990392006-04-27Tor Edvardsson 
c33ce92012-06-28Per Hedbor static enum amd64_reg sp_reg = -1, fp_reg = -1, mark_sp_reg = -1; static int dirty_regs = 0, ret_for_func = 0; ptrdiff_t amd64_prev_stored_pc = -1; /* PROG_PC at the last point Pike_fp->pc was updated. */
638a2c2012-07-16Per Hedbor static int branch_check_threads_update_etc = 0; static int store_pc = 0;
719c3a2012-06-12Per Hedbor 
15e8a02012-06-13Henrik Grubbström (Grubba) #define MAX_LABEL_USES 6
719c3a2012-06-12Per Hedbor struct label { int n_label_uses;
15e8a02012-06-13Henrik Grubbström (Grubba)  ptrdiff_t addr;
719c3a2012-06-12Per Hedbor  ptrdiff_t offset[MAX_LABEL_USES]; }; static void label( struct label *l ) { int i;
15e8a02012-06-13Henrik Grubbström (Grubba)  if (l->addr >= 0) Pike_fatal("Label reused.\n");
719c3a2012-06-12Per Hedbor  for( i=0; i<l->n_label_uses; i++ ) { int dist = PIKE_PC - (l->offset[i] + 1); if( dist > 0x7f || dist < -0x80 )
8886032012-06-25Per Hedbor  Pike_fatal("Branch %d too far\n", dist);
719c3a2012-06-12Per Hedbor  Pike_compiler->new_program->program[l->offset[i]] = dist; } l->n_label_uses = 0;
15e8a02012-06-13Henrik Grubbström (Grubba)  l->addr = PIKE_PC;
719c3a2012-06-12Per Hedbor }
99c1e92012-06-20Per Hedbor static void ib( char x ) { add_to_program( x ); }
2f46bb2014-03-15Martin Nilsson #if 0
99c1e92012-06-20Per Hedbor static void iw( short x ) { add_to_program( x>>8 ); add_to_program( x ); }
2f46bb2014-03-15Martin Nilsson #endif
99c1e92012-06-20Per Hedbor  static void id( int x ) { add_to_program( (x)&0xff ); add_to_program( (x>>8)&0xff ); add_to_program( (x>>16)&0xff ); add_to_program( (x>>24)&0xff ); } /* x86 opcodes */ #define opcode(X) ib(X)
719c3a2012-06-12Per Hedbor static void modrm( int mod, int r, int m ) {
6810702012-06-21Henrik Grubbström (Grubba)  if (r < 0) Pike_fatal("Using an invalid register(1)!\n"); if (m < 0) Pike_fatal("Using an invalid register(2)!\n");
99c1e92012-06-20Per Hedbor  ib( ((mod<<6) | ((r&0x7)<<3) | (m&0x7)) );
719c3a2012-06-12Per Hedbor } static void sib( int scale, int index, enum amd64_reg base ) {
99c1e92012-06-20Per Hedbor  ib( (scale<<6) | ((index&0x7)<<3) | (base&0x7) ); } static void modrm_sib( int mod, int r, int m ) { modrm( mod, r, m );
2fd8052013-10-08Per Hedbor  if( (m&7) == P_REG_RSP) sib(0, P_REG_RSP, P_REG_RSP );
719c3a2012-06-12Per Hedbor } static void rex( int w, enum amd64_reg r, int x, enum amd64_reg b ) { unsigned char res = 1<<6; /* bit 7, 5-4 == 0 */ if( w ) res |= 1<<3; if( r > 0x7 ) res |= 1<<2; if( x ) res |= 1<<1; if( b > 0x7 ) res |= 1<<0; if( res != (1<<6) ) add_to_program( res ); }
99c1e92012-06-20Per Hedbor static void offset_modrm_sib( int offset, int r, int m )
719c3a2012-06-12Per Hedbor {
99c1e92012-06-20Per Hedbor  /* OFFSET */ if( offset < -128 || offset > 127 ) { modrm_sib( 2, r, m ); id( offset ); }
2fd8052013-10-08Per Hedbor  else if( offset || (m&7) == P_REG_RSP || (m&7) == P_REG_RBP )
99c1e92012-06-20Per Hedbor  { modrm_sib( 1, r, m ); ib( offset ); } else { modrm_sib( 0, r, m ); }
719c3a2012-06-12Per Hedbor } static void ret() { opcode(0xc3); } static void push(enum amd64_reg reg ) {
6810702012-06-21Henrik Grubbström (Grubba)  if (reg < 0) Pike_fatal("Pushing invalid register.\n");
719c3a2012-06-12Per Hedbor  if (reg & 0x08) add_to_program(0x41); add_to_program(0x50 + (reg & 0x07)); } static void pop(enum amd64_reg reg ) {
6810702012-06-21Henrik Grubbström (Grubba)  if (reg < 0) Pike_fatal("Popping invalid register.\n");
719c3a2012-06-12Per Hedbor  if (reg & 0x08) add_to_program(0x41); add_to_program(0x58 + (reg & 0x07)); } static void mov_reg_reg(enum amd64_reg from_reg, enum amd64_reg to_reg ) { rex( 1, from_reg, 0, to_reg ); opcode( 0x89 ); modrm( 3, from_reg, to_reg ); }
85c1c12014-08-18Per Hedbor static void mov_reg32_reg(enum amd64_reg from_reg, enum amd64_reg to_reg ) { rex( 0, from_reg, 0, to_reg ); opcode( 0x89 ); modrm( 3, from_reg, to_reg ); }
7990392006-04-27Tor Edvardsson #define PUSH_INT(X) ins_int((INT32)(X), (void (*)(char))add_to_program)
6785ae2012-06-15Per Hedbor static void low_mov_mem_reg(enum amd64_reg from_reg, ptrdiff_t offset, enum amd64_reg to_reg)
719c3a2012-06-12Per Hedbor { opcode( 0x8b );
99c1e92012-06-20Per Hedbor  offset_modrm_sib(offset, to_reg, from_reg );
719c3a2012-06-12Per Hedbor } static void mov_mem_reg( enum amd64_reg from_reg, ptrdiff_t offset, enum amd64_reg to_reg ) { rex( 1, to_reg, 0, from_reg ); low_mov_mem_reg( from_reg, offset, to_reg ); }
99c1e92012-06-20Per Hedbor static void xor_reg_reg( enum amd64_reg reg1, enum amd64_reg reg2 )
719c3a2012-06-12Per Hedbor {
99c1e92012-06-20Per Hedbor  rex(1,reg1,0,reg2); opcode( 0x31 ); modrm(3,reg1,reg2);
719c3a2012-06-12Per Hedbor }
ad0d7e2014-08-28Per Hedbor static void and_reg_reg( enum amd64_reg reg1, enum amd64_reg reg2 ) { rex(1,reg1,0,reg2); opcode( 0x23 ); modrm(3,reg1,reg2); } static void or_reg_reg( enum amd64_reg reg1, enum amd64_reg reg2 ) { rex(1,reg1,0,reg2); opcode( 0x09 ); modrm(3,reg1,reg2); }
156ab02014-08-31Per Hedbor static void or_reg_imm( enum amd64_reg reg, INT_TYPE imm32 ) { rex( 1, 0, 0, reg ); if( imm32 < -0x80 || imm32 > 0x7f ) { if( reg == P_REG_RAX ) { opcode( 0xd ); /* OR rax,imm32 */ id( imm32 ); } else { opcode( 0x81 ); /* OR REG,imm32 */ modrm( 3, 1, reg); id( imm32 ); } } else { add_to_program(0x83); /* OR REG,imm8 */ modrm( 3, 1, reg ); ib( imm32 ); } } static void and_reg_imm( enum amd64_reg reg, INT_TYPE imm32 )
719c3a2012-06-12Per Hedbor { rex( 1, 0, 0, reg ); if( imm32 < -0x80 || imm32 > 0x7f ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
719c3a2012-06-12Per Hedbor  {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x25 ); /* AND rax,imm32 */
719c3a2012-06-12Per Hedbor  id( imm32 ); } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x81 ); /* AND REG,imm32 */
719c3a2012-06-12Per Hedbor  modrm( 3,4, reg); id( imm32 ); } } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  add_to_program(0x83); /* AND REG,imm8 */
719c3a2012-06-12Per Hedbor  modrm( 3, 4, reg ); ib( imm32 ); } }
378ad02012-06-25Per Hedbor static void and_reg32_imm( enum amd64_reg reg, int imm32 ) { rex( 0, 0, 0, reg ); if( imm32 < -0x80 || imm32 > 0x7f ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
378ad02012-06-25Per Hedbor  { opcode( 0x25 ); /* AND rax,imm32 */ id( imm32 ); } else { opcode( 0x81 ); /* AND REG,imm32 */ modrm( 3,4, reg); id( imm32 ); } } else { add_to_program(0x83); /* AND REG,imm8 */ modrm( 3, 4, reg ); ib( imm32 ); } }
99c1e92012-06-20Per Hedbor static void mov_mem32_reg( enum amd64_reg from_reg, ptrdiff_t offset, enum amd64_reg to_reg ) { rex( 0, to_reg, 0, from_reg ); low_mov_mem_reg( from_reg, offset, to_reg ); }
719c3a2012-06-12Per Hedbor static void mov_mem16_reg( enum amd64_reg from_reg, ptrdiff_t offset, enum amd64_reg to_reg ) {
1af0932012-06-22Per Hedbor #if 0
719c3a2012-06-12Per Hedbor  mov_mem32_reg( from_reg, offset, to_reg );
99c1e92012-06-20Per Hedbor  and_reg_imm(to_reg, 0xffff);
1af0932012-06-22Per Hedbor #else rex( 1,to_reg,0,from_reg ); /* movzx r/m16 -> r32. This automatically zero-extends the upper 32-bit */ opcode( 0xf ); opcode( 0xb7 ); offset_modrm_sib(offset, to_reg, from_reg ); #endif } static void mov_mem8_reg( enum amd64_reg from_reg, ptrdiff_t offset, enum amd64_reg to_reg ) { #if 0 mov_mem32_reg( from_reg, offset, to_reg ); and_reg_imm(to_reg, 0xff); #else rex( 0,to_reg,0,from_reg ); /* movzx r/m8 -> r32. This automatically zero-extends the upper 32-bit */ opcode( 0xf ); opcode( 0xb6 ); offset_modrm_sib(offset, to_reg, from_reg ); #endif
719c3a2012-06-12Per Hedbor } static void add_reg_imm( enum amd64_reg src, int imm32); static void shl_reg_imm( enum amd64_reg from_reg, int shift ) { rex( 1, from_reg, 0, 0 ); if( shift == 1 ) {
b233f42012-06-17Henrik Grubbström (Grubba)  opcode( 0xd1 ); /* SAL */ modrm( 3, 4, from_reg );
719c3a2012-06-12Per Hedbor  } else { opcode( 0xc1 );
b233f42012-06-17Henrik Grubbström (Grubba)  modrm( 3, 4, from_reg );
719c3a2012-06-12Per Hedbor  ib( shift ); } }
378ad02012-06-25Per Hedbor static void shl_reg_reg( enum amd64_reg reg, enum amd64_reg sreg) {
2fd8052013-10-08Per Hedbor  if( sreg != P_REG_RCX )
378ad02012-06-25Per Hedbor  Pike_fatal("Not supported\n"); rex( 1, 0, 0, reg ); opcode( 0xd3 ); modrm( 3, 4, reg ); } static void shl_reg32_reg( enum amd64_reg reg, enum amd64_reg sreg) {
2fd8052013-10-08Per Hedbor  if( sreg != P_REG_RCX )
378ad02012-06-25Per Hedbor  Pike_fatal("Not supported\n"); rex( 0, 0, 0, reg ); opcode( 0xd3 ); modrm( 3, 4, reg ); }
ad0d7e2014-08-28Per Hedbor static void shr_mem_reg( enum amd64_reg reg, int off, enum amd64_reg sreg) { if( sreg != P_REG_RCX ) Pike_fatal("Not supported\n"); rex( 1, 0, 0, reg ); opcode( 0xd3 ); /* SAR r/m64,CL */ offset_modrm_sib(off, 7, reg ); /* modrm( 3, 7, reg ); */ }
156ab02014-08-31Per Hedbor  static void shr_reg_reg( enum amd64_reg reg, enum amd64_reg reg_rcx ) { if( reg_rcx != P_REG_RCX ) Pike_fatal("Not supported\n"); rex( 1, 0, 0, reg ); opcode( 0xd3 ); /* SAR r/m64,CL */ modrm( 3, 7, reg ); }
1af0932012-06-22Per Hedbor static void shr_reg_imm( enum amd64_reg from_reg, int shift ) { rex( 1, from_reg, 0, 0 ); if( shift == 1 ) { opcode( 0xd1 ); /* SAR */ modrm( 3, 7, from_reg ); } else { opcode( 0xc1 ); modrm( 3, 7, from_reg ); ib( shift ); } }
719c3a2012-06-12Per Hedbor static void clear_reg( enum amd64_reg reg ) { xor_reg_reg( reg, reg ); }
ca2a072014-09-02Martin Nilsson #if 0
6785ae2012-06-15Per Hedbor static void neg_reg( enum amd64_reg reg ) { rex(1,0,0,reg); opcode(0xf7); modrm(3,3,reg); }
ca2a072014-09-02Martin Nilsson #endif
6785ae2012-06-15Per Hedbor 
719c3a2012-06-12Per Hedbor static void mov_imm_reg( long imm, enum amd64_reg reg ) { if( (imm > 0x7fffffffLL) || (imm < -0x80000000LL) ) { rex(1,0,0,reg); opcode(0xb8 | (reg&0x7)); /* mov imm64 -> reg64 */ id( (imm & 0xffffffffLL) ); id( ((imm >> 32)&0xffffffffLL) ); }
378ad02012-06-25Per Hedbor  else if( imm > 0 ) { rex(0,0,0,reg); opcode( 0xc7 ); /* mov imm32 -> reg/m 32, SE*/ modrm( 3,0,reg ); id( (int)imm ); }
719c3a2012-06-12Per Hedbor  else { rex(1,0,0,reg); opcode( 0xc7 ); /* mov imm32 -> reg/m 64, SE*/ modrm( 3,0,reg ); id( (int)imm ); } }
1807c02014-07-15Per Hedbor /* static void mov_ptr_reg( void *x, enum amd64_reg reg ) */ /* { */ /* mov_imm_reg( (long)x, reg ); */ /* } */
622ebc2012-06-25Per Hedbor static void mov_mem128_reg( enum amd64_reg from_reg, int offset, enum amd64_reg to_reg ) { if( from_reg > 7 ) Pike_fatal("Not supported\n"); rex(0,to_reg,0,from_reg); opcode( 0x66 ); opcode( 0x0f ); opcode( 0x6f ); /* MOVDQA xmm,m128 */ offset_modrm_sib( offset, to_reg, from_reg ); } static void mov_reg_mem128( enum amd64_reg from_reg, enum amd64_reg to_reg, int offset ) { if( from_reg > 7 ) Pike_fatal("Not supported\n"); rex(0,from_reg,0,to_reg); opcode( 0x66 ); opcode( 0x0f ); opcode( 0x7f ); /* MOVDQA m128,xmm */ offset_modrm_sib( offset, from_reg, to_reg ); }
fe5ef32014-09-14Per Hedbor static void low_set_if_cond(unsigned char subop, enum amd64_reg reg) { rex( 0, 0, 0, reg ); opcode( 0xf ); opcode( subop ); modrm( 3, 0, reg ); } static void set_if_lt(enum amd64_reg reg) { low_set_if_cond( 0x9c, reg ); } static void set_if_lte(enum amd64_reg reg) { low_set_if_cond( 0x9e, reg ); } static void set_if_gt(enum amd64_reg reg) { low_set_if_cond( 0x9f, reg ); } static void set_if_gte(enum amd64_reg reg) { low_set_if_cond( 0x9d, reg ); } static void set_if_eq(enum amd64_reg reg) { low_set_if_cond( 0x94, reg ); } static void set_if_neq(enum amd64_reg reg) { low_set_if_cond( 0x95, reg ); } #if 0 static void low_mov_if_cond_reg(unsigned char subop, enum amd64_reg to_reg, enum amd64_reg from_reg ) { rex(0, from_reg, 0, to_reg ); opcode( 0xf ); opcode( subop ); modrm_sib( 3, from_reg, to_reg ); } static void mov_if_lte_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x4e, from_reg, to_reg); } static void mov_if_gte_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x4d, from_reg, to_reg ); } static void mov_if_lt_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x4c, from_reg, to_reg ); } static void mov_if_gt_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x4f, from_reg, to_reg ); } static void mov_if_neq_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x45, from_reg, to_reg ); } static void mov_if_eq_reg( enum amd64_reg from_reg, enum amd64_reg to_reg ) { low_mov_if_cond_reg( 0x44, from_reg, to_reg ); } #endif
99c1e92012-06-20Per Hedbor static void low_mov_reg_mem(enum amd64_reg from_reg, enum amd64_reg to_reg, ptrdiff_t offset ) { opcode( 0x89 ); offset_modrm_sib( offset, from_reg, to_reg ); }
719c3a2012-06-12Per Hedbor static void mov_reg_mem( enum amd64_reg from_reg, enum amd64_reg to_reg, ptrdiff_t offset ) { rex(1, from_reg, 0, to_reg );
99c1e92012-06-20Per Hedbor  low_mov_reg_mem( from_reg, to_reg, offset ); }
719c3a2012-06-12Per Hedbor 
99c1e92012-06-20Per Hedbor static void mov_reg_mem32( enum amd64_reg from_reg, enum amd64_reg to_reg, ptrdiff_t offset ) { rex(0, from_reg, 0, to_reg ); low_mov_reg_mem( from_reg, to_reg, offset ); }
2f46bb2014-03-15Martin Nilsson #if 0
99c1e92012-06-20Per Hedbor static void mov_reg_mem16( enum amd64_reg from_reg, enum amd64_reg to_reg, ptrdiff_t offset ) { opcode( 0x66 ); rex(0, from_reg, 0, to_reg ); low_mov_reg_mem( from_reg, to_reg, offset );
719c3a2012-06-12Per Hedbor }
2f46bb2014-03-15Martin Nilsson #endif
719c3a2012-06-12Per Hedbor 
99c1e92012-06-20Per Hedbor 
719c3a2012-06-12Per Hedbor static void mov_imm_mem( long imm, enum amd64_reg to_reg, ptrdiff_t offset ) { if( imm >= -0x80000000LL && imm <= 0x7fffffffLL ) { rex( 1, 0, 0, to_reg ); opcode( 0xc7 ); /* mov imm32 -> r/m64 (sign extend)*/
99c1e92012-06-20Per Hedbor  offset_modrm_sib( offset, 0, to_reg );
719c3a2012-06-12Per Hedbor  id( imm ); } else {
2fd8052013-10-08Per Hedbor  if( to_reg == P_REG_RAX ) Pike_fatal( "Clobbered TMP P_REG_RAX reg\n"); mov_imm_reg( imm, P_REG_RAX ); mov_reg_mem( P_REG_RAX, to_reg, offset );
719c3a2012-06-12Per Hedbor  } } static void mov_imm_mem32( int imm, enum amd64_reg to_reg, ptrdiff_t offset ) { rex( 0, 0, 0, to_reg ); opcode( 0xc7 ); /* mov imm32 -> r/m32 (sign extend)*/ /* This does not work for rg&7 == 4 or 5. */
99c1e92012-06-20Per Hedbor  offset_modrm_sib( offset, 0, to_reg );
719c3a2012-06-12Per Hedbor  id( imm ); }
99c1e92012-06-20Per Hedbor static void sub_reg_imm( enum amd64_reg reg, int imm32 );
156ab02014-08-31Per Hedbor static void neg_mem( enum amd64_reg reg, int off ) { rex( 1, 0, 0, reg ); opcode(0xf7); offset_modrm_sib( off, 3, reg ); }
36fea02014-08-31Per Hedbor static void not_mem( enum amd64_reg reg, int off ) { rex( 1, 0, 0, reg ); opcode(0xf7); offset_modrm_sib( off, 2, reg ); }
719c3a2012-06-12Per Hedbor static void add_reg_imm( enum amd64_reg reg, int imm32 ) { if( !imm32 ) return;
99c1e92012-06-20Per Hedbor  if( imm32 < 0 ) { /* This makes the disassembly easier to read. */ sub_reg_imm( reg, -imm32 ); return; }
719c3a2012-06-12Per Hedbor  rex( 1, 0, 0, reg ); if( imm32 < -0x80 || imm32 > 0x7f ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
719c3a2012-06-12Per Hedbor  {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x05 ); /* ADD rax,imm32 */
719c3a2012-06-12Per Hedbor  id( imm32 ); } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x81 ); /* ADD REG,imm32 */
719c3a2012-06-12Per Hedbor  modrm( 3, 0, reg); id( imm32 ); } } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  add_to_program(0x83); /* ADD REG,imm8 */
719c3a2012-06-12Per Hedbor  modrm( 3, 0, reg ); ib( imm32 ); } }
99c1e92012-06-20Per Hedbor  static void low_add_mem_imm(int w, enum amd64_reg reg, int offset, int imm32 )
719c3a2012-06-12Per Hedbor { int r2 = imm32 == -1 ? 1 : 0; int large = 0; if( !imm32 ) return;
99c1e92012-06-20Per Hedbor  rex( w, 0, 0, reg );
719c3a2012-06-12Per Hedbor  if( r2 ) imm32 = -imm32; if( imm32 == 1 )
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0xff ); /* INCL(DECL) r/m32 */
719c3a2012-06-12Per Hedbor  else if( imm32 >= -128 && imm32 < 128 )
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x83 ); /* ADD imm8,r/m32 */
719c3a2012-06-12Per Hedbor  else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x81 ); /* ADD imm32,r/m32 */
719c3a2012-06-12Per Hedbor  large = 1; }
99c1e92012-06-20Per Hedbor  offset_modrm_sib( offset, r2, reg );
719c3a2012-06-12Per Hedbor  if( imm32 != 1 ) { if( large ) id( imm32 ); else ib( imm32 ); } }
99c1e92012-06-20Per Hedbor static void add_mem32_imm( enum amd64_reg reg, int offset, int imm32 )
719c3a2012-06-12Per Hedbor {
99c1e92012-06-20Per Hedbor  low_add_mem_imm( 0, reg, offset, imm32 );
6785ae2012-06-15Per Hedbor }
99c1e92012-06-20Per Hedbor static void add_mem_imm( enum amd64_reg reg, int offset, int imm32 )
6785ae2012-06-15Per Hedbor {
99c1e92012-06-20Per Hedbor  low_add_mem_imm( 1, reg, offset, imm32 );
6785ae2012-06-15Per Hedbor }
10d27a2014-08-08Per Hedbor #ifndef USE_VALGRIND
6d62b62014-07-15Per Hedbor static void add_mem8_imm( enum amd64_reg reg, int offset, int imm32 ) { int r2 = imm32 == -1 ? 1 : 0; if( !imm32 ) return; rex( 0, 0, 0, reg ); if( imm32 == 1 || imm32 == -1 ) opcode( 0xfe ); /* INCL r/m8 */ else if( imm32 >= -128 && imm32 < 128 ) opcode( 0x80 ); /* ADD imm8,r/m32 */ else Pike_fatal("Not sensible"); offset_modrm_sib( offset, r2, reg ); if( imm32 != 1 && !r2 ) { ib( imm32 ); } }
10d27a2014-08-08Per Hedbor #endif
719c3a2012-06-12Per Hedbor  static void sub_reg_imm( enum amd64_reg reg, int imm32 ) { if( !imm32 ) return;
99c1e92012-06-20Per Hedbor  if( imm32 < 0 ) /* This makes the disassembly easier to read. */ return add_reg_imm( reg, -imm32 );
719c3a2012-06-12Per Hedbor  rex( 1, 0, 0, reg ); if( imm32 < -0x80 || imm32 > 0x7f ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
719c3a2012-06-12Per Hedbor  {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x2d ); /* SUB rax,imm32 */
719c3a2012-06-12Per Hedbor  id( imm32 ); } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x81 ); /* SUB REG,imm32 */
719c3a2012-06-12Per Hedbor  modrm( 3, 5, reg); id( imm32 ); } } else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode(0x83); /* SUB REG,imm8 */
719c3a2012-06-12Per Hedbor  modrm( 3, 5, reg ); ib( imm32 ); } } static void test_reg_reg( enum amd64_reg reg1, enum amd64_reg reg2 ) { rex(1,reg1,0,reg2); opcode(0x85); modrm(3, reg1, reg2 ); }
b40c772014-08-18Per Hedbor static void test_reg32_reg( enum amd64_reg reg1, enum amd64_reg reg2 ) { rex(0,reg1,0,reg2); opcode(0x85); modrm(3, reg1, reg2 ); }
719c3a2012-06-12Per Hedbor static void test_reg( enum amd64_reg reg1 ) { test_reg_reg( reg1, reg1 ); }
b40c772014-08-18Per Hedbor static void test_reg32( enum amd64_reg reg1 ) { test_reg32_reg( reg1, reg1 ); }
719c3a2012-06-12Per Hedbor static void cmp_reg_imm( enum amd64_reg reg, int imm32 ) {
0dcd652014-08-18Per Hedbor  if(!imm32) { test_reg( reg ); return; }
719c3a2012-06-12Per Hedbor  rex(1, 0, 0, reg); if( imm32 > 0x7f || imm32 < -0x80 ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
719c3a2012-06-12Per Hedbor  { opcode( 0x3d ); id( imm32 ); } else { opcode( 0x81 ); modrm(3,7,reg); id( imm32 ); } } else { opcode( 0x83 ); modrm( 3,7,reg); ib( imm32 ); } }
4ceb792012-06-22Per Hedbor static void cmp_reg32_imm( enum amd64_reg reg, int imm32 ) {
0dcd652014-08-18Per Hedbor  if(!imm32) {
156ab02014-08-31Per Hedbor  test_reg32( reg );
0dcd652014-08-18Per Hedbor  return; }
4ceb792012-06-22Per Hedbor  rex(0, 0, 0, reg); if( imm32 > 0x7f || imm32 < -0x80 ) {
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
4ceb792012-06-22Per Hedbor  { opcode( 0x3d ); id( imm32 ); } else { opcode( 0x81 ); modrm(3,7,reg); id( imm32 ); } } else { opcode( 0x83 ); modrm( 3,7,reg); ib( imm32 ); } }
719c3a2012-06-12Per Hedbor static void cmp_reg_reg( enum amd64_reg reg1, enum amd64_reg reg2 ) {
996c482013-06-19Henrik Grubbström (Grubba)  rex(1, reg2, 0, reg1);
719c3a2012-06-12Per Hedbor  opcode( 0x39 );
996c482013-06-19Henrik Grubbström (Grubba)  modrm( 3, reg2, reg1 );
719c3a2012-06-12Per Hedbor }
11619b2012-07-18Henrik Grubbström (Grubba) static int jmp_rel_imm32( int addr )
719c3a2012-06-12Per Hedbor {
11619b2012-07-18Henrik Grubbström (Grubba)  int rel = addr - (PIKE_PC + 5); // counts from the next instruction
719c3a2012-06-12Per Hedbor  int res; opcode( 0xe9 ); res = PIKE_PC; id( rel ); return res; }
11619b2012-07-18Henrik Grubbström (Grubba) static void jmp_rel_imm( int addr )
719c3a2012-06-12Per Hedbor {
11619b2012-07-18Henrik Grubbström (Grubba)  int rel = addr - (PIKE_PC + 2); // counts from the next instruction
719c3a2012-06-12Per Hedbor  if(rel >= -0x80 && rel <= 0x7f ) { opcode( 0xeb ); ib( rel ); return; }
11619b2012-07-18Henrik Grubbström (Grubba)  jmp_rel_imm32( addr );
719c3a2012-06-12Per Hedbor }
11619b2012-07-18Henrik Grubbström (Grubba) static void call_rel_imm32( int addr )
6785ae2012-06-15Per Hedbor {
11619b2012-07-18Henrik Grubbström (Grubba)  int rel = addr - (PIKE_PC + 5); // counts from the next instruction
6785ae2012-06-15Per Hedbor  opcode( 0xe8 ); id( rel );
84c5f62012-06-28Per Hedbor  sp_reg = -1;
6785ae2012-06-15Per Hedbor  return; }
719c3a2012-06-12Per Hedbor static void jmp_reg( enum amd64_reg reg ) { rex(0,reg,0,0); opcode( 0xff ); modrm( 3, 4, reg ); } static void call_reg( enum amd64_reg reg ) { rex(0,reg,0,0); opcode( 0xff ); modrm( 3, 2, reg ); } static void call_imm( void *ptr ) { size_t addr = (size_t)ptr; if( (addr & ~0x7fffffffLL) && !(addr & ~0x3fffffff8LL) ) {
2fd8052013-10-08Per Hedbor  mov_imm_reg( addr>>3, P_REG_RAX); shl_reg_imm( P_REG_RAX, 3 );
719c3a2012-06-12Per Hedbor  } else {
2fd8052013-10-08Per Hedbor  mov_imm_reg(addr, P_REG_RAX );
719c3a2012-06-12Per Hedbor  }
2fd8052013-10-08Per Hedbor  call_reg( P_REG_RAX );
719c3a2012-06-12Per Hedbor }
6785ae2012-06-15Per Hedbor /* reg += reg2 */ static void add_reg_reg( enum amd64_reg reg, enum amd64_reg reg2 ) { rex(1,reg2,0,reg); opcode( 0x1 ); modrm( 3, reg2, reg ); }
2e82fb2014-08-31Per Hedbor /* reg *= reg2 */ static void mul_reg_reg( enum amd64_reg reg, enum amd64_reg reg2 ) { #if 0 /* 64b * 64b -> 128b This would actually not need an overflow check. instead, push high if not zero, push low, genreate gmp. but... Well. */ if( reg2 == P_REG_RAX ) { rex(1,0,0,reg); /* imul r/m64 */ opcode( 0xf7 ); modrm( 3,5,reg ); } else #endif { rex(1,reg,0,reg2); opcode( 0xf ); opcode( 0xaf ); modrm( 3, reg, reg2 ); } }
5d4e632014-08-31Per Hedbor /* reg /= reg2 */ static void div_reg_reg( enum amd64_reg reg, enum amd64_reg reg2 ) { if( reg != P_REG_RAX ) Pike_error("Not supported, reg1 must be RAX\n"); if( reg2 == P_REG_RDX ) Pike_error("Clobbers RAX+RDX\n"); rex(1,0,0,0); opcode(0x99); /* *cqo: *sign extend 64bit rax -> 128bit in rdx:rax */ rex(1,0,0,reg2); opcode( 0xf7 ); modrm( 3, 7, reg2 ); /* idiv, ax = dx:ax / reg2 dx = dx:ax % reg2 */ }
ad0d7e2014-08-28Per Hedbor /* reg += reg2 */ static void add_reg32_reg32( enum amd64_reg reg, enum amd64_reg reg2 ) { rex(0,reg2,0,reg); opcode( 0x1 ); modrm( 3, reg2, reg ); }
6785ae2012-06-15Per Hedbor /* reg -= reg2 */ static void sub_reg_reg( enum amd64_reg reg, enum amd64_reg reg2 ) { rex(1,reg2,0,reg); opcode( 0x29 ); modrm( 3, reg2, reg ); } /* dst += *(src+off) */ static void add_reg_mem( enum amd64_reg dst, enum amd64_reg src, long off ) { rex(1,dst,0,src); opcode( 0x3 );
99c1e92012-06-20Per Hedbor  offset_modrm_sib( off, dst, src );
6785ae2012-06-15Per Hedbor } /* dst -= *(src+off) */ static void sub_reg_mem( enum amd64_reg dst, enum amd64_reg src, long off ) { rex(1,dst,0,src); opcode( 0x2b );
99c1e92012-06-20Per Hedbor  offset_modrm_sib( off, dst, src );
6785ae2012-06-15Per Hedbor }
719c3a2012-06-12Per Hedbor  /* dst = src + imm32 (LEA, does not set flags!) */ static void add_reg_imm_reg( enum amd64_reg src, long imm32, enum amd64_reg dst ) { if( imm32 > 0x7fffffffLL || imm32 <-0x80000000LL) Pike_fatal("LEA [reg+imm] > 32bit Not supported\n"); if( src == dst ) { if( !imm32 ) return; add_reg_imm( src, imm32 ); } else { if( !imm32 ) { mov_reg_reg( src, dst ); return; } rex(1,dst,0,src); opcode( 0x8d ); /* LEA r64,m */
99c1e92012-06-20Per Hedbor  offset_modrm_sib( imm32, dst, src );
719c3a2012-06-12Per Hedbor  } } /* load code adress + imm to reg, always 32bit offset */ static void mov_rip_imm_reg( int imm, enum amd64_reg reg ) {
1c1a242012-06-13Henrik Grubbström (Grubba)  imm -= 7; /* The size of this instruction. */
719c3a2012-06-12Per Hedbor  rex( 1, reg, 0, 0 );
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x8d ); /* LEA */
719c3a2012-06-12Per Hedbor  modrm( 0, reg, 5 ); id( imm ); } static void add_imm_mem( int imm32, enum amd64_reg reg, int offset ) { int r2 = (imm32 == -1) ? 1 : 0; int large = 0; /* OPCODE */ rex( 1, 0, 0, reg ); if( imm32 == 1 || imm32 == -1 )
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0xff ); /* INCL(decl) r/m32 */
719c3a2012-06-12Per Hedbor  else if( -128 <= imm32 && 128 > imm32 )
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x83 ); /* ADD imm8,r/m32 */
719c3a2012-06-12Per Hedbor  else {
1c1a242012-06-13Henrik Grubbström (Grubba)  opcode( 0x81 ); /* ADD imm32,r/m32 */
719c3a2012-06-12Per Hedbor  large = 1; }
99c1e92012-06-20Per Hedbor  offset_modrm_sib( offset, r2, reg );
719c3a2012-06-12Per Hedbor  /* VALUE */ if( imm32 != 1 && !r2 ) { if( large ) id( imm32 ); else ib( imm32 ); } } static void jump_rel8( struct label *res, unsigned char op ) { opcode( op );
7990392006-04-27Tor Edvardsson 
15e8a02012-06-13Henrik Grubbström (Grubba)  if (res->addr >= 0) { ib(res->addr - (PIKE_PC+1)); return; }
719c3a2012-06-12Per Hedbor  if( res->n_label_uses >= MAX_LABEL_USES ) Pike_fatal( "Label used too many times\n" ); res->offset[res->n_label_uses] = PIKE_PC; res->n_label_uses++; ib(0); } static int jnz_imm_rel32( int rel ) { int res; opcode( 0xf ); opcode( 0x85 ); res = PIKE_PC; id( rel ); return res; } static int jz_imm_rel32( int rel ) { int res; opcode( 0xf ); opcode( 0x84 ); res = PIKE_PC; id( rel ); return res; } #define jne(X) jnz(X) #define je(X) jz(X) static void jmp( struct label *l ) { return jump_rel8( l, 0xeb ); } static void jo( struct label *l ) { return jump_rel8( l, 0x70 ); }
6785ae2012-06-15Per Hedbor static void jno( struct label *l ) { return jump_rel8( l, 0x71 ); } static void jz( struct label *l ) { return jump_rel8( l, 0x74 ); } static void jnz( struct label *l ) { return jump_rel8( l, 0x75 ); }
8886032012-06-25Per Hedbor static void js( struct label *l ) { return jump_rel8( l, 0x78 ); } static void jl( struct label *l ) { return jump_rel8( l, 0x7c ); } static void jge( struct label *l ) { return jump_rel8( l, 0x7d ); } static void jle( struct label *l ) { return jump_rel8( l, 0x7e ); }
2ce89e2014-08-31Per Hedbor #if 0
fe5ef32014-09-14Per Hedbor static void jc( struct label *l ) { return jump_rel8( l, 0x72 ); } static void jnc( struct label *l ) { return jump_rel8( l, 0x73 ); }
ca2a072014-09-02Martin Nilsson static void jg( struct label *l ) { return jump_rel8( l, 0x7f ); }
2f46bb2014-03-15Martin Nilsson #endif
719c3a2012-06-12Per Hedbor 
378ad02012-06-25Per Hedbor #define LABELS() struct label label_A, label_B, label_C, label_D, label_E;label_A.addr = -1;label_A.n_label_uses = 0;label_B.addr = -1;label_B.n_label_uses = 0;label_C.addr = -1;label_C.n_label_uses = 0;label_D.addr = -1;label_D.n_label_uses = 0;label_E.addr=-1;label_E.n_label_uses=0;
719c3a2012-06-12Per Hedbor #define LABEL_A label(&label_A) #define LABEL_B label(&label_B) #define LABEL_C label(&label_C)
6785ae2012-06-15Per Hedbor #define LABEL_D label(&label_D)
378ad02012-06-25Per Hedbor #define LABEL_E label(&label_E)
d1fa802011-05-09Henrik Grubbström (Grubba) 
638a2c2012-07-16Per Hedbor static void amd64_ins_branch_check_threads_etc(int code_only); static int func_start = 0; /* Called at the start of a function. */ void amd64_start_function(int store_lines ) { store_pc = store_lines; branch_check_threads_update_etc = 0; /* * store_lines is true for any non-constant evaluation function * In reality we should only do this if we are going to be using * check_threads_etc (ie, if we have a loop). * * It does not really waste all that much space in the code, though. * * We could really do this one time per program instead of once per * function. * * But it is very hard to know when a new program starts, and * PIKE_PC is not reliable as a marker since constant evaluations * are also done using this code. So for now, waste ~20 bytes per * function in the program code. */ if( store_lines ) amd64_ins_branch_check_threads_etc(1); func_start = PIKE_PC; } /* Called when the current function is done */
74dfe82012-12-30Jonas Walldén void amd64_end_function(int UNUSED(no_pc))
638a2c2012-07-16Per Hedbor { branch_check_threads_update_etc = 0; }
d1fa802011-05-09Henrik Grubbström (Grubba) /* Machine code entry prologue. * * On entry: * RDI: Pike_interpreter (ARG1_REG) * * During interpreting:
639a932011-05-15Henrik Grubbström (Grubba)  * R15: Pike_interpreter
d1fa802011-05-09Henrik Grubbström (Grubba)  */ void amd64_ins_entry(void) {
d46b1d2014-08-11Per Hedbor  size_t orig_ppc = PIKE_PC;
d1fa802011-05-09Henrik Grubbström (Grubba)  /* Push all registers that the ABI requires to be preserved. */
2fd8052013-10-08Per Hedbor  push(P_REG_RBP);
5871372014-08-11Per Hedbor  mov_reg_reg(P_REG_RSP, P_REG_RBP);
2fd8052013-10-08Per Hedbor  push(P_REG_R15); push(P_REG_R14); push(P_REG_R13); push(P_REG_R12); push(P_REG_RBX); sub_reg_imm(P_REG_RSP, 8); /* Align on 16 bytes. */
719c3a2012-06-12Per Hedbor  mov_reg_reg(ARG1_REG, Pike_interpreter_reg);
639a932011-05-15Henrik Grubbström (Grubba)  amd64_flush_code_generator_state();
d46b1d2014-08-11Per Hedbor  if( PIKE_PC - orig_ppc != ENTRY_PROLOGUE_SIZE ) /* sanity check */ Pike_fatal("ENTRY_PROLOGUE_SIZE incorrectly set, should be 0x%x\n", PIKE_PC-orig_ppc );
d1fa802011-05-09Henrik Grubbström (Grubba) }
54a26b2011-05-11Henrik Grubbström (Grubba) void amd64_flush_code_generator_state(void) {
83b92e2014-08-16Per Hedbor  amd64_prev_stored_pc = -1;
638a2c2012-07-16Per Hedbor  ret_for_func = 0;
83b92e2014-08-16Per Hedbor  fp_reg = -1; sp_reg = -1;
6810702012-06-21Henrik Grubbström (Grubba)  mark_sp_reg = -1;
54a26b2011-05-11Henrik Grubbström (Grubba)  dirty_regs = 0; }
9030f62011-05-26Henrik Grubbström (Grubba) static void flush_dirty_regs(void) { /* NB: PIKE_FP_REG is currently never dirty. */ if (dirty_regs & (1 << PIKE_SP_REG)) {
719c3a2012-06-12Per Hedbor  mov_reg_mem(PIKE_SP_REG, Pike_interpreter_reg,
d97eb72011-07-10Henrik Grubbström (Grubba)  OFFSETOF(Pike_interpreter_struct, stack_pointer));
9030f62011-05-26Henrik Grubbström (Grubba)  dirty_regs &= ~(1 << PIKE_SP_REG); } if (dirty_regs & (1 << PIKE_MARK_SP_REG)) {
719c3a2012-06-12Per Hedbor  mov_reg_mem(PIKE_MARK_SP_REG, Pike_interpreter_reg,
d97eb72011-07-10Henrik Grubbström (Grubba)  OFFSETOF(Pike_interpreter_struct, mark_stack_pointer));
9030f62011-05-26Henrik Grubbström (Grubba)  dirty_regs &= ~(1 << PIKE_MARK_SP_REG); } }
7990392006-04-27Tor Edvardsson 
54a26b2011-05-11Henrik Grubbström (Grubba) /* NB: We load Pike_fp et al into registers that * are persistent across function calls. */
28b8542014-08-11Per Hedbor static void amd64_load_fp_reg(void)
54a26b2011-05-11Henrik Grubbström (Grubba) {
6810702012-06-21Henrik Grubbström (Grubba)  if (fp_reg < 0) {
719c3a2012-06-12Per Hedbor  mov_mem_reg(Pike_interpreter_reg,
6785ae2012-06-15Per Hedbor  OFFSETOF(Pike_interpreter_struct, frame_pointer), PIKE_FP_REG);
54a26b2011-05-11Henrik Grubbström (Grubba)  fp_reg = PIKE_FP_REG; } }
28b8542014-08-11Per Hedbor static void amd64_load_sp_reg(void)
54a26b2011-05-11Henrik Grubbström (Grubba) {
6810702012-06-21Henrik Grubbström (Grubba)  if (sp_reg < 0) {
719c3a2012-06-12Per Hedbor  mov_mem_reg(Pike_interpreter_reg,
6785ae2012-06-15Per Hedbor  OFFSETOF(Pike_interpreter_struct, stack_pointer), PIKE_SP_REG);
54a26b2011-05-11Henrik Grubbström (Grubba)  sp_reg = PIKE_SP_REG; } }
28b8542014-08-11Per Hedbor static void amd64_load_mark_sp_reg(void)
54a26b2011-05-11Henrik Grubbström (Grubba) {
6810702012-06-21Henrik Grubbström (Grubba)  if (mark_sp_reg < 0) {
719c3a2012-06-12Per Hedbor  mov_mem_reg(Pike_interpreter_reg,
6785ae2012-06-15Per Hedbor  OFFSETOF(Pike_interpreter_struct, mark_stack_pointer), PIKE_MARK_SP_REG);
54a26b2011-05-11Henrik Grubbström (Grubba)  mark_sp_reg = PIKE_MARK_SP_REG; } }
28b8542014-08-11Per Hedbor  static void mov_sval_type(enum amd64_reg src, enum amd64_reg dst ) {
85c1c12014-08-18Per Hedbor  mov_mem8_reg( src, SVAL(0).type, dst);
28b8542014-08-11Per Hedbor /* and_reg32_imm( dst, 0x1f );*/ }
83b92e2014-08-16Per Hedbor #if 0
28b8542014-08-11Per Hedbor static void svalue_is_referenced(enum amd64_reg in, struct label *not ) { /* bit 4 set, and no higher bit set (all with 8bit values). */ /* aka: type & 248 == 8. Incidentally, 248 is equal to -8 in signed 8bit*/ and_reg_imm(in,-8); /* jz(not) -- saves one comparison in most cases, adds code size, more jumps. */ cmp_reg_imm(in,MIN_REF_TYPE); jne( not ); } static void mem_svalue_is_referenced(enum amd64_reg in, struct label *not ) { /* bit 4 set, and no higher bit set. */ if( in == P_REG_RAX ) Pike_error("RAX not supported here.\n"); mov_sval_type(in,P_REG_RAX); svalue_is_referenced(P_REG_RAX, not ); }
83b92e2014-08-16Per Hedbor #endif
28b8542014-08-11Per Hedbor 
d1fa802011-05-09Henrik Grubbström (Grubba) static void update_arg1(INT32 value) {
719c3a2012-06-12Per Hedbor  mov_imm_reg(value, ARG1_REG);
d1fa802011-05-09Henrik Grubbström (Grubba)  /* FIXME: Alloc stack space on NT. */ }
7990392006-04-27Tor Edvardsson 
d1fa802011-05-09Henrik Grubbström (Grubba) static void update_arg2(INT32 value) {
719c3a2012-06-12Per Hedbor  mov_imm_reg(value, ARG2_REG);
d1fa802011-05-09Henrik Grubbström (Grubba)  /* FIXME: Alloc stack space on NT. */
7990392006-04-27Tor Edvardsson }
719c3a2012-06-12Per Hedbor static void amd64_add_sp( int num ) { amd64_load_sp_reg();
85c1c12014-08-18Per Hedbor  add_reg_imm( sp_reg, num * sizeof(struct svalue));
719c3a2012-06-12Per Hedbor  dirty_regs |= 1 << PIKE_SP_REG;
1af0932012-06-22Per Hedbor  flush_dirty_regs(); /* FIXME: Need to change LABEL handling to remove this */
719c3a2012-06-12Per Hedbor } static void amd64_add_mark_sp( int num ) { amd64_load_mark_sp_reg();
85c1c12014-08-18Per Hedbor  add_reg_imm( mark_sp_reg, num * sizeof(struct svalue*));
719c3a2012-06-12Per Hedbor  dirty_regs |= 1 << PIKE_MARK_SP_REG;
1af0932012-06-22Per Hedbor  flush_dirty_regs(); /* FIXME: Need to change LABEL handling to remove this */
719c3a2012-06-12Per Hedbor }
2fd8052013-10-08Per Hedbor /* Note: Uses RAX and RCX internally. reg MUST not be P_REG_RAX. */
378ad02012-06-25Per Hedbor static void amd64_push_svaluep_to(int reg, int spoff)
9030f62011-05-26Henrik Grubbström (Grubba) {
719c3a2012-06-12Per Hedbor  LABELS();
2fd8052013-10-08Per Hedbor  if( reg == P_REG_RAX )
378ad02012-06-25Per Hedbor  Pike_fatal("Using RAX in push_svaluep not supported\n" );
9030f62011-05-26Henrik Grubbström (Grubba)  amd64_load_sp_reg();
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_mem_reg(reg, OFFSETOF(svalue, tu.t.type), P_REG_RAX);
2fd8052013-10-08Per Hedbor  mov_mem_reg(reg, OFFSETOF(svalue, u.refs), P_REG_RCX);
85c1c12014-08-18Per Hedbor  mov_reg_mem(P_REG_RAX, sp_reg, SVAL(spoff).type); mov_reg_mem(P_REG_RCX, sp_reg, SVAL(spoff).value);
e305942014-08-08Per Hedbor  and_reg32_imm(P_REG_RAX, MIN_REF_TYPE); jz(&label_A);
2fd8052013-10-08Per Hedbor  add_imm_mem( 1, P_REG_RCX, OFFSETOF(pike_string, refs));
719c3a2012-06-12Per Hedbor  LABEL_A;
378ad02012-06-25Per Hedbor } static void amd64_push_svaluep(int reg) { amd64_push_svaluep_to( reg, 0 );
719c3a2012-06-12Per Hedbor  amd64_add_sp( 1 );
9030f62011-05-26Henrik Grubbström (Grubba) }
eb42a12011-05-11Henrik Grubbström (Grubba) static void amd64_push_int(INT64 value, int subtype) { amd64_load_sp_reg();
85c1c12014-08-18Per Hedbor  mov_imm_mem((subtype<<16) + PIKE_T_INT, sp_reg, SVAL(0).type); mov_imm_mem(value, sp_reg, SVAL(0).value);
719c3a2012-06-12Per Hedbor  amd64_add_sp( 1 ); } static void amd64_push_int_reg(enum amd64_reg reg ) { amd64_load_sp_reg();
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( PIKE_T_INT, sp_reg, OFFSETOF(svalue, tu.t.type));
719c3a2012-06-12Per Hedbor  mov_reg_mem( reg, sp_reg, OFFSETOF(svalue, u.integer)); amd64_add_sp( 1 );
eb42a12011-05-11Henrik Grubbström (Grubba) }
d1fa802011-05-09Henrik Grubbström (Grubba) 
4ec32c2014-08-07Per Hedbor static void amd64_get_storage( enum amd64_reg reg, ptrdiff_t offset ) { amd64_load_fp_reg();
5871372014-08-11Per Hedbor #if 0 /* Note: We really should keep pike_frame->current_storage up to date instead.. */ mov_mem_reg( fp_reg, OFFSETOF(pike_frame,current_storage), P_REG_RAX );
4ec32c2014-08-07Per Hedbor  add_reg_imm( reg, offset ); #else /* fp->current_object->storage */ mov_mem_reg(fp_reg, OFFSETOF(pike_frame, current_object), P_REG_RAX); mov_mem_reg(P_REG_RAX, OFFSETOF(object,storage), reg ); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, context), P_REG_RAX); /* + fp->context->storage_offset */ add_reg_mem( reg, P_REG_RAX, OFFSETOF(inherit,storage_offset) ); /* + offset */ add_reg_imm( reg, offset); #endif }
ab7cf52011-05-24Henrik Grubbström (Grubba) static void amd64_mark(int offset) { amd64_load_sp_reg(); amd64_load_mark_sp_reg(); if (offset) {
2fd8052013-10-08Per Hedbor  add_reg_imm_reg(sp_reg, -offset * sizeof(struct svalue), P_REG_RAX); mov_reg_mem(P_REG_RAX, mark_sp_reg, 0);
ab7cf52011-05-24Henrik Grubbström (Grubba)  } else {
719c3a2012-06-12Per Hedbor  mov_reg_mem(sp_reg, mark_sp_reg, 0);
ab7cf52011-05-24Henrik Grubbström (Grubba)  }
719c3a2012-06-12Per Hedbor  amd64_add_mark_sp( 1 ); } static void amd64_call_c_function(void *addr) { flush_dirty_regs(); call_imm(addr); }
85c1c12014-08-18Per Hedbor static void amd64_free_svalue_off(enum amd64_reg src, int off, int guaranteed_ref )
719c3a2012-06-12Per Hedbor { LABELS();
2fd8052013-10-08Per Hedbor  if( src == P_REG_RAX )
719c3a2012-06-12Per Hedbor  Pike_fatal("Clobbering RAX for free-svalue\n"); /* load type -> RAX */
85c1c12014-08-18Per Hedbor  mov_mem8_reg( src, off, P_REG_RAX );
e305942014-08-08Per Hedbor  and_reg_imm(P_REG_RAX, MIN_REF_TYPE); jz( &label_A );
719c3a2012-06-12Per Hedbor  /* Load pointer to refs -> RAX */
85c1c12014-08-18Per Hedbor  mov_mem_reg( src,off+OFFSETOF(svalue, u.refs), P_REG_RAX);
719c3a2012-06-12Per Hedbor  /* if( !--*RAX ) */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1);
719c3a2012-06-12Per Hedbor  if( !guaranteed_ref ) { /* We need to see if refs got to 0. */ jnz( &label_A );
99c1e92012-06-20Per Hedbor  /* if so, call really_free_svalue */
85c1c12014-08-18Per Hedbor  add_reg_imm_reg( src, off, ARG1_REG );
99c1e92012-06-20Per Hedbor  amd64_call_c_function(really_free_svalue); } LABEL_A; }
85c1c12014-08-18Per Hedbor static void amd64_free_svalue(enum amd64_reg src, int guaranteed_ref ) { amd64_free_svalue_off(src,0,guaranteed_ref); }
ba7d5e2013-06-19Henrik Grubbström (Grubba) /* Type already in register. Note: Clobbers the type register. */
99c1e92012-06-20Per Hedbor static void amd64_free_svalue_type(enum amd64_reg src, enum amd64_reg type, int guaranteed_ref ) { LABELS();
833cae2013-06-12Henrik Grubbström (Grubba)  /* if type < MIN_REF_TYPE+1 */
2fd8052013-10-08Per Hedbor  if( src == P_REG_RAX )
99c1e92012-06-20Per Hedbor  Pike_fatal("Clobbering RAX for free-svalue\n");
1af0932012-06-22Per Hedbor 
e305942014-08-08Per Hedbor  and_reg32_imm(type, MIN_REF_TYPE); jz( &label_A );
99c1e92012-06-20Per Hedbor  /* Load pointer to refs -> RAX */
2fd8052013-10-08Per Hedbor  mov_mem_reg( src, OFFSETOF(svalue, u.refs), P_REG_RAX);
99c1e92012-06-20Per Hedbor  /* if( !--*RAX ) */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1);
99c1e92012-06-20Per Hedbor  if( !guaranteed_ref ) { /* We need to see if refs got to 0. */ jnz( &label_A ); /* if so, call really_free_svalue */
719c3a2012-06-12Per Hedbor  if( src != ARG1_REG ) mov_reg_reg( src, ARG1_REG ); amd64_call_c_function(really_free_svalue);
ab7cf52011-05-24Henrik Grubbström (Grubba)  }
719c3a2012-06-12Per Hedbor  LABEL_A; }
b505a72012-06-13Per Hedbor void amd64_ref_svalue( enum amd64_reg src, int already_have_type )
719c3a2012-06-12Per Hedbor { LABELS();
2fd8052013-10-08Per Hedbor  if( src == P_REG_RAX ) Pike_fatal("Clobbering src in ref_svalue\n");
b505a72012-06-13Per Hedbor  if( !already_have_type )
2fd8052013-10-08Per Hedbor  mov_sval_type( src, P_REG_RAX );
e305942014-08-08Per Hedbor  and_reg32_imm( P_REG_RAX, MIN_REF_TYPE ); jz( &label_A );
719c3a2012-06-12Per Hedbor  /* Load pointer to refs -> RAX */
2fd8052013-10-08Per Hedbor  mov_mem_reg( src, OFFSETOF(svalue, u.refs), P_REG_RAX);
719c3a2012-06-12Per Hedbor  /* *RAX++ */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), 1);
719c3a2012-06-12Per Hedbor  LABEL_A; }
4ec32c2014-08-07Per Hedbor void amd64_assign_svalue_no_free( enum amd64_reg dst, enum amd64_reg src, ptrdiff_t src_offset ) { if( dst == P_REG_RAX ||src == P_REG_RAX ) Pike_fatal( "Clobbering src/dst in amd64_assign_svalue_no_free <%d,%d>\n", dst, src); /* Copy src -> dst */ mov_mem_reg(src, src_offset, P_REG_RAX); mov_reg_mem(P_REG_RAX, dst, 0 ); mov_mem_reg(src, src_offset+sizeof(long), P_REG_RAX); mov_reg_mem(P_REG_RAX, dst, sizeof(long) ); }
719c3a2012-06-12Per Hedbor void amd64_assign_local( int b ) { amd64_load_fp_reg(); amd64_load_sp_reg();
4ec32c2014-08-07Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), P_REG_RBX); add_reg_imm( P_REG_RBX, b*sizeof(struct svalue) ); amd64_free_svalue(P_REG_RBX, 0); amd64_assign_svalue_no_free( P_REG_RBX, sp_reg, -1*sizeof(struct svalue));
ab7cf52011-05-24Henrik Grubbström (Grubba) } static void amd64_pop_mark(void) {
719c3a2012-06-12Per Hedbor  amd64_add_mark_sp( -1 );
ab7cf52011-05-24Henrik Grubbström (Grubba) } static void amd64_push_string(int strno, int subtype) { amd64_load_fp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, context), P_REG_RAX); mov_mem_reg(P_REG_RAX, OFFSETOF(inherit, prog), P_REG_RAX); mov_mem_reg(P_REG_RAX, OFFSETOF(program, strings), P_REG_RAX); mov_mem_reg(P_REG_RAX, strno * sizeof(struct pike_string *), P_REG_RAX);
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem((subtype<<16) | PIKE_T_STRING, sp_reg, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  mov_reg_mem(P_REG_RAX, sp_reg,(INT32)OFFSETOF(svalue, u.string)); add_imm_mem( 1, P_REG_RAX, OFFSETOF(pike_string, refs));
719c3a2012-06-12Per Hedbor  amd64_add_sp(1);
ab7cf52011-05-24Henrik Grubbström (Grubba) }
9030f62011-05-26Henrik Grubbström (Grubba) static void amd64_push_local_function(int fun) { amd64_load_fp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, context), P_REG_RAX);
7e8ef52014-08-15Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, current_object),P_REG_RCX);
2fd8052013-10-08Per Hedbor  mov_mem32_reg(P_REG_RAX, OFFSETOF(inherit, identifier_level),
7e8ef52014-08-15Per Hedbor  P_REG_RAX);
2fd8052013-10-08Per Hedbor  mov_reg_mem(P_REG_RCX, sp_reg, OFFSETOF(svalue, u.object)); add_reg_imm(P_REG_RAX, fun);
7e8ef52014-08-15Per Hedbor  add_imm_mem(1, P_REG_RCX,(INT32)OFFSETOF(object, refs));
2fd8052013-10-08Per Hedbor  shl_reg_imm(P_REG_RAX, 16); add_reg_imm(P_REG_RAX, PIKE_T_FUNCTION);
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_reg_mem(P_REG_RAX, sp_reg, OFFSETOF(svalue, tu.t.type));
719c3a2012-06-12Per Hedbor  amd64_add_sp(1);
d1fa802011-05-09Henrik Grubbström (Grubba) } void amd64_update_pc(void) { INT32 tmp = PIKE_PC, disp;
2fd8052013-10-08Per Hedbor  const enum amd64_reg tmp_reg = P_REG_RAX;
719c3a2012-06-12Per Hedbor  if(amd64_prev_stored_pc == - 1) {
54a26b2011-05-11Henrik Grubbström (Grubba)  amd64_load_fp_reg();
719c3a2012-06-12Per Hedbor  mov_rip_imm_reg(tmp - PIKE_PC, tmp_reg); mov_reg_mem(tmp_reg, fp_reg, OFFSETOF(pike_frame, pc));
7990392006-04-27Tor Edvardsson #ifdef PIKE_DEBUG if (a_flag >= 60)
719c3a2012-06-12Per Hedbor  fprintf (stderr, "pc %d update pc via lea\n", tmp);
7990392006-04-27Tor Edvardsson #endif
bf49e12012-06-15Henrik Grubbström (Grubba)  amd64_prev_stored_pc = tmp;
7990392006-04-27Tor Edvardsson  }
719c3a2012-06-12Per Hedbor  else if ((disp = tmp - amd64_prev_stored_pc)) {
7990392006-04-27Tor Edvardsson #ifdef PIKE_DEBUG if (a_flag >= 60) fprintf (stderr, "pc %d update pc relative: %d\n", tmp, disp); #endif
54a26b2011-05-11Henrik Grubbström (Grubba)  amd64_load_fp_reg();
84c5f62012-06-28Per Hedbor  mov_rip_imm_reg(tmp - PIKE_PC, tmp_reg); mov_reg_mem(tmp_reg, fp_reg, OFFSETOF(pike_frame, pc)); /* amd64_load_fp_reg(); */ /* add_imm_mem(disp, fp_reg, OFFSETOF (pike_frame, pc)); */
bf49e12012-06-15Henrik Grubbström (Grubba)  amd64_prev_stored_pc += disp;
7990392006-04-27Tor Edvardsson  }
719c3a2012-06-12Per Hedbor  else {
7990392006-04-27Tor Edvardsson #ifdef PIKE_DEBUG if (a_flag >= 60) fprintf (stderr, "pc %d update pc - already up-to-date\n", tmp); #endif
719c3a2012-06-12Per Hedbor  }
7990392006-04-27Tor Edvardsson }
d1fa802011-05-09Henrik Grubbström (Grubba) static void maybe_update_pc(void) { static int last_prog_id=-1; static size_t last_num_linenumbers=-1;
638a2c2012-07-16Per Hedbor  if( !store_pc ) return;
719c3a2012-06-12Per Hedbor 
d1fa802011-05-09Henrik Grubbström (Grubba)  if(
7990392006-04-27Tor Edvardsson #ifdef PIKE_DEBUG /* Update the pc more often for the sake of the opcode level trace. */ d_flag || #endif (amd64_prev_stored_pc == -1) || last_prog_id != Pike_compiler->new_program->id || last_num_linenumbers != Pike_compiler->new_program->num_linenumbers ) { last_prog_id=Pike_compiler->new_program->id; last_num_linenumbers = Pike_compiler->new_program->num_linenumbers; UPDATE_PC(); } }
f3bbcc2012-07-13Henrik Grubbström (Grubba) static void maybe_load_fp(void) { static int last_prog_id=-1; static size_t last_num_linenumbers=-1;
638a2c2012-07-16Per Hedbor  if( !store_pc ) return;
f3bbcc2012-07-13Henrik Grubbström (Grubba)  if( #ifdef PIKE_DEBUG /* Update the pc more often for the sake of the opcode level trace. */ d_flag || #endif (amd64_prev_stored_pc == -1) || last_prog_id != Pike_compiler->new_program->id || last_num_linenumbers != Pike_compiler->new_program->num_linenumbers ) { amd64_load_fp_reg(); } }
719c3a2012-06-12Per Hedbor static void sync_registers(int flags) { maybe_update_pc(); flush_dirty_regs();
2fd8052013-10-08Per Hedbor  if (flags & I_UPDATE_SP) sp_reg = P_REG_INVALID; if (flags & I_UPDATE_M_SP) mark_sp_reg = P_REG_INVALID; if (flags & I_UPDATE_FP) fp_reg = P_REG_INVALID;
719c3a2012-06-12Per Hedbor } static void amd64_call_c_opcode(void *addr, int flags) { sync_registers(flags); call_imm( addr ); }
54a26b2011-05-11Henrik Grubbström (Grubba) #ifdef PIKE_DEBUG static void ins_debug_instr_prologue (PIKE_INSTR_T instr, INT32 arg1, INT32 arg2) { int flags = instrs[instr].flags;
f3bbcc2012-07-13Henrik Grubbström (Grubba)  /* Note: maybe_update_pc() is called by amd64_call_c_opcode() above, * which has the side-effect of loading fp_reg. Some of the * opcodes use amd64_call_c_opcode() in conditional segments. * This is to make sure that fp_reg is always loaded on exit * from such opcodes. */ maybe_load_fp();
622ebc2012-06-25Per Hedbor /* For now: It is very hard to read the disassembled source when this is inserted */ if( !d_flag ) return;
54a26b2011-05-11Henrik Grubbström (Grubba)  maybe_update_pc(); if (flags & I_HASARG2)
719c3a2012-06-12Per Hedbor  mov_imm_reg(arg2, ARG3_REG);
54a26b2011-05-11Henrik Grubbström (Grubba)  if (flags & I_HASARG)
719c3a2012-06-12Per Hedbor  mov_imm_reg(arg1, ARG2_REG); mov_imm_reg(instr, ARG1_REG);
54a26b2011-05-11Henrik Grubbström (Grubba)  if (flags & I_HASARG2) amd64_call_c_function (simple_debug_instr_prologue_2); else if (flags & I_HASARG) amd64_call_c_function (simple_debug_instr_prologue_1); else amd64_call_c_function (simple_debug_instr_prologue_0); } #else /* !PIKE_DEBUG */
d7805b2012-07-14Henrik Grubbström (Grubba) #define ins_debug_instr_prologue(instr, arg1, arg2) maybe_load_fp()
54a26b2011-05-11Henrik Grubbström (Grubba) #endif
719c3a2012-06-12Per Hedbor static void amd64_push_this_object( ) { amd64_load_fp_reg(); amd64_load_sp_reg();
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( PIKE_T_OBJECT, sp_reg, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, current_object), P_REG_RAX ); mov_reg_mem( P_REG_RAX, sp_reg, OFFSETOF(svalue,u.object) ); add_mem32_imm( P_REG_RAX, (INT32)OFFSETOF(object, refs), 1);
719c3a2012-06-12Per Hedbor  amd64_add_sp( 1 ); }
638a2c2012-07-16Per Hedbor static void amd64_align() { while( PIKE_PC & 3 ) ib( 0x90 ); } static void amd64_ins_branch_check_threads_etc(int code_only)
719c3a2012-06-12Per Hedbor { LABELS();
638a2c2012-07-16Per Hedbor  if( !branch_check_threads_update_etc )
6785ae2012-06-15Per Hedbor  {
11619b2012-07-18Henrik Grubbström (Grubba)  /* Create update + call to branch_check_threads_etc */
638a2c2012-07-16Per Hedbor  if( !code_only ) jmp( &label_A );
6785ae2012-06-15Per Hedbor  branch_check_threads_update_etc = PIKE_PC;
5e064b2012-06-15Henrik Grubbström (Grubba)  if( (unsigned long long)&fast_check_threads_counter < 0x7fffffffULL )
6785ae2012-06-15Per Hedbor  { /* Short pointer. */
2fd8052013-10-08Per Hedbor  clear_reg( P_REG_RAX ); add_mem32_imm( P_REG_RAX,
5e064b2012-06-15Henrik Grubbström (Grubba)  (int)(ptrdiff_t)&fast_check_threads_counter,
6785ae2012-06-15Per Hedbor  0x80 ); } else {
2fd8052013-10-08Per Hedbor  mov_imm_reg( (long)&fast_check_threads_counter, P_REG_RAX); add_mem_imm( P_REG_RAX, 0, 0x80 );
6785ae2012-06-15Per Hedbor  }
2fd8052013-10-08Per Hedbor  mov_imm_reg( (ptrdiff_t)branch_check_threads_etc, P_REG_RAX ); jmp_reg(P_REG_RAX); /* ret in BCTE will return to desired point. */
638a2c2012-07-16Per Hedbor  amd64_align(); } if( !code_only ) { LABEL_A; /* Use C-stack for counter. We have padding added in entry */
10d27a2014-08-08Per Hedbor #ifndef USE_VALGRIND
2fd8052013-10-08Per Hedbor  add_mem8_imm( P_REG_RSP, 0, 1 );
638a2c2012-07-16Per Hedbor  jno( &label_B );
10d27a2014-08-08Per Hedbor #endif
11619b2012-07-18Henrik Grubbström (Grubba)  call_rel_imm32( branch_check_threads_update_etc );
10d27a2014-08-08Per Hedbor #ifndef USE_VALGRIND
70ff3d2013-05-28Martin Nilsson  LABEL_B;
10d27a2014-08-08Per Hedbor #endif
6785ae2012-06-15Per Hedbor  }
719c3a2012-06-12Per Hedbor }
54a26b2011-05-11Henrik Grubbström (Grubba) void amd64_init_interpreter_state(void) {
85c1c12014-08-18Per Hedbor #ifdef PIKE_DEBUG if( PIKE_T_INT != 0 ) Pike_fatal("assumption failed: pike_t_int == 0\n"); if( sizeof(struct svalue) != 16 ) Pike_fatal("assumption failed: sizeof svalue != 16\n"); if( OFFSETOF(svalue,tu.t.type) != 0 ) Pike_fatal("assumption failed: offsetof(svalue.tu.t.type) != 0\n"); if( OFFSETOF(svalue,u.integer) != 8 ) Pike_fatal("assumption failed: offsetof(svalue.u.integer) != 8\n"); #endif
54a26b2011-05-11Henrik Grubbström (Grubba)  instrs[F_CATCH - F_OFFSET].address = inter_return_opcode_F_CATCH; }
6785ae2012-06-15Per Hedbor static void amd64_return_from_function() { if( ret_for_func ) {
11619b2012-07-18Henrik Grubbström (Grubba)  jmp_rel_imm( ret_for_func );
6785ae2012-06-15Per Hedbor  } else { ret_for_func = PIKE_PC;
2fd8052013-10-08Per Hedbor  pop(P_REG_RBX); /* Stack padding. */ pop(P_REG_RBX); pop(P_REG_R12); pop(P_REG_R13); pop(P_REG_R14); pop(P_REG_R15); pop(P_REG_RBP);
6785ae2012-06-15Per Hedbor  ret(); } }
2e82fb2014-08-31Per Hedbor static void if_not_two_int(struct label *to, int load) { amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX ); mov_mem8_reg(sp_reg, SVAL(-2).type, P_REG_RBX ); add_reg_reg(P_REG_RAX,P_REG_RBX); /* test_reg32(P_REG_RAX); int == 0 */ jnz(to); if( load ) { mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RAX ); mov_mem_reg(sp_reg, SVAL(-2).value, P_REG_RBX ); } }
d1fa802011-05-09Henrik Grubbström (Grubba) void ins_f_byte(unsigned int b) {
54a26b2011-05-11Henrik Grubbström (Grubba)  int flags;
d1fa802011-05-09Henrik Grubbström (Grubba)  void *addr;
48f5972011-05-23Henrik Grubbström (Grubba)  INT32 rel_addr = 0;
719c3a2012-06-12Per Hedbor  LABELS();
d1fa802011-05-09Henrik Grubbström (Grubba)  b-=F_OFFSET;
7990392006-04-27Tor Edvardsson #ifdef PIKE_DEBUG
d1fa802011-05-09Henrik Grubbström (Grubba)  if(b>255) Pike_error("Instruction too big %d\n",b);
7990392006-04-27Tor Edvardsson #endif
d1fa802011-05-09Henrik Grubbström (Grubba)  maybe_update_pc();
54a26b2011-05-11Henrik Grubbström (Grubba)  flags = instrs[b].flags;
eb42a12011-05-11Henrik Grubbström (Grubba)  addr=instrs[b].address;
1af0932012-06-22Per Hedbor  switch(b + F_OFFSET) {
b505a72012-06-13Per Hedbor  case F_DUP:
6785ae2012-06-15Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  add_reg_imm_reg(sp_reg, -sizeof(struct svalue), P_REG_R10 ); amd64_push_svaluep( P_REG_R10 );
b505a72012-06-13Per Hedbor  return;
378ad02012-06-25Per Hedbor 
4ceb792012-06-22Per Hedbor #if 0
378ad02012-06-25Per Hedbor  case F_ESCAPE_CATCH:
1af0932012-06-22Per Hedbor  case F_EXIT_CATCH:
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
1af0932012-06-22Per Hedbor  ins_f_byte( F_ESCAPE_CATCH ); amd64_load_sp_reg(); amd64_push_int( 0, 1 ); return;
4ceb792012-06-22Per Hedbor #endif
378ad02012-06-25Per Hedbor  /* -3 -2 -1 0 * lval[0], lval[1], <RANDOM>, **SP** * -> * -4 -3 -2 -1 0 * lval[0], lval[1], *lval, <RANDOM>, *SP** * * This will also free *lval iff it has type * array,multiset,mapping or string. */ case F_LTOSVAL2_AND_FREE: {
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
378ad02012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -3*sizeof(struct svalue), P_REG_RBX ); cmp_reg_imm( P_REG_RBX, T_SVALUE_PTR );
378ad02012-06-25Per Hedbor  jne( &label_A ); /* inline version for SVALUE_PTR. */ /* First, make room for the result, move top and inc sp */
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -sizeof(struct svalue), P_REG_RAX ); mov_mem_reg( sp_reg, -sizeof(struct svalue)+8, P_REG_RCX ); mov_reg_mem( P_REG_RAX, sp_reg, 0); mov_reg_mem( P_REG_RCX, sp_reg, 8);
378ad02012-06-25Per Hedbor  amd64_add_sp( 1 ); mov_mem_reg( sp_reg, -4*sizeof(struct svalue)+OFFSETOF(svalue,u.lval),
2fd8052013-10-08Per Hedbor  P_REG_RBX ); amd64_push_svaluep_to(P_REG_RBX, -2);
378ad02012-06-25Per Hedbor  /* Result is now in sp[-2], lval in -4. */ /* push svaluep leaves type in RAX */
2fd8052013-10-08Per Hedbor  mov_reg_reg( P_REG_RAX, P_REG_RCX ); /* mov_mem8_reg( sp_reg, -2*sizeof(struct svalue), P_REG_RCX ); */ mov_imm_reg( 1, P_REG_RAX ); shl_reg32_reg( P_REG_RAX, P_REG_RCX ); and_reg_imm( P_REG_RAX, (BIT_ARRAY|BIT_MULTISET|BIT_MAPPING|BIT_STRING));
378ad02012-06-25Per Hedbor  jz( &label_B ); /* Do nothing.. */ /* Free the old value. */
2fd8052013-10-08Per Hedbor  amd64_free_svalue( P_REG_RBX, 0 );
378ad02012-06-25Per Hedbor  /* assign 0 */
2fd8052013-10-08Per Hedbor  mov_imm_mem( PIKE_T_INT, P_REG_RBX, 0 ); mov_imm_mem( 0, P_REG_RBX, 8 );
378ad02012-06-25Per Hedbor  jmp( &label_B ); /* All done */ LABEL_A; /* So, not a svalueptr. Use C-version */ amd64_call_c_opcode( addr, flags ); amd64_load_sp_reg(); LABEL_B; } return; case F_LTOSVAL: {
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
378ad02012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -sizeof(struct svalue)*2, P_REG_RAX );
378ad02012-06-25Per Hedbor  /* lval type in RAX. */ /* if( rax == T_SVALUE_PTR ) push( *l->u.lval ) possibly: if( rax == T_OBJECT ) object_index_no_free( to, lval->u.object, lval->subtype, &lval[1]) if( rax == T_ARRAY && lval[1].type == PIKE_T_INT ) push( l->u.array->item[&lval[1].u.integer] ) */
2fd8052013-10-08Per Hedbor  cmp_reg_imm( P_REG_RAX, T_SVALUE_PTR );
378ad02012-06-25Per Hedbor  je( &label_A ); /* So, not a svalueptr */ mov_reg_reg( sp_reg, ARG1_REG ); add_reg_imm_reg( sp_reg, -2*sizeof(struct svalue), ARG2_REG ); amd64_call_c_function(lvalue_to_svalue_no_free);
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
378ad02012-06-25Per Hedbor  amd64_add_sp(1); jmp(&label_B); LABEL_A; mov_mem_reg( sp_reg, -sizeof(struct svalue)*2+OFFSETOF(svalue,u.lval),
2fd8052013-10-08Per Hedbor  P_REG_RCX ); amd64_push_svaluep(P_REG_RCX);
378ad02012-06-25Per Hedbor  LABEL_B; } return; case F_ASSIGN: {
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
378ad02012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -3*sizeof(struct svalue), P_REG_RAX ); cmp_reg_imm( P_REG_RAX, T_SVALUE_PTR );
378ad02012-06-25Per Hedbor  je( &label_A ); /* So, not a svalueptr. Use C-version for simplicity */ amd64_call_c_opcode( addr, flags ); amd64_load_sp_reg(); jmp( &label_B ); amd64_align(); LABEL_A;
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -3*sizeof(struct svalue)+8, P_REG_RBX );
378ad02012-06-25Per Hedbor  /* Free old value. */
2fd8052013-10-08Per Hedbor  amd64_free_svalue( P_REG_RBX, 0 );
378ad02012-06-25Per Hedbor  /* assign new value */ /* also move assigned value -> sp[-3] */
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -sizeof(struct svalue), P_REG_RAX ); mov_mem_reg( sp_reg, -sizeof(struct svalue)+8, P_REG_RCX ); mov_reg_mem( P_REG_RAX, P_REG_RBX, 0 ); mov_reg_mem( P_REG_RCX, P_REG_RBX, 8 ); add_reg_imm_reg( sp_reg, -sizeof(struct svalue), P_REG_RCX ); amd64_push_svaluep_to( P_REG_RCX, -3 );
378ad02012-06-25Per Hedbor  /* Note: For SVALUEPTR we know we do not have to free the lvalue. */ amd64_add_sp( -2 ); LABEL_B; } return; case F_ASSIGN_AND_POP: {
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
378ad02012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -3*sizeof(struct svalue), P_REG_RAX ); cmp_reg_imm( P_REG_RAX, T_SVALUE_PTR );
378ad02012-06-25Per Hedbor  je( &label_A ); /* So, not a svalueptr. Use C-version for simplicity */ amd64_call_c_opcode( addr, flags ); amd64_load_sp_reg(); jmp( &label_B ); amd64_align(); LABEL_A;
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -3*sizeof(struct svalue)+8, P_REG_RBX );
378ad02012-06-25Per Hedbor  /* Free old value. */
2fd8052013-10-08Per Hedbor  amd64_free_svalue( P_REG_RBX, 0 );
378ad02012-06-25Per Hedbor  /* assign new value */
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -sizeof(struct svalue), P_REG_RAX ); mov_mem_reg( sp_reg, -sizeof(struct svalue)+8, P_REG_RCX ); mov_reg_mem( P_REG_RAX, P_REG_RBX, 0 ); mov_reg_mem( P_REG_RCX, P_REG_RBX, 8 );
378ad02012-06-25Per Hedbor  /* Note: For SVALUEPTR we know we do not have to free the lvalue. */ amd64_add_sp( -3 ); LABEL_B; } return;
9840c22014-08-28Per Hedbor 
156ab02014-08-31Per Hedbor  case F_NEGATE: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX ); test_reg32(P_REG_RAX); jz(&label_B);
fb2ce02014-08-31Per Hedbor  jmp(&label_A);
156ab02014-08-31Per Hedbor  LABEL_D; neg_mem(sp_reg, SVAL(-1).value ); LABEL_A; amd64_call_c_opcode(addr, flags); jmp(&label_C); LABEL_B;
da865c2014-09-01Per Hedbor  mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-1).type );
156ab02014-08-31Per Hedbor  neg_mem(sp_reg, SVAL(-1).value ); jo(&label_D); LABEL_C; } return;
7420062014-09-23Per Hedbor  case F_NOT: { LABELS(); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RCX ); test_reg32(P_REG_RCX); jz(&label_B); /* integer. */ mov_imm_reg( 1, P_REG_RAX ); shl_reg32_reg( P_REG_RAX, P_REG_RCX ); and_reg32_imm( P_REG_RAX, (BIT_FLOAT|BIT_ARRAY|BIT_MULTISET|BIT_MAPPING|BIT_STRING|BIT_PROGRAM)); jnz( &label_A ); /* object or function */ amd64_call_c_opcode(addr, flags); jmp(&label_C); /* int. */ LABEL_B; mov_mem_reg( sp_reg, SVAL(-1).value, P_REG_RAX ); clear_reg( P_REG_RCX ); cmp_reg_imm( P_REG_RAX, 0 ); set_if_eq(P_REG_RCX); mov_imm_mem( PIKE_T_INT, sp_reg, SVAL(-1).type ); mov_reg_mem( P_REG_RCX, sp_reg, SVAL(-1).value ); jmp(&label_C); LABEL_A; /* array etc, !x is always false */ amd64_free_svalue_off( sp_reg, SVAL(-1).off, 0 ); mov_imm_mem( PIKE_T_INT, sp_reg, SVAL(-1).type ); mov_imm_mem( 0, sp_reg, SVAL(-1).value ); LABEL_C; } return;
36fea02014-08-31Per Hedbor  case F_COMPL: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX ); test_reg32(P_REG_RAX); jz(&label_B); LABEL_A; amd64_call_c_opcode(addr, flags); jmp(&label_C); LABEL_B; not_mem(sp_reg, SVAL(-1).value ); LABEL_C; } return;
0eb13c2014-08-31Per Hedbor  case F_MOD:
da865c2014-09-01Per Hedbor  case F_DIVIDE:
5d4e632014-08-31Per Hedbor  { LABELS(); if_not_two_int(&label_A,0); mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RBX ); mov_mem_reg(sp_reg, SVAL(-2).value, P_REG_RAX );
da865c2014-09-01Per Hedbor  cmp_reg_imm(P_REG_RAX, 0 ); jl(&label_A);
0eb13c2014-08-31Per Hedbor  if( b+F_OFFSET == F_MOD ) {
da865c2014-09-01Per Hedbor  /* Emulates pikes % functionality. */ cmp_reg_imm(P_REG_RBX, 0 ); jl(&label_A);
0eb13c2014-08-31Per Hedbor  }
5d4e632014-08-31Per Hedbor  cmp_reg_imm(P_REG_RBX,0);
da865c2014-09-01Per Hedbor  je(&label_A);
5d4e632014-08-31Per Hedbor  cmp_reg_imm(P_REG_RBX,-1);
da865c2014-09-01Per Hedbor  jne(&label_C);
5d4e632014-08-31Per Hedbor  cmp_reg_imm(P_REG_RAX,0); jle(&label_A); /* FIXME: Int.native_min / -1 -> exception.
956b122014-09-23Per Hedbor 
0eb13c2014-08-31Per Hedbor  This checks for -<whatever> / -1.
5d4e632014-08-31Per Hedbor  There is bound to be a better way. Somehow disable the exception?
956b122014-09-23Per Hedbor 
0eb13c2014-08-31Per Hedbor  Some SSE alternative that does not throw? Perhaps a 64/64->64 instead of the current 128/64->64?
5d4e632014-08-31Per Hedbor  */ LABEL_C; div_reg_reg(P_REG_RAX,P_REG_RBX); mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-2).type);
0eb13c2014-08-31Per Hedbor  if( b+F_OFFSET == F_MOD ) mov_reg_mem(P_REG_RDX, sp_reg, SVAL(-2).value); else mov_reg_mem(P_REG_RAX, sp_reg, SVAL(-2).value);
5d4e632014-08-31Per Hedbor  amd64_add_sp(-1); jmp(&label_B); LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); LABEL_B; } return;
2e82fb2014-08-31Per Hedbor  case F_MULTIPLY:
ad0d7e2014-08-28Per Hedbor  {
5d4e632014-08-31Per Hedbor  LABELS();
2e82fb2014-08-31Per Hedbor  if_not_two_int(&label_A,1); mul_reg_reg(P_REG_RBX,P_REG_RAX); jo(&label_A); mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-2).type); mov_reg_mem(P_REG_RBX, sp_reg, SVAL(-2).value); amd64_add_sp(-1); jmp(&label_B);
ad0d7e2014-08-28Per Hedbor  LABEL_A;
2e82fb2014-08-31Per Hedbor  amd64_call_c_opcode(addr, flags); amd64_load_sp_reg();
ad0d7e2014-08-28Per Hedbor  LABEL_B; } return;
2e82fb2014-08-31Per Hedbor  case F_AND: { LABELS(); ins_debug_instr_prologue(b, 0, 0); if_not_two_int(&label_A,1); and_reg_reg(P_REG_RBX,P_REG_RAX); mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-2).type); mov_reg_mem(P_REG_RBX,sp_reg,SVAL(-2).value); amd64_add_sp(-1); jmp(&label_B); LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); LABEL_B; } return;
ad0d7e2014-08-28Per Hedbor  case F_OR: { LABELS(); ins_debug_instr_prologue(b, 0, 0);
2e82fb2014-08-31Per Hedbor  if_not_two_int(&label_A,1);
ad0d7e2014-08-28Per Hedbor  or_reg_reg(P_REG_RAX,P_REG_RBX);
282c372014-08-29Per Hedbor  mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-2).type);
ad0d7e2014-08-28Per Hedbor  mov_reg_mem(P_REG_RBX,sp_reg,SVAL(-2).value); amd64_add_sp(-1); jmp(&label_B); LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); LABEL_B; } return;
2ce89e2014-08-31Per Hedbor  case F_XOR: { LABELS(); ins_debug_instr_prologue(b, 0, 0); if_not_two_int(&label_A,1); xor_reg_reg(P_REG_RAX,P_REG_RBX); mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-2).type); mov_reg_mem(P_REG_RBX,sp_reg,SVAL(-2).value); amd64_add_sp(-1); jmp(&label_B); LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); LABEL_B; } return;
ad0d7e2014-08-28Per Hedbor  case F_RSH: { LABELS(); ins_debug_instr_prologue(b, 0, 0);
2e82fb2014-08-31Per Hedbor  if_not_two_int(&label_A,0);
ad0d7e2014-08-28Per Hedbor  mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RCX ); cmp_reg_imm(P_REG_RCX,0); jl( &label_A ); cmp_reg_imm(P_REG_RCX,63);
9840c22014-08-28Per Hedbor  jl( &label_B );
ad0d7e2014-08-28Per Hedbor  LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg();
9840c22014-08-28Per Hedbor  jmp(&label_C); LABEL_B;
282c372014-08-29Per Hedbor  mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-2).type);
9840c22014-08-28Per Hedbor  shr_mem_reg( sp_reg, SVAL(-2).value, P_REG_RCX); amd64_add_sp(-1);
ad0d7e2014-08-28Per Hedbor  LABEL_C; } return;
156ab02014-08-31Per Hedbor  case F_LSH: { LABELS(); ins_debug_instr_prologue(b, 0, 0);
2e82fb2014-08-31Per Hedbor  if_not_two_int(&label_A,0);
156ab02014-08-31Per Hedbor  mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RCX ); cmp_reg_imm(P_REG_RCX,0); jl( &label_A ); cmp_reg_imm(P_REG_RCX,63); jl( &label_B ); LABEL_A; amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); jmp(&label_C); LABEL_B; mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-2).type); mov_mem_reg( sp_reg, SVAL(-2).value, P_REG_RBX); /* ok. It would have been nice is sal set a bit that stayed set when you shifted out a 1 bit. :) */ mov_reg_reg( P_REG_RBX, P_REG_RAX ); shl_reg_reg( P_REG_RBX, P_REG_RCX); mov_reg_reg( P_REG_RBX, P_REG_RDX ); shr_reg_reg( P_REG_RDX, P_REG_RCX ); cmp_reg_reg( P_REG_RDX, P_REG_RAX ); jne( &label_A ); mov_reg_mem( P_REG_RBX, sp_reg, SVAL(-2).value); amd64_add_sp(-1); LABEL_C; } return;
6785ae2012-06-15Per Hedbor  case F_ADD_INTS: { ins_debug_instr_prologue(b, 0, 0);
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
851d192014-08-07Per Hedbor  mov_mem_reg( sp_reg, -sizeof(struct svalue)*2, P_REG_RAX ); add_reg_mem( P_REG_RAX, sp_reg, -sizeof(struct svalue ) ); jnz( &label_A );
6785ae2012-06-15Per Hedbor  /* So. Both are actually integers. */ mov_mem_reg( sp_reg, -sizeof(struct svalue)+OFFSETOF(svalue,u.integer),
2fd8052013-10-08Per Hedbor  P_REG_RAX );
6785ae2012-06-15Per Hedbor 
2fd8052013-10-08Per Hedbor  add_reg_mem( P_REG_RAX,
6785ae2012-06-15Per Hedbor  sp_reg,
851d192014-08-07Per Hedbor  -sizeof(struct svalue)*2+OFFSETOF(svalue,u.integer));
6785ae2012-06-15Per Hedbor  jo( &label_A ); amd64_add_sp( -1 );
851d192014-08-07Per Hedbor  /* the type only needs to be set if the subtype was set before. It was not, since the type check before would not have triggered. */
2fd8052013-10-08Per Hedbor  mov_reg_mem( P_REG_RAX, sp_reg,
6785ae2012-06-15Per Hedbor  -sizeof(struct svalue)+OFFSETOF(svalue,u.integer)); jmp( &label_B ); LABEL_A; /* Fallback version */ update_arg1( 2 ); amd64_call_c_opcode( f_add, I_UPDATE_SP ); amd64_load_sp_reg(); LABEL_B; } return;
b505a72012-06-13Per Hedbor  case F_SWAP:
6785ae2012-06-15Per Hedbor  /* pike_sp[-1] = pike_sp[-2] */
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
6785ae2012-06-15Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  add_reg_imm_reg( sp_reg, -2*sizeof(struct svalue), P_REG_RCX ); mov_mem128_reg( P_REG_RCX, 0, P_REG_XMM0 ); mov_mem128_reg( P_REG_RCX, 16, P_REG_XMM1 ); mov_reg_mem128( P_REG_XMM1, P_REG_RCX, 0 ); mov_reg_mem128( P_REG_XMM0, P_REG_RCX, 16 );
6785ae2012-06-15Per Hedbor  return;
8886032012-06-25Per Hedbor  case F_INDEX: /* pike_sp[-2][pike_sp[-1]] */
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
8886032012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -2*sizeof(struct svalue), P_REG_RAX ); mov_mem8_reg( sp_reg, -1*sizeof(struct svalue), P_REG_RBX ); shl_reg_imm( P_REG_RAX, 8 ); add_reg_reg( P_REG_RAX, P_REG_RBX ); mov_mem_reg( sp_reg, -1*sizeof(struct svalue)+8, P_REG_RBX ); /* int */ mov_mem_reg( sp_reg, -2*sizeof(struct svalue)+8, P_REG_RCX ); /* value */ cmp_reg32_imm( P_REG_RAX, (PIKE_T_ARRAY<<8)|PIKE_T_INT );
8886032012-06-25Per Hedbor  jne( &label_A ); /* Array and int index. */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( P_REG_RCX, OFFSETOF(array,size), P_REG_RDX );
956b122014-09-23Per Hedbor  cmp_reg_imm( P_REG_RBX, 0 ); jge( &label_D );
8886032012-06-25Per Hedbor  /* less than 0, add size */
2fd8052013-10-08Per Hedbor  add_reg_reg( P_REG_RBX, P_REG_RDX );
8886032012-06-25Per Hedbor  LABEL_D;
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RBX, 0 ); jl( &label_B ); // <0 cmp_reg_reg( P_REG_RBX, P_REG_RDX); jge( &label_B ); // >size
8886032012-06-25Per Hedbor  /* array, index inside array. push item, swap, pop, done */
2fd8052013-10-08Per Hedbor  push( P_REG_RCX ); /* Save array pointer */ mov_mem_reg( P_REG_RCX, OFFSETOF(array,item), P_REG_RCX ); shl_reg_imm( P_REG_RBX, 4 ); add_reg_reg( P_REG_RBX, P_REG_RCX );
8886032012-06-25Per Hedbor  /* This overwrites the array. */ amd64_add_sp( -1 );
2fd8052013-10-08Per Hedbor  amd64_push_svaluep_to( P_REG_RBX, -1 );
8886032012-06-25Per Hedbor 
2fd8052013-10-08Per Hedbor  pop( P_REG_RCX );
8886032012-06-25Per Hedbor  /* We know it's an array. */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RCX, OFFSETOF(array,refs), -1);
8886032012-06-25Per Hedbor  jnz( &label_C );
2fd8052013-10-08Per Hedbor  mov_reg_reg( P_REG_RCX, ARG1_REG );
8886032012-06-25Per Hedbor  amd64_call_c_function(really_free_array); jmp( &label_C ); LABEL_A; #if 0
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, (PIKE_T_STRING<<8)|PIKE_T_INT );
8886032012-06-25Per Hedbor  jne( &label_B ); #endif LABEL_B; /* something else. */ amd64_call_c_opcode( addr, flags ); amd64_load_sp_reg(); LABEL_C; /* done */ return;
956b122014-09-23Per Hedbor  case F_SIZEOF_STRING: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem8_reg( sp_reg, SVAL(-1).type, P_REG_RAX ); cmp_reg32_imm( P_REG_RAX, PIKE_T_STRING ); jne( &label_A ); mov_mem_reg( sp_reg, SVAL(-1).value, P_REG_RAX); mov_mem_reg( P_REG_RAX, OFFSETOF(pike_string,len), P_REG_RBX ); /* dec refs */ add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1); jnz( &label_B ); /* no refs, call really_free_svalue */ add_reg_imm_reg( sp_reg, SVAL(-1).off, ARG1_REG ); amd64_call_c_function(really_free_svalue); jmp(&label_B); LABEL_A; /* not a string. */ amd64_call_c_opcode( addr, flags ); jmp(&label_C); LABEL_B; /* save size on stack. */ mov_imm_mem( PIKE_T_INT, sp_reg, SVAL(-1).type ); mov_reg_mem( P_REG_RBX, sp_reg, SVAL(-1).value ); LABEL_C; } return;
ea82682012-06-25Per Hedbor  case F_SIZEOF: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  add_reg_imm_reg( sp_reg, -sizeof(struct svalue), P_REG_RBX); mov_sval_type( P_REG_RBX, P_REG_RAX );
ea82682012-06-25Per Hedbor  /* type in RAX, svalue in ARG1 */
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, PIKE_T_ARRAY ); jne( &label_A );
ea82682012-06-25Per Hedbor  /* It's an array */
2fd8052013-10-08Per Hedbor  mov_mem_reg( P_REG_RBX, OFFSETOF(svalue, u.array ), ARG1_REG);
ea82682012-06-25Per Hedbor  /* load size -> RAX*/
2fd8052013-10-08Per Hedbor  mov_mem32_reg( ARG1_REG,OFFSETOF(array, size), P_REG_RAX );
ea82682012-06-25Per Hedbor  jmp( &label_C ); LABEL_A;
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, PIKE_T_STRING ); jne( &label_B );
ea82682012-06-25Per Hedbor  /* It's a string */
2fd8052013-10-08Per Hedbor  mov_mem_reg( P_REG_RBX, OFFSETOF(svalue, u.string ), ARG1_REG);
ea82682012-06-25Per Hedbor  /* load size ->RAX*/
2fd8052013-10-08Per Hedbor  mov_mem32_reg( ARG1_REG,OFFSETOF(pike_string, len ), P_REG_RAX );
ea82682012-06-25Per Hedbor  jmp( &label_C ); LABEL_B; /* It's something else, svalue in RBX. */
2fd8052013-10-08Per Hedbor  mov_reg_reg( P_REG_RBX, ARG1_REG );
ea82682012-06-25Per Hedbor  amd64_call_c_function( pike_sizeof );
8886032012-06-25Per Hedbor 
ea82682012-06-25Per Hedbor  LABEL_C;/* all done, res in RAX */ /* free value, store result */
2fd8052013-10-08Per Hedbor  push( P_REG_RAX ); amd64_free_svalue( P_REG_RBX, 0 ); pop( P_REG_RAX ); mov_reg_mem(P_REG_RAX, P_REG_RBX, OFFSETOF(svalue, u.integer));
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem(PIKE_T_INT, P_REG_RBX, OFFSETOF(svalue, tu.t.type));
ea82682012-06-25Per Hedbor  } return;
b505a72012-06-13Per Hedbor  case F_POP_VALUE:
6785ae2012-06-15Per Hedbor  { ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); amd64_add_sp( -1 ); amd64_free_svalue( sp_reg, 0 ); } return;
2e82fb2014-08-31Per Hedbor 
eb42a12011-05-11Henrik Grubbström (Grubba)  case F_CATCH:
21b9112011-05-23Henrik Grubbström (Grubba)  { /* Special argument for the F_CATCH instruction. */ addr = inter_return_opcode_F_CATCH;
719c3a2012-06-12Per Hedbor  mov_rip_imm_reg(0, ARG1_REG); /* Address for the POINTER. */
48f5972011-05-23Henrik Grubbström (Grubba)  rel_addr = PIKE_PC;
21b9112011-05-23Henrik Grubbström (Grubba)  }
eb42a12011-05-11Henrik Grubbström (Grubba)  break;
85c1c12014-08-18Per Hedbor  /* sp-1 = undefinedp(sp-1) */ case F_UNDEFINEDP: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem32_reg( sp_reg, SVAL(-1).type, P_REG_RAX ); cmp_reg32_imm( P_REG_RAX, 1<<16 | PIKE_T_INT ); jne( &label_A ); mov_imm_mem( 1, sp_reg,SVAL(-1).value); jmp( &label_B ); LABEL_A; amd64_free_svalue_off( sp_reg, SVAL(-1).off,0 ); mov_imm_mem( 0, sp_reg,SVAL(-1).value); LABEL_B; mov_imm_mem( PIKE_T_INT, sp_reg,SVAL(-1).type); } return;
1af0932012-06-22Per Hedbor  case F_ZERO_TYPE: { LABELS();
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
1af0932012-06-22Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem32_reg( sp_reg, -sizeof(struct svalue), P_REG_RAX );
1af0932012-06-22Per Hedbor  /* Rax now has type + subtype. */
85c1c12014-08-18Per Hedbor  mov_reg32_reg( P_REG_RAX, P_REG_RBX ); and_reg32_imm( P_REG_RAX, 0x1f );
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, PIKE_T_INT );
1af0932012-06-22Per Hedbor  jne( &label_A ); /* It is an integer. */
2fd8052013-10-08Per Hedbor  shr_reg_imm( P_REG_RBX, 16 );
1af0932012-06-22Per Hedbor  /* subtype in RBX. */ mov_imm_mem( PIKE_T_INT, sp_reg, -sizeof(struct svalue) );
545c232014-08-17Per Hedbor  mov_reg_mem( P_REG_RBX, sp_reg,
1af0932012-06-22Per Hedbor  -sizeof(struct svalue)+OFFSETOF(svalue,u.integer) ); jmp( &label_B ); LABEL_A; /* not an integer. Use C version for simplicitly.. */ amd64_call_c_opcode( addr, flags ); LABEL_B; } return;
b40c772014-08-18Per Hedbor 
fe5ef32014-09-14Per Hedbor  case F_EQ: case F_NE: /* Note: We can do better when doing equality comparison without much work. */ case F_LE: case F_GE: case F_LT: case F_GT: { LABELS(); ins_debug_instr_prologue(b, 0, 0); if_not_two_int(&label_A,1); amd64_add_sp(-1); clear_reg(P_REG_RCX); cmp_reg_reg(P_REG_RBX, P_REG_RAX); switch(b+F_OFFSET) { case F_GT: set_if_gt( P_REG_RCX); break; case F_LT: set_if_lt( P_REG_RCX); break; case F_LE: set_if_lte(P_REG_RCX );break; case F_GE: set_if_gte(P_REG_RCX ); break; case F_EQ: set_if_eq( P_REG_RCX ); break; case F_NE: set_if_neq(P_REG_RCX ); break; } mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-1).type ); mov_reg_mem(P_REG_RCX, sp_reg, SVAL(-1).value ); jmp(&label_D); LABEL_A; /* not an integer. Use C version for simplicitly.. */ amd64_call_c_opcode( addr, flags ); amd64_load_sp_reg(); LABEL_D; } return;
b40c772014-08-18Per Hedbor  case F_ADD: { addr = f_add; update_arg1(2); amd64_call_c_opcode(addr, flags); amd64_load_sp_reg(); } return;
545c232014-08-17Per Hedbor  case F_INC: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, -16, P_REG_RAX );
b40c772014-08-18Per Hedbor  test_reg32(P_REG_RAX); jnz(&label_A);
545c232014-08-17Per Hedbor  add_imm_mem(1, sp_reg, -8); jno(&label_B); add_imm_mem(-1, sp_reg, -8); LABEL_A; amd64_call_c_opcode(addr, flags); LABEL_B; } return;
b40c772014-08-18Per Hedbor  case F_DEC:
545c232014-08-17Per Hedbor  { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg();
b40c772014-08-18Per Hedbor  mov_mem8_reg(sp_reg, -16, P_REG_RAX ); test_reg32(P_REG_RAX);
545c232014-08-17Per Hedbor  jnz(&label_A);
b40c772014-08-18Per Hedbor  add_imm_mem(-1, sp_reg, -8);
545c232014-08-17Per Hedbor  jno(&label_B);
b40c772014-08-18Per Hedbor  add_imm_mem(1, sp_reg, -8); LABEL_A;
545c232014-08-17Per Hedbor  amd64_call_c_opcode(addr, flags);
b40c772014-08-18Per Hedbor  LABEL_B;
545c232014-08-17Per Hedbor  } return;
b40c772014-08-18Per Hedbor 
545c232014-08-17Per Hedbor  case F_SUBTRACT: { LABELS(); ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, -1*sizeof(struct svalue), P_REG_RAX ); mov_mem8_reg(sp_reg, -2*sizeof(struct svalue), P_REG_RBX );
ad0d7e2014-08-28Per Hedbor  add_reg32_reg32(P_REG_RAX,P_REG_RBX); /* test_reg32(P_REG_RAX); int == 0 */
545c232014-08-17Per Hedbor  jnz(&label_A); #ifdef PIKE_DEBUG if( PIKE_T_INT ) Pike_fatal("Assertion failed\n"); #endif mov_mem_reg(sp_reg, -1*sizeof(struct svalue)+OFFSETOF(svalue,u.integer), P_REG_RAX ); mov_mem_reg(sp_reg, -2*sizeof(struct svalue)+OFFSETOF(svalue,u.integer), P_REG_RCX ); sub_reg_reg(P_REG_RCX,P_REG_RAX); jno(&label_B); LABEL_A; amd64_call_c_opcode(o_subtract, flags); amd64_load_sp_reg(); jmp(&label_C); LABEL_B; mov_reg_mem( P_REG_RCX, sp_reg, -2*sizeof(struct svalue)+OFFSETOF(svalue,u.integer)); amd64_add_sp(-1); LABEL_C; } return;
b40c772014-08-18Per Hedbor 
eb42a12011-05-11Henrik Grubbström (Grubba)  case F_UNDEFINED: ins_debug_instr_prologue(b, 0, 0); amd64_push_int(0, 1); return; case F_CONST0: ins_debug_instr_prologue(b, 0, 0); amd64_push_int(0, 0); return; case F_CONST1: ins_debug_instr_prologue(b, 0, 0); amd64_push_int(1, 0); return; case F_CONST_1: ins_debug_instr_prologue(b, 0, 0); amd64_push_int(-1, 0); return; case F_BIGNUM: ins_debug_instr_prologue(b, 0, 0); amd64_push_int(0x7fffffff, 0); return; case F_RETURN_1: ins_f_byte(F_CONST1); ins_f_byte(F_RETURN); return; case F_RETURN_0: ins_f_byte(F_CONST0); ins_f_byte(F_RETURN); return;
ab7cf52011-05-24Henrik Grubbström (Grubba)  case F_MARK: case F_SYNCH_MARK: ins_debug_instr_prologue(b, 0, 0); amd64_mark(0); return; case F_MARK2: ins_f_byte(F_MARK); ins_f_byte(F_MARK); return;
91080b2012-06-15Henrik Grubbström (Grubba)  case F_MARK_AND_CONST0: ins_f_byte(F_MARK); ins_f_byte(F_CONST0); return; case F_MARK_AND_CONST1: ins_f_byte(F_MARK); ins_f_byte(F_CONST1); return;
ab7cf52011-05-24Henrik Grubbström (Grubba)  case F_POP_MARK: ins_debug_instr_prologue(b, 0, 0); amd64_pop_mark(); return;
9853bd2012-06-13Henrik Grubbström (Grubba)  case F_POP_TO_MARK: ins_debug_instr_prologue(b, 0, 0); amd64_load_mark_sp_reg(); amd64_load_sp_reg(); amd64_pop_mark();
2fd8052013-10-08Per Hedbor  mov_mem_reg(mark_sp_reg, 0, P_REG_RBX);
9853bd2012-06-13Henrik Grubbström (Grubba)  jmp(&label_A); LABEL_B; amd64_add_sp( -1 ); amd64_free_svalue( sp_reg, 0 );
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
9853bd2012-06-13Henrik Grubbström (Grubba)  LABEL_A;
2fd8052013-10-08Per Hedbor  cmp_reg_reg(P_REG_RBX, sp_reg);
9853bd2012-06-13Henrik Grubbström (Grubba)  jl(&label_B); return;
6785ae2012-06-15Per Hedbor  /* If we are compiling with debug, F_RETURN does extra checks */
622ebc2012-06-25Per Hedbor 
6785ae2012-06-15Per Hedbor  case F_RETURN: case F_DUMB_RETURN: { LABELS();
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(b, 0, 0);
6785ae2012-06-15Per Hedbor  amd64_load_fp_reg(); /* Note: really mem16, but we & with PIKE_FRAME_RETURN_INTERNAL anyway */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( fp_reg, OFFSETOF(pike_frame, flags), P_REG_RAX );
85c1c12014-08-18Per Hedbor  and_reg32_imm( P_REG_RAX, PIKE_FRAME_RETURN_INTERNAL);
6785ae2012-06-15Per Hedbor  jnz( &label_A ); /* So, it is just a normal return. */ LABEL_B; /* Actually return */ flush_dirty_regs(); amd64_return_from_function();
f7202c2013-06-12Per Hedbor  /* */
6785ae2012-06-15Per Hedbor  LABEL_A;
f7202c2013-06-12Per Hedbor  /* We should jump to the given address. */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( fp_reg, OFFSETOF(pike_frame, flags), P_REG_RAX );
85c1c12014-08-18Per Hedbor  and_reg32_imm( P_REG_RAX, PIKE_FRAME_RETURN_POP );
f7202c2013-06-12Per Hedbor  jnz( &label_C ); amd64_call_c_function( low_return ); jmp( &label_D );
6785ae2012-06-15Per Hedbor 
f7202c2013-06-12Per Hedbor  LABEL_C; amd64_call_c_function( low_return_pop ); LABEL_D; fp_reg = -1; amd64_load_fp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, return_addr), P_REG_RAX ); jmp_reg( P_REG_RAX );
6785ae2012-06-15Per Hedbor  } return;
f7202c2013-06-12Per Hedbor 
91080b2012-06-15Henrik Grubbström (Grubba)  case F_CLEAR_STRING_SUBTYPE: ins_debug_instr_prologue(b, 0, 0); amd64_load_sp_reg();
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_mem32_reg(sp_reg, OFFSETOF(svalue, tu.t.type) - sizeof(struct svalue),
2fd8052013-10-08Per Hedbor  P_REG_RAX);
91080b2012-06-15Henrik Grubbström (Grubba)  /* NB: We only care about subtype 1! */
2fd8052013-10-08Per Hedbor  cmp_reg32_imm(P_REG_RAX, (1<<16)|PIKE_T_STRING);
91080b2012-06-15Henrik Grubbström (Grubba)  jne(&label_A);
85c1c12014-08-18Per Hedbor  and_reg32_imm(P_REG_RAX, 0x1f);
2fd8052013-10-08Per Hedbor  mov_reg_mem32(P_REG_RAX,
a5cf652014-06-24Henrik Grubbström (Grubba)  sp_reg, OFFSETOF(svalue, tu.t.type) - sizeof(struct svalue));
91080b2012-06-15Henrik Grubbström (Grubba)  LABEL_A; return;
eb42a12011-05-11Henrik Grubbström (Grubba)  }
719c3a2012-06-12Per Hedbor  amd64_call_c_opcode(addr,flags);
54a26b2011-05-11Henrik Grubbström (Grubba)  if (instrs[b].flags & I_RETURN) {
719c3a2012-06-12Per Hedbor  LABELS();
54a26b2011-05-11Henrik Grubbström (Grubba)  if ((b + F_OFFSET) == F_RETURN_IF_TRUE) { /* Kludge. We must check if the ret addr is
719c3a2012-06-12Per Hedbor  * PC + JUMP_EPILOGUE_SIZE. */
2fd8052013-10-08Per Hedbor  mov_rip_imm_reg(JUMP_EPILOGUE_SIZE, P_REG_RCX);
54a26b2011-05-11Henrik Grubbström (Grubba)  }
2fd8052013-10-08Per Hedbor  cmp_reg_imm(P_REG_RAX, -1);
6785ae2012-06-15Per Hedbor  jne(&label_A); amd64_return_from_function();
719c3a2012-06-12Per Hedbor  LABEL_A;
54a26b2011-05-11Henrik Grubbström (Grubba)  if ((b + F_OFFSET) == F_RETURN_IF_TRUE) { /* Kludge. We must check if the ret addr is * orig_addr + JUMP_EPILOGUE_SIZE. */
2fd8052013-10-08Per Hedbor  cmp_reg_reg( P_REG_RAX, P_REG_RCX );
15e8a02012-06-13Henrik Grubbström (Grubba)  je( &label_B );
2fd8052013-10-08Per Hedbor  jmp_reg(P_REG_RAX);
15e8a02012-06-13Henrik Grubbström (Grubba)  LABEL_B;
54a26b2011-05-11Henrik Grubbström (Grubba)  return; } }
f784252011-05-11Henrik Grubbström (Grubba)  if (flags & I_JUMP) {
2fd8052013-10-08Per Hedbor  jmp_reg(P_REG_RAX);
48f5972011-05-23Henrik Grubbström (Grubba)  if (b + F_OFFSET == F_CATCH) { upd_pointer(rel_addr - 4, PIKE_PC - rel_addr); }
d1fa802011-05-09Henrik Grubbström (Grubba)  }
7990392006-04-27Tor Edvardsson }
/* Emit native amd64 code for the branch opcode OP (an F_* opcode number).
 *
 * op:            the branch instruction to compile.
 * backward_jump: nonzero when the branch target lies earlier in the
 *                generated program; in that case code is also emitted to
 *                call amd64_ins_branch_check_threads_etc() before taking
 *                the jump (see START_JUMP() and the generic path below).
 *
 * Returns -1 if OP is not a branch instruction (I_BRANCH not set in its
 * instruction flags).  Otherwise returns the program offset of a 32-bit
 * relative jump displacement, emitted as 0 here, which the caller is
 * expected to patch with the real branch target later.  For the inlined
 * fast paths this offset comes from jz_imm_rel32()/jnz_imm_rel32(),
 * which presumably emit a conditional jump with a rel32 placeholder and
 * return its patch offset -- same convention as the explicit
 * add_to_program()/PUSH_INT(0) sequence in the generic path at the end.
 */
int amd64_ins_f_jump(unsigned int op, int backward_jump)
{
  int flags;
  void *addr;
  int off = op - F_OFFSET;
  int ret = -1;
  LABELS();
#ifdef PIKE_DEBUG
  if(off>255)
    Pike_error("Instruction too big %d\n",off);
#endif
  flags = instrs[off].flags;
  if (!(flags & I_BRANCH)) return -1;

/* Common prologue for the inlined branch cases: emit the debug
 * instrumentation, and for backward jumps also flush the PC and emit
 * the thread-switch/interrupt check.
 * NOTE(review): defined inside the function body and never #undef'ed;
 * it stays visible for the rest of the translation unit. */
#define START_JUMP() do{                                              \
    ins_debug_instr_prologue(off, 0, 0);                              \
    if (backward_jump) {                                              \
      maybe_update_pc();                                              \
      amd64_ins_branch_check_threads_etc(0);                          \
    }                                                                 \
  } while(0)

  switch( op )
  {
    case F_QUICK_BRANCH_WHEN_ZERO:
    case F_QUICK_BRANCH_WHEN_NON_ZERO:
      /* "Quick" variants: the top of the stack is known to be a plain
       * integer, so no type dispatch or svalue freeing is needed.
       * Pop it and test its u.integer field (at offset 8 in the svalue,
       * cf. SVAL() above). */
      START_JUMP();
      amd64_load_sp_reg();
      amd64_add_sp( -1 );
      mov_mem_reg( sp_reg, 8, P_REG_RAX );
      test_reg32(P_REG_RAX);
      if( op == F_QUICK_BRANCH_WHEN_ZERO )
        return jz_imm_rel32(0);
      return jnz_imm_rel32(0);

    case F_BRANCH_WHEN_ZERO:
    case F_BRANCH_WHEN_NON_ZERO:
      /* Generic truth-value branch.  Dispatch on the type byte of
       * sp[-1]; the boolean result ends up in RBX for the shared
       * branch decision at label_B. */
      START_JUMP();
      amd64_load_sp_reg();
      mov_mem8_reg( sp_reg, -sizeof(struct svalue), P_REG_RCX );
      cmp_reg32_imm( P_REG_RCX, PIKE_T_INT );
      je( &label_C );
      /* Build (1<<type) and test against the function/object bits to
       * find the types whose truth value needs a C call. */
      mov_imm_reg( 1, P_REG_RAX );
      shl_reg32_reg( P_REG_RAX, P_REG_RCX );
      and_reg32_imm( P_REG_RAX, BIT_FUNCTION|BIT_OBJECT );
      jnz( &label_A );
      /* string, array, mapping or float. Always true */
      amd64_add_sp(-1);
      amd64_free_svalue_type( sp_reg, P_REG_RCX, 0 );
      mov_imm_reg( 1, P_REG_RBX );
      jmp( &label_B );
    LABEL_A;
      /* function or object. Use svalue_is_true. */
      add_reg_imm_reg(sp_reg, -sizeof(struct svalue), ARG1_REG );
      amd64_call_c_function(svalue_is_true);
      amd64_load_sp_reg();
      mov_reg_reg( P_REG_RAX, P_REG_RBX );
      amd64_add_sp( -1 );
      amd64_free_svalue( sp_reg, 0 ); /* Pop the stack. */
      jmp( &label_B );
    LABEL_C;
      /* integer: its truth value is simply its u.integer field. */
      mov_mem_reg( sp_reg, -sizeof(struct svalue)+OFFSETOF(svalue,u.integer),
                   P_REG_RBX );
      amd64_add_sp(-1);

    LABEL_B; /* Branch or not? */
      test_reg( P_REG_RBX );
      if( op == F_BRANCH_WHEN_ZERO )
        return jz_imm_rel32(0);
      return jnz_imm_rel32(0);

    case F_FOREACH:
      START_JUMP();
      /* Stack layout for foreach:
         -4: array
         -3: lvalue[0]
         -2: lvalue[1]
         -1: counter
      */
      amd64_load_sp_reg();
      mov_mem8_reg( sp_reg, -4*sizeof(struct svalue), P_REG_RBX );
      cmp_reg32_imm( P_REG_RBX, PIKE_T_ARRAY );
      je(&label_D);
      /* Bad arg 1. Let the C opcode throw the error. */
      amd64_call_c_opcode(instrs[off].address, flags);
      /* NOT_REACHED */
    LABEL_D;
      /* counter == array size -> loop done (label_A). */
      mov_mem_reg( sp_reg, -1*sizeof(struct svalue)+8, P_REG_RAX );
      mov_mem_reg( sp_reg, -4*sizeof(struct svalue)+8, P_REG_RBX );
      mov_mem32_reg( P_REG_RBX, OFFSETOF(array,size), P_REG_RCX );
      cmp_reg_reg( P_REG_RAX, P_REG_RCX );
      je(&label_A);
      /* increase counter */
      add_mem_imm( sp_reg, -1*(int)sizeof(struct svalue)+8, 1 );
      /* get item: RBX = &array->item[counter] (svalues are 16 bytes,
       * hence the shift by 4). */
      mov_mem_reg( P_REG_RBX, OFFSETOF(array,item), P_REG_RBX );
      shl_reg_imm( P_REG_RAX, 4 );
      add_reg_reg( P_REG_RBX, P_REG_RAX );

      mov_mem8_reg( sp_reg, -3*sizeof(struct svalue), P_REG_RAX );
      cmp_reg_imm( P_REG_RAX,T_SVALUE_PTR);
      jne( &label_C );

      /* SVALUE_PTR optimization: the lvalue points directly at an
       * svalue, so assign inline instead of calling assign_lvalue(). */
      mov_mem_reg( sp_reg, -3*sizeof(struct svalue)+8, P_REG_RDX );
      push( P_REG_RDX );
      /* Free old value. */
      amd64_free_svalue( P_REG_RDX, 0 );
      pop( P_REG_RDX );
      /* Assign new value (copy the 16-byte svalue verbatim). */
      mov_mem_reg( P_REG_RBX, 0, P_REG_RAX );
      mov_mem_reg( P_REG_RBX, 8, P_REG_RCX );
      mov_reg_mem( P_REG_RAX, P_REG_RDX, 0 );
      mov_reg_mem( P_REG_RCX, P_REG_RDX, 8 );
      /* inc refs?  Only if the copied value is a refcounted type.
       * NOTE(review): the refs field is addressed via
       * OFFSETOF(pike_string, refs) regardless of the actual ref type;
       * presumably all refcounted types share that offset -- confirm. */
      and_reg32_imm( P_REG_RAX, MIN_REF_TYPE );
      jz( &label_B );
      add_imm_mem( 1, P_REG_RCX, OFFSETOF(pike_string, refs));
      jmp( &label_B );
    LABEL_C;
      /* General lvalue: let the C helper do the assignment. */
      add_reg_imm_reg( sp_reg, -3*sizeof(struct svalue), ARG1_REG );
      mov_reg_reg( P_REG_RBX, ARG2_REG );
      amd64_call_c_function( assign_lvalue );
      jmp(&label_B);
    LABEL_A;
      /* Loop exhausted: branch condition is false. */
      mov_imm_reg( 0, P_REG_RBX );
    LABEL_B;
      /* Branch (continue the foreach) while RBX is nonzero. */
      test_reg(P_REG_RBX);
      return jnz_imm_rel32(0);

    case F_LOOP:
      START_JUMP();
      /* counter in pike_sp-1 */
      /* decrement until 0. */
      /* if not 0, branch */
      /* otherwise, pop */
      amd64_load_sp_reg();
      mov_mem32_reg( sp_reg, -sizeof(struct svalue), P_REG_RAX );
      /* Is it a normal integer? subtype -> 0, type -> PIKE_T_INT */
      cmp_reg32_imm( P_REG_RAX, PIKE_T_INT );
      jne( &label_A );
      /* if it is, is it 0? */
      mov_mem_reg( sp_reg, -sizeof(struct svalue)+8, P_REG_RAX );
      test_reg(P_REG_RAX);
      jz( &label_B ); /* it is. */
      add_reg_imm( P_REG_RAX, -1 );
      mov_reg_mem( P_REG_RAX, sp_reg, -sizeof(struct svalue)+8);
      mov_imm_reg( 1, P_REG_RAX );
      /* decremented. Jump -> true. */

      /* This is where we would really like to have two instances of
       * the target returned from this function...
       */
      jmp( &label_C );
    LABEL_A; /* Not an integer. */
      amd64_call_c_opcode(instrs[F_LOOP-F_OFFSET].address,
                          instrs[F_LOOP-F_OFFSET].flags );
      jmp( &label_C );

      /* result in RAX */
    LABEL_B; /* loop done, inline. Known to be int, and 0 */
      amd64_add_sp( -1 );
      mov_imm_reg(0, P_REG_RAX );

    LABEL_C; /* Branch or not? */
      test_reg( P_REG_RAX );
      return jnz_imm_rel32(0);

    case F_BRANCH_WHEN_EQ: /* sp[-2] == sp[-1] */
    case F_BRANCH_WHEN_NE: /* sp[-2] != sp[-1] */
      /* START_JUMP();*/
      ins_debug_instr_prologue(op, 0, 0);
      amd64_load_sp_reg();
      /* Compare the type bytes first; differing types fall back to C. */
      mov_mem8_reg( sp_reg, -sizeof(struct svalue), P_REG_RCX );
      mov_mem8_reg( sp_reg, -sizeof(struct svalue)*2,P_REG_RBX );
      cmp_reg_reg( P_REG_RCX, P_REG_RBX );
      jnz( &label_A ); /* Types differ */

      /* Fallback to C for functions, objects and floats. */
      mov_imm_reg(1, P_REG_RBX);
      shl_reg32_reg(P_REG_RBX, P_REG_RCX);
      and_reg_imm(P_REG_RBX, (BIT_FUNCTION|BIT_OBJECT|BIT_FLOAT));
      jnz( &label_A );

      mov_mem_reg( sp_reg, -sizeof(struct svalue)+8, P_REG_RBX );
      sub_reg_mem( P_REG_RBX, sp_reg, -sizeof(struct svalue)*2+8);
      /* RBX will now be 0 if they are equal.*/

      /* Optimization: The types are equal, pop_stack can be greatly
       * simplified if they are <= max_ref_type */
      cmp_reg32_imm( P_REG_RCX,MIN_REF_TYPE);
      jge( &label_B );
      /* cheap pop. We know that both are > max_ref_type */
      amd64_add_sp( -2 );
      jmp( &label_D );

    LABEL_A; /* Fallback - call opcode. */
      amd64_call_c_opcode( instrs[F_BRANCH_WHEN_NE-F_OFFSET].address,
                           instrs[F_BRANCH_WHEN_NE-F_OFFSET].flags );
      amd64_load_sp_reg();
      /* Opcode returns 0 if equal, -1 if not. */
      mov_reg_reg(P_REG_RAX, P_REG_RBX);
      jmp(&label_D);

    LABEL_B; /* comparison done, pop stack x2, reftypes */
      /* Inline pop of both svalues: decrement each refcount and call
       * really_free_svalue() only when it drops to zero.
       * NOTE(review): refs addressed via OFFSETOF(pike_string, refs)
       * for any ref type here too -- same shared-offset assumption as
       * in F_FOREACH above. */
      amd64_add_sp( -2 );
      mov_mem_reg( sp_reg, OFFSETOF(svalue,u.refs), P_REG_RAX );
      add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1 );
      jnz( &label_C );
      /* Free sp_reg */
      mov_reg_reg( sp_reg, ARG1_REG );
      amd64_call_c_function( really_free_svalue );
      amd64_load_sp_reg();
    LABEL_C;
      add_reg_imm_reg( sp_reg, sizeof(struct svalue), ARG1_REG );
      mov_mem_reg( ARG1_REG, OFFSETOF(svalue,u.refs), P_REG_RAX );
      add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1 );
      jnz( &label_D );
      amd64_call_c_function( really_free_svalue ); /* free sp[-2] */
    LABEL_D;
      /* RBX == 0 iff the values compared equal. */
      test_reg(P_REG_RBX);
      if( op == F_BRANCH_WHEN_EQ ) return jz_imm_rel32(0);
      return jnz_imm_rel32(0);

#if 0
    case F_BRANCH_WHEN_LT: /* sp[-2] < sp[-1] */
    case F_BRANCH_WHEN_GE: /* sp[-2] >= sp[-1] */

    case F_BRANCH_WHEN_GT: /* sp[-2] > sp[-1] */
    case F_BRANCH_WHEN_LE: /* sp[-2] <= sp[-1] */
#endif
    case F_BRANCH:
      /* Unconditional branch: emit "jmp rel32" with a 0 placeholder
       * and return the offset of the displacement for later patching. */
      START_JUMP();
      add_to_program(0xe9);
      ret=DO_NOT_WARN( (INT32) PIKE_PC );
      PUSH_INT(0);
      return ret;
  }

  /* Generic path: no inline fast path for this opcode.  Call the C
   * implementation; it leaves the branch decision in RAX (0 = do not
   * branch, nonzero = branch). */
  maybe_update_pc();
  addr=instrs[off].address;
  amd64_call_c_opcode(addr, flags);

  amd64_load_sp_reg();
  test_reg(P_REG_RAX);

  if (backward_jump) {
    /* For backward jumps, insert the thread/interrupt check on the
     * taken path only: a short jz skips over the check and the jmp. */
    INT32 skip;
    add_to_program (0x74);	/* jz rel8 */
    add_to_program (0);		/* Bytes to skip. */
    skip = (INT32)PIKE_PC;
    amd64_ins_branch_check_threads_etc(0);
    add_to_program (0xe9);	/* jmp rel32 */
    ret = DO_NOT_WARN ((INT32) PIKE_PC);
    PUSH_INT (0);
    /* Adjust the skip for the relative jump. */
    Pike_compiler->new_program->program[skip-1] = ((INT32)PIKE_PC - skip);
  }
  else {
    add_to_program (0x0f);	/* jnz rel32 */
    add_to_program (0x85);
    ret = DO_NOT_WARN ((INT32) PIKE_PC);
    PUSH_INT (0);
  }
  return ret;
}
d1fa802011-05-09Henrik Grubbström (Grubba) void ins_f_byte_with_arg(unsigned int a, INT32 b) { maybe_update_pc();
eb42a12011-05-11Henrik Grubbström (Grubba)  switch(a) {
719c3a2012-06-12Per Hedbor  case F_THIS_OBJECT: if( b == 0 ) {
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
719c3a2012-06-12Per Hedbor  amd64_push_this_object(); return; }
1c1a242012-06-13Henrik Grubbström (Grubba)  break; /* Fallback to C-version. */
2ce89e2014-08-31Per Hedbor  case F_POP_N_ELEMS: { int i; LABELS(); ins_debug_instr_prologue(a-F_OFFSET,b, 0); amd64_load_sp_reg(); if( b < 3 ) { for(i=0;i<b;i++) amd64_free_svalue_off( sp_reg, SVAL(-1-i).off, 0 ); amd64_add_sp( -b ); return; } break; /* use c version. */ /* FIXME: Why does not this work? */ /* add_reg_imm_reg( sp_reg, -SVAL(b).off, P_REG_RBX ); */ /* LABEL_A; */ /* amd64_free_svalue_off( sp_reg, SVAL(-1).off,0 ); */ /* amd64_add_sp( -1 ); */ /* cmp_reg_reg(P_REG_RBX,sp_reg); */ /* jne(&label_A); */ } return;
ea82682012-06-25Per Hedbor  case F_RETURN_LOCAL: /* FIXME: The C version has a trick: if locals+b < expendibles, pop to there and return. This saves a push, and the poping has to be done anyway. */ ins_f_byte_with_arg( F_LOCAL, b ); ins_f_byte( F_DUMB_RETURN ); return;
8886032012-06-25Per Hedbor  case F_NEG_INT_INDEX: { LABELS();
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
8886032012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -1*sizeof(struct svalue), P_REG_RAX ); cmp_reg32_imm( P_REG_RAX, PIKE_T_ARRAY );
8886032012-06-25Per Hedbor  jne( &label_A );
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -1*sizeof(struct svalue)+8, P_REG_RDX ); /* u.array */
8886032012-06-25Per Hedbor  /* -> arr[sizeof(arr)-b] */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( P_REG_RDX, OFFSETOF(array,size), P_REG_RBX ); sub_reg_imm( P_REG_RBX, b );
8886032012-06-25Per Hedbor  js( &label_A ); /* if signed result, index outside array */
2fd8052013-10-08Per Hedbor  shl_reg_imm( P_REG_RBX, 4 ); add_reg_mem( P_REG_RBX, P_REG_RDX, OFFSETOF(array,item) );
8886032012-06-25Per Hedbor  /* This overwrites the array. */
2fd8052013-10-08Per Hedbor  amd64_push_svaluep_to( P_REG_RBX, -1 );
8886032012-06-25Per Hedbor  /* We know it's an array. */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RDX, OFFSETOF(array,refs), -1);
8886032012-06-25Per Hedbor  jnz( &label_C );
2fd8052013-10-08Per Hedbor  mov_reg_reg( P_REG_RDX, ARG1_REG );
8886032012-06-25Per Hedbor  amd64_call_c_function(really_free_array); jmp( &label_C ); LABEL_A; update_arg1( b ); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_C; } return; case F_POS_INT_INDEX: { LABELS();
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
8886032012-06-25Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem8_reg( sp_reg, -1*sizeof(struct svalue), P_REG_RAX ); cmp_reg32_imm( P_REG_RAX, PIKE_T_ARRAY );
8886032012-06-25Per Hedbor  jne( &label_A );
2fd8052013-10-08Per Hedbor  mov_mem_reg( sp_reg, -1*sizeof(struct svalue)+8, P_REG_RDX ); /* u.array */
8886032012-06-25Per Hedbor  /* -> arr[sizeof(arr)-b] */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( P_REG_RDX, OFFSETOF(array,size), P_REG_RCX ); cmp_reg32_imm( P_REG_RCX, b );
bfca6c2013-06-19Henrik Grubbström (Grubba)  jle( &label_A ); /* RCX <= b, index outside array */
2fd8052013-10-08Per Hedbor  mov_imm_reg( b * sizeof(struct svalue), P_REG_RBX); add_reg_mem( P_REG_RBX, P_REG_RDX, OFFSETOF(array,item) );
8886032012-06-25Per Hedbor  /* This overwrites the array. */
2fd8052013-10-08Per Hedbor  amd64_push_svaluep_to( P_REG_RBX, -1 );
8886032012-06-25Per Hedbor  /* We know it's an array. */
2fd8052013-10-08Per Hedbor  add_mem32_imm( P_REG_RDX, OFFSETOF(array,refs), -1);
8886032012-06-25Per Hedbor  jnz( &label_C );
2fd8052013-10-08Per Hedbor  mov_reg_reg( P_REG_RDX, ARG1_REG );
8886032012-06-25Per Hedbor  amd64_call_c_function(really_free_array); jmp( &label_C ); LABEL_A; update_arg1( b ); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_C; } return; case F_LOCAL_INDEX: { LABELS(); /*
94040e2014-08-14Per Hedbor  local[pike_sp[-1]]
8886032012-06-25Per Hedbor  */
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
8886032012-06-25Per Hedbor  amd64_load_sp_reg(); amd64_load_fp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame,locals), P_REG_RDX); add_reg_imm( P_REG_RDX, b*sizeof(struct svalue)); mov_mem8_reg( P_REG_RDX, 0, P_REG_RAX ); mov_mem8_reg( sp_reg, -1*sizeof(struct svalue), P_REG_RBX ); shl_reg_imm( P_REG_RAX, 8 ); add_reg_reg( P_REG_RAX, P_REG_RBX ); mov_mem_reg( sp_reg, -1*sizeof(struct svalue)+8, P_REG_RBX ); /* int */ mov_mem_reg( P_REG_RDX, 8, P_REG_RCX ); /* value */ cmp_reg32_imm( P_REG_RAX, (PIKE_T_ARRAY<<8)|PIKE_T_INT );
8886032012-06-25Per Hedbor  jne( &label_A ); /* Array and int index. */
2fd8052013-10-08Per Hedbor  mov_mem32_reg( P_REG_RCX, OFFSETOF(array,size), P_REG_RDX ); cmp_reg32_imm( P_REG_RBX, 0 ); jge( &label_D );
8886032012-06-25Per Hedbor  /* less than 0, add size */
2fd8052013-10-08Per Hedbor  add_reg_reg( P_REG_RBX, P_REG_RDX );
8886032012-06-25Per Hedbor  LABEL_D;
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RBX, 0 ); jl( &label_B ); // <0 cmp_reg_reg( P_REG_RBX, P_REG_RCX); jge( &label_B ); // >=size
8886032012-06-25Per Hedbor  /* array, index inside array. push item, swap, pop, done */
2fd8052013-10-08Per Hedbor  mov_mem_reg( P_REG_RCX, OFFSETOF(array,item), P_REG_RCX ); shl_reg_imm( P_REG_RBX, 4 ); add_reg_reg( P_REG_RBX, P_REG_RCX ); amd64_push_svaluep_to( P_REG_RBX, -1 );
8886032012-06-25Per Hedbor  jmp( &label_C ); LABEL_A; #if 0
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, (PIKE_T_STRING<<8)|PIKE_T_INT );
8886032012-06-25Per Hedbor  jne( &label_B ); #endif LABEL_B; /* something else. */ update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_C; /* done */ } return;
2e82fb2014-08-31Per Hedbor  case F_MULTIPLY_INT: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX );
2ce89e2014-08-31Per Hedbor  test_reg32(P_REG_RAX);
2e82fb2014-08-31Per Hedbor  jnz(&label_A); /* FIXME: mul_reg_mem_imm actually exists as an instruction. */ mov_imm_reg(b, P_REG_RAX); mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RBX ); mul_reg_reg(P_REG_RBX,P_REG_RAX); jo(&label_A); mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-1).type); mov_reg_mem(P_REG_RBX, sp_reg, SVAL(-1).value); jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
ba5fbf2014-09-01Per Hedbor  case F_MOD_INT:
da865c2014-09-01Per Hedbor  if( b < 0 ) break;
2ce89e2014-08-31Per Hedbor  case F_DIVIDE_INT: if( b == 0 ) { yyerror("Divide by constant 0\n"); break; } { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_sp_reg(); mov_mem8_reg(sp_reg,SVAL(-1).type, P_REG_RAX); test_reg32(P_REG_RAX); jnz(&label_A); mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RAX); if( b == -1 ) {
da865c2014-09-01Per Hedbor  /* 64bit imm not done. */ cmp_reg_imm(P_REG_RAX,-0x80000000); jle(&label_A);
2ce89e2014-08-31Per Hedbor  }
da865c2014-09-01Per Hedbor  /*if( a == F_MOD_INT )*/
2ce89e2014-08-31Per Hedbor  {
da865c2014-09-01Per Hedbor  cmp_reg_imm(P_REG_RAX, 0); jl(&label_A);
2ce89e2014-08-31Per Hedbor  } mov_imm_reg(b,P_REG_RBX); div_reg_reg(P_REG_RAX,P_REG_RBX); mov_imm_mem(PIKE_T_INT, sp_reg, SVAL(-1).type); if( a == F_MOD_INT ) mov_reg_mem(P_REG_RDX, sp_reg, SVAL(-1).value); else mov_reg_mem(P_REG_RAX, sp_reg, SVAL(-1).value); jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
2e82fb2014-08-31Per Hedbor 
156ab02014-08-31Per Hedbor  case F_AND_INT: { LABELS();
2e82fb2014-08-31Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET,b, 0);
156ab02014-08-31Per Hedbor  amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX );
2ce89e2014-08-31Per Hedbor  test_reg32(P_REG_RAX);
156ab02014-08-31Per Hedbor  jnz(&label_A); /* FIXME: and_mem_imm */ mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RAX ); and_reg_imm(P_REG_RAX,b); mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-1).type);
36fea02014-08-31Per Hedbor  mov_reg_mem(P_REG_RAX,sp_reg,SVAL(-1).value);
156ab02014-08-31Per Hedbor  jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
2ce89e2014-08-31Per Hedbor  case F_XOR_INT: { LABELS(); amd64_load_sp_reg(); ins_debug_instr_prologue(a,b,0); mov_mem8_reg(sp_reg,SVAL(-1).type, P_REG_RAX); test_reg32(P_REG_RAX); jnz(&label_A); mov_mem_reg(sp_reg,SVAL(-1).value, P_REG_RAX); mov_imm_reg(b,P_REG_RBX); xor_reg_reg(P_REG_RAX,P_REG_RBX); mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-1).type); mov_reg_mem(P_REG_RBX,sp_reg,SVAL(-1).value); jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
156ab02014-08-31Per Hedbor  case F_OR_INT: if( !b ) return; { LABELS();
2e82fb2014-08-31Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
156ab02014-08-31Per Hedbor  amd64_load_sp_reg(); mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX );
2ce89e2014-08-31Per Hedbor  test_reg32(P_REG_RAX);
156ab02014-08-31Per Hedbor  jnz(&label_A);
ba5fbf2014-09-01Per Hedbor 
156ab02014-08-31Per Hedbor  /* FIXME: or_mem_imm */ mov_mem_reg(sp_reg, SVAL(-1).value, P_REG_RAX ); or_reg_imm(P_REG_RAX,b); mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-1).type); mov_reg_mem(P_REG_RAX,sp_reg,SVAL(-1).value); jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
a4cee82014-08-31Per Hedbor  case F_LSH_INT: if( b > 0 && b <= 63 ) { LABELS();
2e82fb2014-08-31Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
a4cee82014-08-31Per Hedbor  amd64_load_sp_reg();
d1faf92014-08-31Per Hedbor  mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX );
a4cee82014-08-31Per Hedbor  test_reg32(P_REG_RAX); jz(&label_B);
2e82fb2014-08-31Per Hedbor  LABEL_A;
a4cee82014-08-31Per Hedbor  update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); jmp(&label_C); LABEL_B;
d1faf92014-08-31Per Hedbor  mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-1).type); mov_mem_reg( sp_reg, SVAL(-1).value, P_REG_RBX);
a4cee82014-08-31Per Hedbor  /* ok. It would have been nice is sal set a bit that stayed set when you shifted out a 1 bit. :) */ mov_reg_reg( P_REG_RBX, P_REG_RAX );
2e82fb2014-08-31Per Hedbor  shl_reg_imm( P_REG_RBX, b);
a4cee82014-08-31Per Hedbor  mov_reg_reg( P_REG_RBX, P_REG_RDX );
2e82fb2014-08-31Per Hedbor  shr_reg_imm( P_REG_RDX, b );
a4cee82014-08-31Per Hedbor  cmp_reg_reg( P_REG_RDX, P_REG_RAX ); jne( &label_A );
d1faf92014-08-31Per Hedbor  mov_reg_mem( P_REG_RBX, sp_reg, SVAL(-1).value);
a4cee82014-08-31Per Hedbor  LABEL_C;
d1faf92014-08-31Per Hedbor  return;
a4cee82014-08-31Per Hedbor  } if(!b) return; if( b < 0 ) yyerror("<< with negative constant\n"); break;
156ab02014-08-31Per Hedbor  case F_RSH_INT:
a4cee82014-08-31Per Hedbor  if( b > 0 && b <= 63 )
156ab02014-08-31Per Hedbor  { LABELS();
2e82fb2014-08-31Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
156ab02014-08-31Per Hedbor  amd64_load_sp_reg();
d1faf92014-08-31Per Hedbor  mov_mem8_reg(sp_reg, SVAL(-1).type, P_REG_RAX );
2ce89e2014-08-31Per Hedbor  test_reg32(P_REG_RAX);
156ab02014-08-31Per Hedbor  jnz(&label_A); mov_mem_reg( sp_reg, SVAL(-1).value, P_REG_RAX); /* FIXME: shr_mem_imm */ mov_imm_mem(PIKE_T_INT,sp_reg,SVAL(-1).type);
d1faf92014-08-31Per Hedbor  shr_reg_imm( P_REG_RAX, b);
156ab02014-08-31Per Hedbor  mov_reg_mem( P_REG_RAX, sp_reg, SVAL(-1).value); jmp(&label_B); LABEL_A; update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags);
1f5f652014-08-31Henrik Grubbström (Grubba)  LABEL_B;
156ab02014-08-31Per Hedbor  return; } if(!b) return; if( b < 0 ) yyerror(">> with negative constant\n");
ba5fbf2014-09-01Per Hedbor  break;
8886032012-06-25Per Hedbor 
156ab02014-08-31Per Hedbor  case F_SUBTRACT_INT:
6785ae2012-06-15Per Hedbor  case F_ADD_NEG_INT: b = -b; case F_ADD_INT: { LABELS();
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
6785ae2012-06-15Per Hedbor  amd64_load_sp_reg();
ba5fbf2014-09-01Per Hedbor  mov_mem8_reg( sp_reg,SVAL(-1).type, P_REG_RAX ); test_reg32(P_REG_RAX); jnz( &label_A ); mov_mem_reg(sp_reg, SVAL(-1).value,P_REG_RAX );
2fd8052013-10-08Per Hedbor  add_reg_imm( P_REG_RAX, b );
6785ae2012-06-15Per Hedbor  jo( &label_A ); /* if overflow, use f_add */
ba5fbf2014-09-01Per Hedbor  mov_imm_mem( PIKE_T_INT, sp_reg, SVAL(-1).type ); mov_reg_mem( P_REG_RAX, sp_reg, SVAL(-1).value );
6785ae2012-06-15Per Hedbor  jmp(&label_B); /* all done. */ LABEL_A; amd64_push_int(b,0); update_arg1(2); amd64_call_c_opcode( f_add, I_UPDATE_SP ); amd64_load_sp_reg(); LABEL_B; } return;
eb42a12011-05-11Henrik Grubbström (Grubba)  case F_NUMBER:
ab7cf52011-05-24Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
eb42a12011-05-11Henrik Grubbström (Grubba)  amd64_push_int(b, 0); return; case F_NEG_NUMBER:
ab7cf52011-05-24Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
eb42a12011-05-11Henrik Grubbström (Grubba)  amd64_push_int(-(INT64)b, 0); return;
ab7cf52011-05-24Henrik Grubbström (Grubba)  case F_STRING: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_push_string(b, 0); return; case F_ARROW_STRING: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_push_string(b, 1); return; case F_MARK_AND_CONST0: ins_f_byte(F_MARK); ins_f_byte(F_CONST0); return; case F_MARK_AND_CONST1: ins_f_byte(F_MARK); ins_f_byte(F_CONST0); return; case F_MARK_AND_STRING: ins_f_byte(F_MARK); ins_f_byte_with_arg(F_STRING, b); return; case F_MARK_AND_GLOBAL: ins_f_byte(F_MARK); ins_f_byte_with_arg(F_GLOBAL, b); return; case F_MARK_AND_LOCAL: ins_f_byte(F_MARK); ins_f_byte_with_arg(F_LOCAL, b); return; case F_MARK_X: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_mark(b); return;
9030f62011-05-26Henrik Grubbström (Grubba)  case F_LFUN: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_push_local_function(b); return;
719c3a2012-06-12Per Hedbor  case F_ASSIGN_LOCAL: ins_debug_instr_prologue(a-F_OFFSET, b, 0);
fd1b5a2012-06-19Henrik Grubbström (Grubba)  amd64_load_sp_reg();
719c3a2012-06-12Per Hedbor  amd64_assign_local(b); add_reg_imm_reg(sp_reg, -sizeof(struct svalue), ARG1_REG);
b505a72012-06-13Per Hedbor  amd64_ref_svalue(ARG1_REG, 0);
719c3a2012-06-12Per Hedbor  return; case F_ASSIGN_LOCAL_AND_POP: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_assign_local(b); amd64_add_sp(-1); return;
83b92e2014-08-16Per Hedbor 
4ec32c2014-08-07Per Hedbor  case F_ASSIGN_PRIVATE_GLOBAL_AND_POP: case F_ASSIGN_PRIVATE_GLOBAL:
2ee7d12014-08-11Per Hedbor  { LABELS();
4ec32c2014-08-07Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_get_storage( P_REG_RBX, b );
2ee7d12014-08-11Per Hedbor  /* do not assign if this object is destructed. */ mov_mem_reg( fp_reg, OFFSETOF(pike_frame,current_object), ARG1_REG ); mov_mem_reg( ARG1_REG, OFFSETOF(object,prog), P_REG_RAX ); test_reg(P_REG_RAX); jnz(&label_A); /* Note: will throw error ('cannot index destructed object' or similar).
36e0822014-08-11Per Hedbor  * Note: Only arg1 is correct here (points to the destructed object)
2ee7d12014-08-11Per Hedbor  */ amd64_call_c_function(object_low_set_index); LABEL_A;
4ec32c2014-08-07Per Hedbor  amd64_free_svalue( P_REG_RBX, 0 ); amd64_load_sp_reg(); if( a == F_ASSIGN_PRIVATE_GLOBAL_AND_POP ) { amd64_add_sp(-1); amd64_assign_svalue_no_free( P_REG_RBX, sp_reg, 0); } else { amd64_assign_svalue_no_free( P_REG_RBX, sp_reg, -sizeof(struct svalue));
2ee7d12014-08-11Per Hedbor  amd64_ref_svalue(P_REG_RBX,0);
4ec32c2014-08-07Per Hedbor  }
2ee7d12014-08-11Per Hedbor  }
36e0822014-08-11Per Hedbor  return;
4ec32c2014-08-07Per Hedbor 
719c3a2012-06-12Per Hedbor  case F_ASSIGN_GLOBAL: case F_ASSIGN_GLOBAL_AND_POP: /* arg1: pike_fp->current obj arg2: arg1+idenfier level arg3: Pike_sp-1 */ /* NOTE: We cannot simply do the same optimization as for ASSIGN_LOCAL_AND_POP with assign global, since assigning at times does not add references. We do know, however, that refs (should) never reach 0 when poping the stack. We can thus skip that part of pop_value */ ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg(); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, current_object), ARG1_REG); mov_mem_reg(fp_reg, OFFSETOF(pike_frame,context), ARG2_REG); mov_mem16_reg(ARG2_REG, OFFSETOF(inherit, identifier_level), ARG2_REG); add_reg_imm( ARG2_REG, b ); add_reg_imm_reg( sp_reg, -sizeof(struct svalue), ARG3_REG ); amd64_call_c_function( object_low_set_index ); if( a == F_ASSIGN_GLOBAL_AND_POP ) { /* assign done, pop. */
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
719c3a2012-06-12Per Hedbor  amd64_add_sp( -1 ); amd64_free_svalue( sp_reg, 1 ); } return; case F_SIZEOF_LOCAL: { LABELS();
5c82f42012-06-13Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
719c3a2012-06-12Per Hedbor  amd64_load_fp_reg(); amd64_load_sp_reg(); mov_mem_reg( fp_reg, OFFSETOF(pike_frame,locals), ARG1_REG); add_reg_imm( ARG1_REG, b*sizeof(struct svalue));
2fd8052013-10-08Per Hedbor  mov_sval_type( ARG1_REG, P_REG_RAX );
719c3a2012-06-12Per Hedbor  /* type in RAX, svalue in ARG1 */
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, PIKE_T_ARRAY );
719c3a2012-06-12Per Hedbor  jne( &label_A ); /* It's an array */ /* move arg to point to the array */ mov_mem_reg( ARG1_REG, OFFSETOF(svalue, u.array ), ARG1_REG); /* load size -> RAX*/
2fd8052013-10-08Per Hedbor  mov_mem32_reg( ARG1_REG,OFFSETOF(array, size), P_REG_RAX );
719c3a2012-06-12Per Hedbor  jmp( &label_C ); LABEL_A;
2fd8052013-10-08Per Hedbor  cmp_reg32_imm( P_REG_RAX, PIKE_T_STRING );
719c3a2012-06-12Per Hedbor  jne( &label_B ); /* It's a string */ /* move arg to point to the string */ mov_mem_reg( ARG1_REG, OFFSETOF(svalue, u.string ), ARG1_REG); /* load size ->RAX*/
2fd8052013-10-08Per Hedbor  mov_mem32_reg( ARG1_REG,OFFSETOF(pike_string, len ), P_REG_RAX );
719c3a2012-06-12Per Hedbor  jmp( &label_C ); LABEL_B; /* It's something else, svalue already in ARG1. */ amd64_call_c_function( pike_sizeof ); LABEL_C;/* all done, res in RAX */ /* Store result on stack */
2fd8052013-10-08Per Hedbor  amd64_push_int_reg( P_REG_RAX );
719c3a2012-06-12Per Hedbor  } return;
956b122014-09-23Per Hedbor  case F_SIZEOF_LOCAL_STRING: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg(); mov_mem_reg( fp_reg, OFFSETOF(pike_frame,locals), ARG1_REG); add_reg_imm( ARG1_REG, b*sizeof(struct svalue)); mov_sval_type( ARG1_REG, P_REG_RAX ); mov_mem_reg( ARG1_REG, OFFSETOF(svalue, u.string ), P_REG_RBX); /* type in RAX, svalue in ARG1 */ cmp_reg32_imm( P_REG_RAX, PIKE_T_STRING ); je( &label_B ); /* It's something else, svalue already in ARG1. */ amd64_call_c_function( pike_sizeof ); LABEL_B; mov_mem32_reg( P_REG_RBX,OFFSETOF(pike_string, len ), P_REG_RAX ); /* Store result on stack */ amd64_push_int_reg( P_REG_RAX ); } return;
4ec32c2014-08-07Per Hedbor  case F_PRIVATE_GLOBAL: /* This is a private (or final) global which is not a 'short' one. It is guaranteed to be available as an svalue at pike_fp->current_object->storage + pike_fp->context->storage_offset + offset (I really wish pike_fp->current_storage was kept up to date for pike function..) */ ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_sp_reg(); amd64_get_storage( P_REG_RBX, b ); amd64_push_svaluep( P_REG_RBX ); return;
9030f62011-05-26Henrik Grubbström (Grubba)  case F_GLOBAL: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg();
719c3a2012-06-12Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, context), ARG3_REG); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, current_object),
9030f62011-05-26Henrik Grubbström (Grubba)  ARG2_REG);
719c3a2012-06-12Per Hedbor  mov_reg_reg(sp_reg, ARG1_REG);
4ceb792012-06-22Per Hedbor  mov_mem16_reg(ARG3_REG, OFFSETOF(inherit, identifier_level),
719c3a2012-06-12Per Hedbor  ARG3_REG); add_reg_imm(ARG3_REG, b);
83b92e2014-08-16Per Hedbor  amd64_call_c_function(low_object_index_no_free);
719c3a2012-06-12Per Hedbor  amd64_add_sp(1);
9030f62011-05-26Henrik Grubbström (Grubba)  return;
719c3a2012-06-12Per Hedbor 
9030f62011-05-26Henrik Grubbström (Grubba)  case F_LOCAL: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), P_REG_RCX); add_reg_imm(P_REG_RCX, b*sizeof(struct svalue)); amd64_push_svaluep(P_REG_RCX);
9030f62011-05-26Henrik Grubbström (Grubba)  return;
719c3a2012-06-12Per Hedbor 
91080b2012-06-15Henrik Grubbström (Grubba)  case F_CLEAR_LOCAL:
6b26382014-08-15Per Hedbor  ins_f_byte_with_2_args( F_CLEAR_N_LOCAL, b, 1 );
91080b2012-06-15Henrik Grubbström (Grubba)  return; case F_INC_LOCAL_AND_POP: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), P_REG_RCX); add_reg_imm(P_REG_RCX, b*sizeof(struct svalue)); mov_sval_type(P_REG_RCX, P_REG_RAX); cmp_reg32_imm(P_REG_RAX, PIKE_T_INT);
91080b2012-06-15Henrik Grubbström (Grubba)  jne(&label_A); /* Integer - Zap subtype and try just incrementing it. */
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_reg_mem32(P_REG_RAX, P_REG_RCX, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  add_imm_mem(1, P_REG_RCX, OFFSETOF(svalue, u.integer));
ec96912012-06-17Henrik Grubbström (Grubba)  jno(&label_B);
2fd8052013-10-08Per Hedbor  add_imm_mem(-1, P_REG_RCX, OFFSETOF(svalue, u.integer));
91080b2012-06-15Henrik Grubbström (Grubba)  LABEL_A; /* Fallback to the C-implementation. */ update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
b669592012-06-19Henrik Grubbström (Grubba)  case F_INC_LOCAL: ins_f_byte_with_arg(F_INC_LOCAL_AND_POP, b); ins_f_byte_with_arg(F_LOCAL, b); return; case F_POST_INC_LOCAL: ins_f_byte_with_arg(F_LOCAL, b); ins_f_byte_with_arg(F_INC_LOCAL_AND_POP, b); return;
91080b2012-06-15Henrik Grubbström (Grubba)  case F_DEC_LOCAL_AND_POP: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), P_REG_RCX); add_reg_imm(P_REG_RCX, b*sizeof(struct svalue)); mov_sval_type(P_REG_RCX, P_REG_RAX); cmp_reg32_imm(P_REG_RAX, PIKE_T_INT);
91080b2012-06-15Henrik Grubbström (Grubba)  jne(&label_A); /* Integer - Zap subtype and try just decrementing it. */
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_reg_mem32(P_REG_RAX, P_REG_RCX, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  add_imm_mem(-1, P_REG_RCX, OFFSETOF(svalue, u.integer));
ec96912012-06-17Henrik Grubbström (Grubba)  jno(&label_B);
2fd8052013-10-08Per Hedbor  add_imm_mem(1, P_REG_RCX, OFFSETOF(svalue, u.integer));
91080b2012-06-15Henrik Grubbström (Grubba)  LABEL_A; /* Fallback to the C-implementation. */ update_arg1(b); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_B; } return;
b669592012-06-19Henrik Grubbström (Grubba)  case F_DEC_LOCAL: ins_f_byte_with_arg(F_DEC_LOCAL_AND_POP, b); ins_f_byte_with_arg(F_LOCAL, b); return; case F_POST_DEC_LOCAL: ins_f_byte_with_arg(F_LOCAL, b); ins_f_byte_with_arg(F_DEC_LOCAL_AND_POP, b); return;
ea82682012-06-25Per Hedbor #if 0 /* These really have to be done inline: reg = (sp- *--mark_sp)>>16 ltosval_and_free(-(args+2)) -> pike_sp-args call_builtin(reg) -- stack now lvalue, result, so now.. ins_f_byte( F_ASSIGN or F_ASSIGN_AND_POP ) */ case F_LTOSVAL_CALL_BUILTIN_AND_ASSIGN_POP: case F_LTOSVAL_CALL_BUILTIN_AND_ASSIGN: return; #endif
622ebc2012-06-25Per Hedbor  case F_CALL_BUILTIN_AND_POP: ins_f_byte_with_arg( F_CALL_BUILTIN, b ); ins_f_byte( F_POP_VALUE ); return; case F_MARK_CALL_BUILTIN_AND_RETURN: ins_f_byte_with_arg( F_MARK_CALL_BUILTIN, b ); ins_f_byte( F_DUMB_RETURN ); return; case F_MARK_CALL_BUILTIN_AND_POP: ins_f_byte_with_arg( F_MARK_CALL_BUILTIN, b ); ins_f_byte( F_POP_VALUE ); return; case F_CALL_BUILTIN1_AND_POP: ins_f_byte_with_arg( F_CALL_BUILTIN1, b ); ins_f_byte( F_POP_VALUE ); return; case F_CALL_BUILTIN_AND_RETURN: ins_f_byte_with_arg( F_CALL_BUILTIN, b ); ins_f_byte( F_DUMB_RETURN ); return; case F_CALL_BUILTIN: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_mark_sp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( mark_sp_reg, -sizeof(struct svalue*), P_REG_RAX );
622ebc2012-06-25Per Hedbor  amd64_add_mark_sp( -1 ); mov_reg_reg( sp_reg, ARG1_REG );
2fd8052013-10-08Per Hedbor  sub_reg_reg( ARG1_REG, P_REG_RAX );
622ebc2012-06-25Per Hedbor  shr_reg_imm( ARG1_REG, 4 ); /* arg1 = (sp_reg - *--mark_sp)/16 (sizeof(svalue)) */ case F_MARK_CALL_BUILTIN: if(a == F_MARK_CALL_BUILTIN ) {
c33ce92012-06-28Per Hedbor  /* Note: It is not actually possible to do ins_debug_instr_prologue here. ins_debug_instr_prologue(a-F_OFFSET, b, 0); */
622ebc2012-06-25Per Hedbor  mov_imm_reg( 0, ARG1_REG ); } case F_CALL_BUILTIN1: if(a == F_CALL_BUILTIN1 ) {
c33ce92012-06-28Per Hedbor  /* Note: It is not actually possible to do ins_debug_instr_prologue here. ins_debug_instr_prologue(a-F_OFFSET, b, 0); */
622ebc2012-06-25Per Hedbor  mov_imm_reg( 1, ARG1_REG ); }
1807c02014-07-15Per Hedbor 
622ebc2012-06-25Per Hedbor  /* Get function pointer */
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, 0);
622ebc2012-06-25Per Hedbor  amd64_call_c_opcode(Pike_compiler->new_program->constants[b].sval.u.efun->function, I_UPDATE_SP); return;
719c3a2012-06-12Per Hedbor  case F_CONSTANT: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame,context), P_REG_RCX ); mov_mem_reg( P_REG_RCX, OFFSETOF(inherit,prog), P_REG_RCX ); mov_mem_reg( P_REG_RCX, OFFSETOF(program,constants), P_REG_RCX ); add_reg_imm( P_REG_RCX, b*sizeof(struct program_constant) +
719c3a2012-06-12Per Hedbor  OFFSETOF(program_constant,sval) );
2fd8052013-10-08Per Hedbor  amd64_push_svaluep( P_REG_RCX );
719c3a2012-06-12Per Hedbor  return; case F_GLOBAL_LVALUE: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg(); amd64_push_this_object( );
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( T_OBJ_INDEX, sp_reg, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, context), P_REG_RAX); mov_mem16_reg( P_REG_RAX,OFFSETOF(inherit, identifier_level), P_REG_RAX); add_reg_imm( P_REG_RAX, b ); mov_reg_mem( P_REG_RAX, sp_reg, OFFSETOF(svalue,u.identifier) );
719c3a2012-06-12Per Hedbor  amd64_add_sp( 1 ); return; case F_LOCAL_LVALUE: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg(); /* &frame->locals[b] */
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), P_REG_RAX); add_reg_imm( P_REG_RAX, b*sizeof(struct svalue));
719c3a2012-06-12Per Hedbor 
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( T_SVALUE_PTR, sp_reg, OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  mov_reg_mem( P_REG_RAX, sp_reg, OFFSETOF(svalue,u.lval) );
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( T_VOID, sp_reg, OFFSETOF(svalue, tu.t.type)+sizeof(struct svalue));
719c3a2012-06-12Per Hedbor  amd64_add_sp( 2 ); return;
5151e52012-06-10Henrik Grubbström (Grubba)  case F_PROTECT_STACK: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg();
719c3a2012-06-12Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG);
5151e52012-06-10Henrik Grubbström (Grubba)  if (b) {
719c3a2012-06-12Per Hedbor  add_reg_imm_reg(ARG1_REG, sizeof(struct svalue) * b, ARG1_REG);
5151e52012-06-10Henrik Grubbström (Grubba)  }
719c3a2012-06-12Per Hedbor  mov_reg_mem(ARG1_REG, fp_reg, OFFSETOF(pike_frame, expendible));
5151e52012-06-10Henrik Grubbström (Grubba)  return; case F_MARK_AT: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_mark_sp_reg();
719c3a2012-06-12Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG);
5151e52012-06-10Henrik Grubbström (Grubba)  if (b) {
719c3a2012-06-12Per Hedbor  add_reg_imm_reg(ARG1_REG, sizeof(struct svalue) * b, ARG1_REG);
5151e52012-06-10Henrik Grubbström (Grubba)  }
719c3a2012-06-12Per Hedbor  mov_reg_mem(ARG1_REG, mark_sp_reg, 0x00);
1af0932012-06-22Per Hedbor  amd64_add_mark_sp( 1 );
5151e52012-06-10Henrik Grubbström (Grubba)  return;
eb42a12011-05-11Henrik Grubbström (Grubba)  }
d1fa802011-05-09Henrik Grubbström (Grubba)  update_arg1(b); ins_f_byte(a);
7990392006-04-27Tor Edvardsson }
54a26b2011-05-11Henrik Grubbström (Grubba) int amd64_ins_f_jump_with_arg(unsigned int op, INT32 a, int backward_jump) {
719c3a2012-06-12Per Hedbor  LABELS();
54a26b2011-05-11Henrik Grubbström (Grubba)  if (!(instrs[op - F_OFFSET].flags & I_BRANCH)) return -1;
719c3a2012-06-12Per Hedbor  switch( op ) {
94040e2014-08-14Per Hedbor  /* case F_BRANCH_IF_TYPE_IS_NOT: */ /* ins_debug_instr_prologue(op-F_OFFSET, a, 0); */ /* amd64_load_sp_reg(); */ /* amd64_add_sp(-1); */ /* mov_sval_type( sp_reg, P_REG_RAX ); */ /* cmp_reg32_imm(P_REG_RAX, PIKE_T_OBJECT ); */ /* jne( &label_A ); */ /* amd64_add_sp(1); */ /* amd64_call_c_opcode(instrs[op-F_OFFSET].address, I_UPDATE_SP); */ /* jz( &label_B ); /\* do not branch. *\/ */ /* /\* branch. *\/ */ /* mov_imm_reg( a, P_REG_RAX ); */ /* jmp( &label_C ); */ /* amd64_load_sp_reg(); */ /* label_A; */ /* amd64_free_svalue( sp_reg, 0); */ /* label_C; */ /* cmp_reg32_imm(P_REG_RAX, a); */ /* int res= jnz_imm_rel32(0); */ /* label_B; */ /* return res; */
719c3a2012-06-12Per Hedbor  case F_BRANCH_IF_NOT_LOCAL: case F_BRANCH_IF_LOCAL: ins_debug_instr_prologue(op-F_OFFSET, a, 0); amd64_load_fp_reg();
4ceb792012-06-22Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG); add_reg_imm( ARG1_REG, a*sizeof(struct svalue)); /* if( type == PIKE_T_INT )
719c3a2012-06-12Per Hedbor  u.integer -> RAX
4ceb792012-06-22Per Hedbor  else if( type == PIKE_T_OBJECT || type == PIKE_T_FUNCTION ) call svalue_is_true(&local)
1af0932012-06-22Per Hedbor  else 1 -> RAX
4ceb792012-06-22Per Hedbor  The tests are ordered assuming integers are most commonly checked. That is not nessasarily true.
719c3a2012-06-12Per Hedbor  */
2fd8052013-10-08Per Hedbor  mov_sval_type( ARG1_REG, P_REG_RCX );
67d80c2014-08-18Per Hedbor  cmp_reg32_imm( P_REG_RCX, PIKE_T_INT ); je( &label_C );
2fd8052013-10-08Per Hedbor  mov_imm_reg( 1, P_REG_RAX ); shl_reg32_reg( P_REG_RAX, P_REG_RCX ); and_reg32_imm( P_REG_RAX, BIT_FUNCTION|BIT_OBJECT );
378ad02012-06-25Per Hedbor  jnz( &label_A );
4ceb792012-06-22Per Hedbor  /* Not object, int or function. Always true. */
2fd8052013-10-08Per Hedbor  mov_imm_reg( 1, P_REG_RAX );
719c3a2012-06-12Per Hedbor  jmp( &label_B );
378ad02012-06-25Per Hedbor  LABEL_A;
4ceb792012-06-22Per Hedbor  amd64_call_c_function(svalue_is_true); jmp( &label_B ); LABEL_C;
2fd8052013-10-08Per Hedbor  mov_mem_reg( ARG1_REG, OFFSETOF(svalue, u.integer ), P_REG_RAX );
4ceb792012-06-22Per Hedbor  /* integer. */
719c3a2012-06-12Per Hedbor  LABEL_B;
2fd8052013-10-08Per Hedbor  test_reg( P_REG_RAX );
719c3a2012-06-12Per Hedbor  if( op == F_BRANCH_IF_LOCAL ) return jnz_imm_rel32(0); return jz_imm_rel32(0); }
54a26b2011-05-11Henrik Grubbström (Grubba)  maybe_update_pc(); update_arg1(a); return amd64_ins_f_jump(op, backward_jump); }
d1fa802011-05-09Henrik Grubbström (Grubba) void ins_f_byte_with_2_args(unsigned int a, INT32 b, INT32 c) { maybe_update_pc();
eb42a12011-05-11Henrik Grubbström (Grubba)  switch(a) { case F_NUMBER64:
ab7cf52011-05-24Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, c);
eb42a12011-05-11Henrik Grubbström (Grubba)  amd64_push_int((((unsigned INT64)b)<<32)|(unsigned INT32)c, 0); return;
ab7cf52011-05-24Henrik Grubbström (Grubba)  case F_MARK_AND_EXTERNAL: ins_f_byte(F_MARK); ins_f_byte_with_2_args(F_EXTERNAL, b, c); return;
6b26382014-08-15Per Hedbor  case F_CLEAR_N_LOCAL: { LABELS();
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
6b26382014-08-15Per Hedbor  amd64_load_fp_reg(); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), P_REG_RBX); add_reg_imm(P_REG_RBX, b*sizeof(struct svalue)); if( c > 1 ) add_reg_imm_reg(P_REG_RBX, c*sizeof(struct svalue), P_REG_RBP ); LABEL_A; amd64_free_svalue(P_REG_RBX, 0); mov_imm_mem(PIKE_T_INT, P_REG_RBX, OFFSETOF(svalue, tu.t.type)); mov_imm_mem(0, P_REG_RBX, OFFSETOF(svalue, u.integer)); if( c > 1 ) { add_reg_imm(P_REG_RBX, sizeof(struct svalue ) ); cmp_reg_reg( P_REG_RBX, P_REG_RBP ); jne(&label_A); } } return;
94040e2014-08-14Per Hedbor  case F_LOCAL_LOCAL_INDEX: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, c); amd64_load_fp_reg(); mov_mem_reg( fp_reg, OFFSETOF(pike_frame,locals), P_REG_RDX); add_reg_imm_reg( P_REG_RDX, b*sizeof(struct svalue), P_REG_RBX); add_reg_imm_reg( P_REG_RDX, c*sizeof(struct svalue), P_REG_RCX); /* bx = svalue for value */ /* cx = svalue for index */ mov_sval_type( P_REG_RBX, P_REG_RAX ); mov_sval_type( P_REG_RCX, P_REG_RDX ); shl_reg_imm( P_REG_RAX, 8 ); /* fixme: Add add_mem8_reg */ add_reg_reg( P_REG_RAX, P_REG_RDX ); cmp_reg32_imm( P_REG_RAX, (PIKE_T_ARRAY<<8)|PIKE_T_INT ); jne( &label_B ); /* ax = value->tyupe<<8|index->type */ mov_mem_reg( P_REG_RBX, 8, P_REG_RBX ); /* value */ mov_mem_reg( P_REG_RCX, 8, P_REG_R9 ); /* int */ /* dx = value->u.array, bx = index */ /* Array and int index. */ mov_mem32_reg( P_REG_RDX, OFFSETOF(array,size), P_REG_R8 ); cmp_reg32_imm( P_REG_RBX, 0 ); jge( &label_D ); /* index less than 0, add array size */ add_reg_reg( P_REG_RBX, P_REG_R8 );
da045c2014-08-14Per Hedbor 
94040e2014-08-14Per Hedbor  LABEL_D; cmp_reg32_imm( P_REG_RBX, 0 ); jl( &label_B ); // index (still) <0 cmp_reg_reg( P_REG_RBX, P_REG_R8); jge( &label_B ); //index >=size /* array->item + x to stack. done */ mov_mem_reg( P_REG_RDX, OFFSETOF(array,item), P_REG_RCX ); /* cx = arr->item */ shl_reg_imm( P_REG_RBX, 4 ); add_reg_reg( P_REG_RBX, P_REG_RCX ); /* bx = bx * sizeof(svalue) + cx */ amd64_push_svaluep( P_REG_RBX ); jmp( &label_C ); LABEL_B; /* something else. */ update_arg1(b); update_arg2(c); amd64_call_c_opcode(instrs[a-F_OFFSET].address, instrs[a-F_OFFSET].flags); LABEL_C; /* done */ } return;
b40c772014-08-18Per Hedbor 
83b92e2014-08-16Per Hedbor  case F_PRIVATE_IF_DIRECT_GLOBAL: { LABELS(); amd64_load_sp_reg(); ins_debug_instr_prologue(a-F_OFFSET, b, c); /* do not assign if this object is destructed. */ mov_mem_reg( fp_reg, OFFSETOF(pike_frame,current_object), ARG2_REG ); mov_mem_reg( fp_reg, OFFSETOF(pike_frame,context), ARG3_REG ); /* if ctx->prog != arg1->prog */ mov_mem_reg( ARG3_REG, OFFSETOF(inherit,prog), P_REG_R8 ); mov_mem_reg( ARG2_REG, OFFSETOF(object,prog), P_REG_RAX ); cmp_reg_reg(P_REG_R8,P_REG_RAX); je(&label_A); mov_reg_reg(sp_reg, ARG1_REG); mov_mem16_reg(ARG3_REG, OFFSETOF(inherit, identifier_level), ARG3_REG); add_reg_imm(ARG3_REG, c); amd64_call_c_function(low_object_index_no_free); amd64_add_sp(1); jmp(&label_B); LABEL_A; mov_mem_reg(ARG2_REG, OFFSETOF(object,storage), P_REG_RBX ); add_reg_mem(P_REG_RBX, ARG3_REG, OFFSETOF(inherit,storage_offset)); add_reg_imm(P_REG_RBX, b ); amd64_push_svaluep( P_REG_RBX ); LABEL_B; return; } case F_ASSIGN_PRIVATE_IF_DIRECT_GLOBAL: { LABELS(); amd64_load_sp_reg(); ins_debug_instr_prologue(a-F_OFFSET, b, c); /* do not assign if this object is destructed. 
*/ mov_mem_reg( fp_reg, OFFSETOF(pike_frame,current_object), ARG1_REG ); mov_mem_reg( fp_reg, OFFSETOF(pike_frame,context), ARG2_REG ); /* if ctx->prog != arg1->prog */ mov_mem_reg( ARG1_REG, OFFSETOF(object,prog), P_REG_RAX ); mov_mem_reg( ARG2_REG, OFFSETOF(inherit,prog), P_REG_R8 ); cmp_reg_reg(P_REG_R8,P_REG_RAX); je(&label_A); /* arg1 = object */ /* arg2 = c + identifier_level */ /* arg3 = Pike_sp-1 */ mov_mem16_reg( ARG2_REG, OFFSETOF(inherit,identifier_level), ARG2_REG); add_reg_imm( ARG2_REG, c ); add_reg_imm_reg( sp_reg, -1*sizeof(struct svalue), ARG3_REG ); amd64_call_c_function(object_low_set_index); jmp(&label_B); LABEL_A; mov_mem_reg(ARG1_REG, OFFSETOF(object,storage), P_REG_RBX ); add_reg_mem(P_REG_RBX, ARG2_REG, OFFSETOF(inherit,storage_offset)); add_reg_imm(P_REG_RBX, b ); amd64_free_svalue( P_REG_RBX, 0 ); amd64_assign_svalue_no_free( P_REG_RBX, sp_reg, -sizeof(struct svalue)); amd64_ref_svalue(P_REG_RBX,0); LABEL_B; } return;
a92d452014-08-18Per Hedbor 
da045c2014-08-14Per Hedbor  case F_ASSIGN_PRIVATE_TYPED_GLOBAL_AND_POP: case F_ASSIGN_PRIVATE_TYPED_GLOBAL: { LABELS(); amd64_load_sp_reg();
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
da045c2014-08-14Per Hedbor  amd64_get_storage( P_REG_RBX, b ); /* do not assign anything if this object is destructed. */ /* we really only need to redo this check after we have called some other function. It is sort of hard to know when that happens while generating the code, however. Simply assigning the global could in theory actually destruct this object (old value includes something with a destroy that calls destruct on this object...) Note that nothing crash if we do the assign. We "just" leak the variable when the storage is eventually free:d. The generated code does look sort of silly, however. */ mov_mem_reg( fp_reg, OFFSETOF(pike_frame,current_object), ARG1_REG ); mov_mem_reg( ARG1_REG, OFFSETOF(object,prog), P_REG_RAX ); test_reg(P_REG_RAX); jnz(&label_A); /* will not return. */ amd64_call_c_function(object_low_set_index); LABEL_A; /* 1: check type of argument */ mov_mem8_reg( sp_reg, -sizeof(struct svalue )+OFFSETOF(svalue,tu.t.type), P_REG_RAX); cmp_reg32_imm( P_REG_RAX, c ); je( &label_B ); /* not the same type. Fall back to use C-function (for code size mainly).. */ mov_reg_reg( P_REG_RBX, ARG1_REG ); mov_imm_reg( c, ARG2_REG ); add_reg_imm_reg( sp_reg, -sizeof(struct svalue), ARG3_REG ); amd64_call_c_function(assign_to_short_svalue); /* pop stack if needed. */
7af2af2014-08-16Henrik Grubbström (Grubba)  if( a == F_ASSIGN_PRIVATE_TYPED_GLOBAL_AND_POP )
da045c2014-08-14Per Hedbor  { amd64_add_sp(-1); /* this will either have assigned 0 or thrown an error. * if we assigned 0, it was because the argument evaluated as false. * However, that means that we might possibly have to free it here.. * * We do not even know that is has at least two references. */ amd64_free_svalue( sp_reg, 0 ); } jmp( &label_E ); /* pop or not. */ LABEL_B; /* at this point: o->storage + off in RBX. */ if( c >= MIN_REF_TYPE ) { mov_mem_reg( P_REG_RBX, 0, P_REG_RAX ); test_reg(P_REG_RAX); /* old value == NULL? */ jz(&label_C); /* deref. */ add_mem32_imm( P_REG_RAX, OFFSETOF(pike_string,refs), -1); jnz(&label_C); mov_reg_reg( P_REG_RBX, ARG1_REG ); mov_imm_reg( c, ARG2_REG ); amd64_call_c_function(really_free_short_svalue_ptr); } LABEL_C; /* old value is gone. Assign new value */ mov_mem_reg( sp_reg, -8, P_REG_RAX ); mov_reg_mem( P_REG_RAX, P_REG_RBX, 0 );
7af2af2014-08-16Henrik Grubbström (Grubba)  if( a == F_ASSIGN_PRIVATE_TYPED_GLOBAL_AND_POP )
da045c2014-08-14Per Hedbor  amd64_add_sp( -1 ); else if( c >= MIN_REF_TYPE ) add_mem_imm( P_REG_RAX, OFFSETOF(pike_string,refs), 1 ); LABEL_E; } return;
dc10542014-08-14Per Hedbor  case F_PRIVATE_TYPED_GLOBAL: /* b -> off, c -> type */ ins_debug_instr_prologue(a-F_OFFSET, b, c); amd64_load_sp_reg(); if( c < MIN_REF_TYPE ) { /* really: int or float. And right now, only float */ amd64_get_storage( P_REG_RBX, b ); mov_mem_reg( P_REG_RBX, 0, P_REG_RBX ); mov_imm_mem( c, sp_reg, 0 ); mov_reg_mem( P_REG_RBX, sp_reg, 8 ); amd64_add_sp(1); } else { LABELS(); /* one of the refcounted types. */ amd64_get_storage( P_REG_RBX, b ); mov_mem_reg( P_REG_RBX, 0, P_REG_RBX ); test_reg( P_REG_RBX ); jnz(&label_A); amd64_push_int(0,1); jmp(&label_B); LABEL_A; add_mem_imm( P_REG_RBX, 0, 1 ); mov_imm_mem( c, sp_reg, 0 ); mov_reg_mem( P_REG_RBX, sp_reg, 8 ); amd64_add_sp(1); LABEL_B; } return;
1af0932012-06-22Per Hedbor  case F_ADD_LOCAL_INT:
f4107d2012-06-18Per Hedbor  case F_ADD_LOCAL_INT_AND_POP: { LABELS();
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
fd1b5a2012-06-19Henrik Grubbström (Grubba)  amd64_load_fp_reg();
f4107d2012-06-18Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG); add_reg_imm( ARG1_REG, b*sizeof(struct svalue) ); /* arg1 = dst arg2 = int */
2fd8052013-10-08Per Hedbor  mov_sval_type( ARG1_REG, P_REG_RAX ); cmp_reg32_imm( P_REG_RAX, PIKE_T_INT );
f4107d2012-06-18Per Hedbor  jne(&label_A); /* Fallback */
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( PIKE_T_INT, ARG1_REG, OFFSETOF(svalue, tu.t.type));
f4107d2012-06-18Per Hedbor  add_imm_mem( c, ARG1_REG,OFFSETOF(svalue,u.integer)); jno( &label_B); add_imm_mem( -c, ARG1_REG,OFFSETOF(svalue,u.integer));
1af0932012-06-22Per Hedbor  /* Overflow. Use C version */
f4107d2012-06-18Per Hedbor  LABEL_A; update_arg2(c); update_arg1(b);
1af0932012-06-22Per Hedbor  ins_f_byte(a); /* Push already done by C version. */ if( a == F_ADD_LOCAL_INT ) jmp( &label_C );
f4107d2012-06-18Per Hedbor  LABEL_B;
1af0932012-06-22Per Hedbor  if( a == F_ADD_LOCAL_INT ) { /* push the local. */ /* We know it's an integer (since we did not use the fallback) */
c33ce92012-06-28Per Hedbor  amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( ARG1_REG, OFFSETOF(svalue,u.integer), P_REG_RAX ); amd64_push_int_reg( P_REG_RAX );
1af0932012-06-22Per Hedbor  } LABEL_C;
f4107d2012-06-18Per Hedbor  return; }
1807c02014-07-15Per Hedbor  #if 0
a92d452014-08-18Per Hedbor  /* this is a: nonworking, and b: not really all that more efficient anyway.. */
1807c02014-07-15Per Hedbor  case F_APPLY_N:
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
1807c02014-07-15Per Hedbor  mov_imm_reg( APPLY_SVALUE_STRICT, ARG1_REG ); mov_imm_reg( c, ARG2_REG ); mov_ptr_reg( &((Pike_fp->context->prog->constants + b)->sval), ARG3_REG ); clear_reg( ARG4_REG ); amd64_call_c_opcode(mega_apply, I_UPDATE_SP); return; #endif case F_CALL_BUILTIN_N:
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
1807c02014-07-15Per Hedbor  mov_imm_reg( c, ARG1_REG ); amd64_call_c_opcode(Pike_compiler->new_program->constants[b].sval.u.efun->function, I_UPDATE_SP); return;
f4107d2012-06-18Per Hedbor  case F_ADD_LOCALS_AND_POP: { LABELS(); ins_debug_instr_prologue(a-F_OFFSET, b, 0);
fd1b5a2012-06-19Henrik Grubbström (Grubba)  amd64_load_fp_reg();
f4107d2012-06-18Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG); add_reg_imm( ARG1_REG, b*sizeof(struct svalue) ); add_reg_imm_reg( ARG1_REG,(c-b)*sizeof(struct svalue), ARG2_REG ); /* arg1 = dst arg2 = src */
2fd8052013-10-08Per Hedbor  mov_sval_type( ARG1_REG, P_REG_RAX ); mov_sval_type( ARG2_REG, P_REG_RBX ); shl_reg_imm( P_REG_RAX, 8 ); add_reg_reg( P_REG_RAX, P_REG_RBX ); cmp_reg32_imm( P_REG_RAX, (PIKE_T_INT<<8) | PIKE_T_INT );
f4107d2012-06-18Per Hedbor  jne(&label_A); /* Fallback */
2fd8052013-10-08Per Hedbor  mov_mem_reg( ARG2_REG, OFFSETOF(svalue,u.integer), P_REG_RAX ); add_reg_mem( P_REG_RAX, ARG1_REG, OFFSETOF(svalue,u.integer));
f4107d2012-06-18Per Hedbor  jo( &label_A); /* Clear subtype */
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem( PIKE_T_INT, ARG1_REG,OFFSETOF(svalue, tu.t.type));
2fd8052013-10-08Per Hedbor  mov_reg_mem( P_REG_RAX, ARG1_REG, OFFSETOF(svalue,u.integer));
f4107d2012-06-18Per Hedbor  jmp( &label_B ); LABEL_A; update_arg2(c); update_arg1(b); ins_f_byte(a); /* Will call C version */ LABEL_B; return; } case F_ASSIGN_LOCAL_NUMBER_AND_POP:
c33ce92012-06-28Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
fd1b5a2012-06-19Henrik Grubbström (Grubba)  amd64_load_fp_reg();
f4107d2012-06-18Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG); add_reg_imm( ARG1_REG,b*sizeof(struct svalue) );
2fd8052013-10-08Per Hedbor  mov_reg_reg( ARG1_REG, P_REG_RBX );
f4107d2012-06-18Per Hedbor  amd64_free_svalue(ARG1_REG, 0);
2fd8052013-10-08Per Hedbor  mov_imm_mem(c, P_REG_RBX, OFFSETOF(svalue, u.integer));
a5cf652014-06-24Henrik Grubbström (Grubba)  mov_imm_mem32(PIKE_T_INT, P_REG_RBX, OFFSETOF(svalue, tu.t.type));
f4107d2012-06-18Per Hedbor  return;
f7202c2013-06-12Per Hedbor  case F_LOCAL_2_GLOBAL: ins_debug_instr_prologue(a-F_OFFSET, b, 0); amd64_load_fp_reg(); amd64_load_sp_reg(); mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), ARG3_REG ); add_reg_imm( ARG3_REG, c*sizeof(struct svalue) ); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, current_object), ARG1_REG); mov_mem_reg(fp_reg, OFFSETOF(pike_frame,context), ARG2_REG); mov_mem16_reg(ARG2_REG, OFFSETOF(inherit, identifier_level), ARG2_REG); add_reg_imm( ARG2_REG, b ); amd64_call_c_function( object_low_set_index ); return;
7e8ef52014-08-15Per Hedbor  case F_LEXICAL_LOCAL: if( c < 5 ) {
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
7e8ef52014-08-15Per Hedbor  amd64_load_fp_reg(); mov_reg_reg( fp_reg, P_REG_RAX ); while(c--) mov_mem_reg( P_REG_RAX, OFFSETOF(pike_frame,scope), P_REG_RAX ); mov_mem_reg( P_REG_RAX, OFFSETOF(pike_frame,locals), P_REG_RAX ); add_reg_imm_reg( P_REG_RAX, b*sizeof(struct svalue), P_REG_RBX ); amd64_push_svaluep( P_REG_RBX ); return; } /* use c version. Could have a loop version here. */ break;
dbce5f2014-08-15Per Hedbor  case F_LEXICAL_LOCAL_LVALUE: if( c < 5 ) {
a92d452014-08-18Per Hedbor  ins_debug_instr_prologue(a-F_OFFSET, b, c);
dbce5f2014-08-15Per Hedbor  amd64_load_fp_reg(); amd64_load_sp_reg(); mov_reg_reg( fp_reg, P_REG_RAX ); while(c--) mov_mem_reg( P_REG_RAX, OFFSETOF(pike_frame,scope), P_REG_RAX ); mov_mem_reg( P_REG_RAX, OFFSETOF(pike_frame,locals), P_REG_RAX ); add_reg_imm_reg( P_REG_RAX, b*sizeof(struct svalue), P_REG_RAX ); mov_imm_mem( T_SVALUE_PTR, sp_reg, OFFSETOF(svalue, tu.t.type)); mov_reg_mem( P_REG_RAX, sp_reg, OFFSETOF(svalue,u.lval) ); mov_imm_mem( T_VOID, sp_reg, OFFSETOF(svalue, tu.t.type)+sizeof(struct svalue)); amd64_add_sp( 2 ); return; } /* use c version. Could have a loop version here. */ break;
b505a72012-06-13Per Hedbor  case F_LOCAL_2_LOCAL:
5c82f42012-06-13Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, c);
b505a72012-06-13Per Hedbor  if( b != c ) { amd64_load_fp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg( fp_reg, OFFSETOF(pike_frame, locals), P_REG_RBX ); add_reg_imm( P_REG_RBX, b*sizeof(struct svalue) );
b505a72012-06-13Per Hedbor  /* RBX points to dst. */
2fd8052013-10-08Per Hedbor  amd64_free_svalue( P_REG_RBX, 0 );
373ff52012-06-13Henrik Grubbström (Grubba)  /* assign rbx[0] = rbx[c-b] */
2fd8052013-10-08Per Hedbor  mov_mem_reg( P_REG_RBX, (c-b)*sizeof(struct svalue), P_REG_RAX ); mov_mem_reg( P_REG_RBX, (c-b)*sizeof(struct svalue)+8, P_REG_RCX ); mov_reg_mem( P_REG_RAX, P_REG_RBX, 0 ); mov_reg_mem( P_REG_RCX, P_REG_RBX, 8 ); amd64_ref_svalue( P_REG_RBX, 1 );
b505a72012-06-13Per Hedbor  } return;
9030f62011-05-26Henrik Grubbström (Grubba)  case F_2_LOCALS:
719c3a2012-06-12Per Hedbor #if 1
5c82f42012-06-13Henrik Grubbström (Grubba)  ins_debug_instr_prologue(a-F_OFFSET, b, c);
719c3a2012-06-12Per Hedbor  amd64_load_fp_reg(); amd64_load_sp_reg();
2fd8052013-10-08Per Hedbor  mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), P_REG_R8); add_reg_imm( P_REG_R8, b*sizeof(struct svalue) ); amd64_push_svaluep(P_REG_R8); add_reg_imm( P_REG_R8, (c-b)*sizeof(struct svalue) ); amd64_push_svaluep(P_REG_R8);
719c3a2012-06-12Per Hedbor #else ins_f_byte_with_arg( F_LOCAL, b ); ins_f_byte_with_arg( F_LOCAL, c ); #endif
9030f62011-05-26Henrik Grubbström (Grubba)  return;
719c3a2012-06-12Per Hedbor 
9853bd2012-06-13Henrik Grubbström (Grubba)  case F_FILL_STACK: { LABELS(); if (!b) return; ins_debug_instr_prologue(a-F_OFFSET, b, c); amd64_load_fp_reg(); amd64_load_sp_reg(); mov_mem_reg(fp_reg, OFFSETOF(pike_frame, locals), ARG1_REG); add_reg_imm(ARG1_REG, b*sizeof(struct svalue)); jmp(&label_A); LABEL_B; amd64_push_int(0, c); LABEL_A; cmp_reg_reg(sp_reg, ARG1_REG);
996c482013-06-19Henrik Grubbström (Grubba)  jl(&label_B);
9853bd2012-06-13Henrik Grubbström (Grubba)  } return;
5151e52012-06-10Henrik Grubbström (Grubba)  case F_INIT_FRAME: ins_debug_instr_prologue(a-F_OFFSET, b, c); amd64_load_fp_reg();
719c3a2012-06-12Per Hedbor  if(OFFSETOF(pike_frame, num_locals) != OFFSETOF(pike_frame, num_args)-2 ) Pike_fatal("This code does not with unless num_args\n" "directly follows num_locals in struct pike_frame\n"); mov_imm_mem32( (b<<16)|c, fp_reg, OFFSETOF(pike_frame, num_locals));
5151e52012-06-10Henrik Grubbström (Grubba)  return;
eb42a12011-05-11Henrik Grubbström (Grubba)  }
d1fa802011-05-09Henrik Grubbström (Grubba)  update_arg2(c); update_arg1(b); ins_f_byte(a);
7990392006-04-27Tor Edvardsson }
54a26b2011-05-11Henrik Grubbström (Grubba) int amd64_ins_f_jump_with_2_args(unsigned int op, INT32 a, INT32 b, int backward_jump) { if (!(instrs[op - F_OFFSET].flags & I_BRANCH)) return -1; maybe_update_pc(); update_arg2(b); update_arg1(a); return amd64_ins_f_jump(op, backward_jump); } void amd64_update_f_jump(INT32 offset, INT32 to_offset) { upd_pointer(offset, to_offset - offset - 4); } INT32 amd64_read_f_jump(INT32 offset) { return read_pointer(offset) + offset + 4; }