memory. All functions writing values into the data area return the offset
relative the begin of the code area (start of procedure).
- $Id: codegen-common.c 7576 2007-03-25 20:39:17Z ajordan $
-
*/
# include "codegen.h"
#endif
-#if defined(__ARM__)
-/* this is required for REG_SPLIT */
-# include "md-abi.h"
-#endif
-
#include "mm/memory.h"
#include "toolbox/avl.h"
#include "toolbox/logging.h"
#include "native/jni.h"
+#include "native/localref.h"
#include "native/native.h"
-#if defined(ENABLE_THREADS)
-# include "threads/native/threads.h"
-#endif
+#include "threads/threads-common.h"
#include "vm/exceptions.h"
#include "vm/stringlocal.h"
#include "vm/jit/emit-common.h"
#include "vm/jit/jit.h"
#include "vm/jit/md.h"
+#include "vm/jit/patcher-common.h"
#include "vm/jit/replace.h"
+#if defined(ENABLE_SSA)
+# include "vm/jit/optimizing/lsra.h"
+# include "vm/jit/optimizing/ssa.h"
+#endif
#include "vm/jit/stacktrace.h"
#if defined(ENABLE_INTRP)
# include "vmcore/statistics.h"
+#if defined(ENABLE_VMLOG)
+#include <vmlog_cacao.h>
+#endif
+
+#include "show.h"
/* in this tree we store all method addresses *********************************/
-static avl_tree *methodtree = NULL;
-static s4 methodtree_comparator(const void *pc, const void *element);
+static avl_tree_t *methodtree = NULL;
+static s4 methodtree_comparator(const void *treenode, const void *node);
/* codegen_init ****************************************************************
avl_insert(methodtree, mte);
#endif /* defined(ENABLE_JIT) */
+
}
+
}
/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
+ cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
cd->linenumberreferences = NULL;
cd->linenumbertablesizepos = 0;
cd->linenumbertablestartpos = 0;
cd->linenumbertab = 0;
-
- cd->maxstack = m->maxstack;
-
-#if defined(ENABLE_THREADS)
- cd->threadcritcurrent.next = NULL;
- cd->threadcritcount = 0;
-#endif
}
/* cd->patchrefs = list_create_dump(OFFSET(patchref, linkage)); */
cd->patchrefs = NULL;
cd->brancheslabel = list_create_dump(OFFSET(branch_label_ref_t, linkage));
+ cd->listcritical = list_create_dump(OFFSET(critical_section_ref_t, linkage));
cd->linenumberreferences = NULL;
cd->linenumbertablesizepos = 0;
cd->linenumbertablestartpos = 0;
cd->linenumbertab = 0;
-#if defined(ENABLE_THREADS)
- cd->threadcritcurrent.next = NULL;
- cd->threadcritcount = 0;
-#endif
-
/* We need to clear the mpc and the branch references from all
basic blocks as they will definitely change. */
bptr->branchrefs = NULL;
}
+ /* We need to clear all the patcher references from the codeinfo
+ since they will all be regenerated */
+
+ patcher_list_reset(code);
+
#if defined(ENABLE_REPLACEMENT)
code->rplpoints = NULL;
code->rplpointcount = 0;
if (CODEGENDATA_HAS_FLAG_ERROR(cd)) {
/* check for long-branches flag, if it is set we recompile the
method */
+
+#if !defined(NDEBUG)
+ if (compileverbose)
+ log_message_method("Re-generating code: ", jd->m);
+#endif
+
/* XXX maybe we should tag long-branches-methods for recompilation */
if (CODEGENDATA_HAS_FLAG_LONGBRANCHES(cd)) {
else {
vm_abort("codegen_generate: unknown error occurred during codegen_emit: flags=%x\n", cd->flags);
}
+
+#if !defined(NDEBUG)
+ if (compileverbose)
+ log_message_method("Re-generating code done: ", jd->m);
+#endif
}
/* reallocate the memory and finish the code generation */
cd->mcodeptr = cd->mcodebase + (cd->mcodeptr - oldmcodebase);
-#if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(ENABLE_INTRP)
+#if defined(__I386__) || defined(__MIPS__) || defined(__X86_64__) || defined(__M68K__) || defined(ENABLE_INTRP) \
+ || defined(__SPARC_64__)
/* adjust the pointer to the last patcher position */
if (cd->lastmcodeptr != NULL)
void codegen_branch_label_add(codegendata *cd, s4 label, s4 condition, s4 reg, u4 options)
{
- list *list;
+ list_t *list;
branch_label_ref_t *br;
s4 mpc;
if (opt_shownops)
PATCHER_NOPS;
-#if defined(ENABLE_JIT) && (defined(__I386__) || defined(__MIPS__) || defined(__X86_64__))
+#if defined(ENABLE_JIT) && (defined(__I386__) || defined(__M68K__) || defined(__MIPS__) \
+ || defined(__SPARC_64__) || defined(__X86_64__))
+
/* On some architectures the patcher stub call instruction might
be longer than the actual instruction generated. On this
architectures we store the last patcher call position and after
the basic block code generation is completed, we check the
range and maybe generate some nop's. */
+ /* The nops are generated in codegen_emit in each codegen */
cd->lastmcodeptr = cd->mcodeptr + PATCHER_CALL_SIZE;
#endif
}
+/* codegen_critical_section_new ************************************************
+
+   Allocates a new critical-section reference and adds it to the
+   critical-section list.
+
+*******************************************************************************/
+
+#if defined(ENABLE_THREADS)
+void codegen_critical_section_new(codegendata *cd)
+{
+ list_t *list;
+ critical_section_ref_t *csr;
+ s4 mpc;
+
+ /* get the critical section list */
+
+ list = cd->listcritical;
+ 
+ /* calculate the current mpc */
+
+ mpc = cd->mcodeptr - cd->mcodebase;
+
+ /* NOTE(review): DNEW allocates from the per-compilation dump area;
+    the reference is only needed until codegen_critical_section_finish
+    converts it into a persistent tree node. */
+
+ csr = DNEW(critical_section_ref_t);
+
+ /* We only can set restart right now, as start and end are set by
+ the following, corresponding functions. */
+
+ csr->start = -1;
+ csr->end = -1;
+ csr->restart = mpc;
+
+ /* add the critical-section reference to the list */
+
+ list_add_last_unsynced(list, csr);
+}
+#endif
+
+
+/* codegen_critical_section_start **********************************************
+
+   Set the start-point of the current critical section (which is the
+   last element of the list).
+
+*******************************************************************************/
+
+#if defined(ENABLE_THREADS)
+void codegen_critical_section_start(codegendata *cd)
+{
+ list_t *list;
+ critical_section_ref_t *csr;
+ s4 mpc;
+
+ /* get the critical section list */
+
+ list = cd->listcritical;
+
+ /* calculate the current mpc (offset of the next emitted instruction) */
+
+ mpc = cd->mcodeptr - cd->mcodebase;
+
+ /* get the current critical section */
+
+ csr = list_last_unsynced(list);
+
+ /* set the start point (asserts it has not been set before) */
+
+ assert(csr->start == -1);
+
+ csr->start = mpc;
+}
+#endif
+
+
+/* codegen_critical_section_end ************************************************
+
+   Set the end-point of the current critical section (which is the
+   last element of the list).
+
+*******************************************************************************/
+
+#if defined(ENABLE_THREADS)
+void codegen_critical_section_end(codegendata *cd)
+{
+ list_t *list;
+ critical_section_ref_t *csr;
+ s4 mpc;
+
+ /* get the critical section list */
+
+ list = cd->listcritical;
+
+ /* calculate the current mpc (offset of the next emitted instruction) */
+
+ mpc = cd->mcodeptr - cd->mcodebase;
+
+ /* get the current critical section */
+
+ csr = list_last_unsynced(list);
+
+ /* set the end point (asserts it has not been set before) */
+
+ assert(csr->end == -1);
+
+ csr->end = mpc;
+}
+#endif
+
+
+/* codegen_critical_section_finish *********************************************
+
+   Finish the critical sections, create the critical section nodes for
+   the AVL tree and insert them into the tree.
+
+*******************************************************************************/
+
+#if defined(ENABLE_THREADS)
+static void codegen_critical_section_finish(jitdata *jd)
+{
+ codeinfo *code;
+ codegendata *cd;
+ list_t *list;
+ critical_section_ref_t *csr;
+ critical_section_node_t *csn;
+
+ /* get required compiler data */
+
+ code = jd->code;
+ cd = jd->cd;
+
+ /* get the critical section list */
+
+ list = cd->listcritical;
+
+ /* iterate over all critical sections */
+
+ for (csr = list_first_unsynced(list); csr != NULL;
+ csr = list_next_unsynced(list, csr)) {
+ /* check if all points are set */
+
+ assert(csr->start != -1);
+ assert(csr->end != -1);
+ assert(csr->restart != -1);
+
+ /* allocate tree node */
+ /* NOTE(review): NEW, not DNEW — presumably the node must outlive
+    the per-compilation dump area; confirm against memory.h. */
+
+ csn = NEW(critical_section_node_t);
+
+ /* convert the relative mcode offsets into absolute code addresses */
+
+ csn->start = code->entrypoint + csr->start;
+ csn->end = code->entrypoint + csr->end;
+ csn->restart = code->entrypoint + csr->restart;
+
+ /* insert into the tree */
+
+ critical_section_register(csn);
+ }
+}
+#endif
+
+
/* methodtree_comparator *******************************************************
Comparator function used for the AVL tree of methods.
+ ARGUMENTS:
+ treenode....the node from the tree
+ node........the node to compare to the tree-node
+
*******************************************************************************/
-static s4 methodtree_comparator(const void *pc, const void *element)
+static s4 methodtree_comparator(const void *treenode, const void *node)
{
methodtree_element *mte;
methodtree_element *mtepc;
- mte = (methodtree_element *) element;
- mtepc = (methodtree_element *) pc;
+ mte = (methodtree_element *) treenode;
+ mtepc = (methodtree_element *) node;
/* compare both startpc and endpc of pc, even if they have the same value,
otherwise the avl_probe sometimes thinks the element is already in the
if (mte == NULL) {
/* No method was found. Let's dump a stacktrace. */
+#if defined(ENABLE_VMLOG)
+ vmlog_cacao_signl("SIGSEGV");
+#endif
+
log_println("We received a SIGSEGV and tried to handle it, but we were");
log_println("unable to find a Java method at:");
log_println("");
log_println("PC=0x%08x", pc);
#endif
log_println("");
+ assert(0);
log_println("Dumping the current stacktrace:");
- stacktrace_dump_trace(THREADOBJECT);
+#if defined(ENABLE_THREADS)
+ /* XXX michi: This should be available even without threads! */
+ threads_print_stacktrace();
+#endif
vm_abort("Exiting...");
}
#endif
s4 alignedmcodelen;
jumpref *jr;
+ patchref_t *pr;
u1 *epoint;
- s4 extralen;
s4 alignedlen;
/* get required compiler data */
mcodelen = (s4) (cd->mcodeptr - cd->mcodebase);
-#if defined(ENABLE_THREADS)
- extralen = sizeof(critical_section_node_t) * cd->threadcritcount;
-#else
- extralen = 0;
-#endif
-
#if defined(ENABLE_STATISTICS)
if (opt_stat) {
count_code_len += mcodelen;
/* allocate new memory */
code->mcodelength = mcodelen + cd->dseglen;
- code->mcode = CNEW(u1, alignedlen + extralen);
+ code->mcode = CNEW(u1, alignedlen);
/* set the entrypoint of the method */
*((ptrint *) ((ptrint) epoint + cd->linenumbertablesizepos)) = lrtlen;
}
+ /* patcher resolving */
+
+ pr = list_first_unsynced(code->patchers);
+ while (pr) {
+ pr->mpc += (ptrint) epoint;
+ pr->datap = (ptrint) (pr->disp + epoint);
+ pr = list_next_unsynced(code->patchers, pr);
+ }
+
#if defined(ENABLE_REPLACEMENT)
/* replacement point resolving */
{
int i;
rplpoint *rp;
- code->replacementstubs += (ptrint) epoint;
-
rp = code->rplpoints;
for (i=0; i<code->rplpointcount; ++i, ++rp) {
rp->pc = (u1*) ((ptrint) epoint + (ptrint) rp->pc);
#endif
#if defined(ENABLE_THREADS)
- {
- critical_section_node_t *n = (critical_section_node_t *) ((ptrint) code->mcode + alignedlen);
- s4 i;
- codegen_critical_section_t *nt = cd->threadcrit;
-
- for (i = 0; i < cd->threadcritcount; i++) {
- n->mcodebegin = (u1 *) (ptrint) code->mcode + nt->mcodebegin;
- n->mcodeend = (u1 *) (ptrint) code->mcode + nt->mcodeend;
- n->mcoderestart = (u1 *) (ptrint) code->mcode + nt->mcoderestart;
- critical_register_critical_section(n);
- n++;
- nt = nt->next;
- }
- }
+ /* create critical sections */
+
+ codegen_critical_section_finish(jd);
#endif
/* flush the instruction and data caches */
}
-/* codegen_createnativestub ****************************************************
+/* codegen_generate_stub_compiler **********************************************
- Wrapper for createnativestub.
+ Wrapper for codegen_emit_stub_compiler.
+
+ Returns:
+ pointer to the compiler stub code.
+
+*******************************************************************************/
+
+u1 *codegen_generate_stub_compiler(methodinfo *m)
+{
+ jitdata *jd;
+ codegendata *cd;
+ ptrint *d; /* pointer to data memory */
+ u1 *c; /* pointer to code memory */
+ s4 dumpsize;
+
+ /* mark dump memory */
+
+ dumpsize = dump_size();
+
+ /* allocate required data structures */
+
+ jd = DNEW(jitdata);
+
+ jd->m = m;
+ jd->cd = DNEW(codegendata);
+ jd->flags = 0;
+
+ /* get required compiler data */
+
+ cd = jd->cd;
+
+ /* allocate code memory (three data words followed by the stub code) */
+
+ c = CNEW(u1, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* set pointers correctly */
+
+ d = (ptrint *) c;
+
+ cd->mcodebase = c;
+
+ /* advance past the data words; code emission starts here */
+
+ c = c + 3 * SIZEOF_VOID_P;
+ cd->mcodeptr = c;
+
+ /* NOTE: The codeinfo pointer is actually a pointer to the
+ methodinfo (this fakes a codeinfo structure). */
+
+ d[0] = (ptrint) asm_call_jit_compiler;
+ d[1] = (ptrint) m;
+ d[2] = (ptrint) &d[1]; /* fake code->m */
+
+ /* call the emit function */
+
+ codegen_emit_stub_compiler(jd);
+
+#if defined(ENABLE_STATISTICS)
+ if (opt_stat)
+ count_cstub_len += 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE;
+#endif
+
+ /* flush caches */
+
+ md_cacheflush(cd->mcodebase, 3 * SIZEOF_VOID_P + COMPILERSTUB_CODESIZE);
+
+ /* release dump memory */
+
+ dump_release(dumpsize);
+
+ /* return the compiler stub code (entry point after the data words) */
+
+ return c;
+}
+
+
+/* codegen_generate_stub_native ************************************************
+
+ Wrapper for codegen_emit_stub_native.
Returns:
the codeinfo representing the stub code.
*******************************************************************************/
-codeinfo *codegen_createnativestub(functionptr f, methodinfo *m)
+codeinfo *codegen_generate_stub_native(methodinfo *m, functionptr f)
{
jitdata *jd;
codeinfo *code;
#if defined(ENABLE_JIT)
# if defined(ENABLE_INTRP)
if (opt_intrp)
- code->entrypoint = intrp_createnativestub(f, jd, nmd);
+ intrp_createnativestub(f, jd, nmd);
else
# endif
- code->entrypoint = createnativestub(f, jd, nmd);
+ codegen_emit_stub_native(jd, nmd, f);
#else
- code->entrypoint = intrp_createnativestub(f, jd, nmd);
+ intrp_createnativestub(f, jd, nmd);
#endif
+ /* reallocate the memory and finish the code generation */
+
+ codegen_finish(jd);
+
#if defined(ENABLE_STATISTICS)
+ /* must be done after codegen_finish() */
+
if (opt_stat)
- count_nstub_len += code->mcodelength;
+ size_stub_native += code->mcodelength;
#endif
#if !defined(NDEBUG)
/* disassemble native stub */
if (opt_shownativestub) {
+#if defined(ENABLE_DEBUG_FILTER)
+ if (m->filtermatches & SHOW_FILTER_FLAG_SHOW_METHOD)
+#endif
+ {
#if defined(ENABLE_DISASSEMBLER)
- codegen_disassemble_nativestub(m,
- (u1 *) (ptrint) code->entrypoint,
- (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
+ codegen_disassemble_nativestub(m,
+ (u1 *) (ptrint) code->entrypoint,
+ (u1 *) (ptrint) code->entrypoint + (code->mcodelength - jd->cd->dseglen));
#endif
- /* show data segment */
+ /* show data segment */
- if (opt_showddatasegment)
- dseg_display(jd);
+ if (opt_showddatasegment)
+ dseg_display(jd);
+ }
}
#endif /* !defined(NDEBUG) */
stackframeinfo *sfi;
localref_table *lrt;
+ STATISTICS(count_calls_java_to_native++);
+
/* get data structures from stack */
sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
stacktrace_create_native_stackframeinfo(sfi, pv, sp, ra);
-#if defined(ENABLE_JAVASE)
+#if defined(ENABLE_JNI)
/* add current JNI local references table to this thread */
- lrt->capacity = LOCALREFTABLE_CAPACITY;
- lrt->used = 0;
- lrt->localframes = 1;
- lrt->prev = LOCALREFTABLE;
-
- /* clear the references array (memset is faster the a for-loop) */
-
- MSET(lrt->refs, 0, java_objectheader*, LOCALREFTABLE_CAPACITY);
-
- LOCALREFTABLE = lrt;
+ localref_table_add(lrt);
#endif
}
*******************************************************************************/
-java_objectheader *codegen_finish_native_call(u1 *datasp)
+java_object_t *codegen_finish_native_call(u1 *datasp)
{
- stackframeinfo *sfi;
- stackframeinfo **psfi;
- localref_table *lrt;
- localref_table *plrt;
- s4 localframes;
- java_objectheader *e;
+ stackframeinfo *sfi;
+ stackframeinfo **psfi;
+ java_handle_t *e;
/* get data structures from stack */
sfi = (stackframeinfo *) (datasp - sizeof(stackframeinfo));
- lrt = (localref_table *) (datasp - sizeof(stackframeinfo) -
- sizeof(localref_table));
/* remove current stackframeinfo from chain */
- psfi = STACKFRAMEINFO;
+ psfi = &STACKFRAMEINFO;
*psfi = sfi->prev;
-#if defined(ENABLE_JAVASE)
- /* release JNI local references tables for this thread */
-
- lrt = LOCALREFTABLE;
-
- /* release all current local frames */
-
- for (localframes = lrt->localframes; localframes >= 1; localframes--) {
- /* get previous frame */
-
- plrt = lrt->prev;
-
- /* Clear all reference entries (only for tables allocated on
- the Java heap). */
-
- if (localframes > 1)
- MSET(&lrt->refs[0], 0, java_objectheader*, lrt->capacity);
+#if defined(ENABLE_JNI)
+ /* release JNI local references table for this thread */
- lrt->prev = NULL;
-
- /* set new local references table */
-
- lrt = plrt;
- }
-
- /* now store the previous local frames in the thread structure */
-
- LOCALREFTABLE = lrt;
+ localref_frame_pop_all();
+ localref_table_remove();
#endif
/* get the exception and return it */
spilled) this function returns tempregnum. If not already done,
regoff and flags are set in the stack location.
- On ARM we have to check if a long/double variable is splitted
- across reg/stack (HIGH_REG == REG_SPLIT). We return the actual
- register of v for LOW_REG and the tempregnum for HIGH_REG in such
- cases. (michi 2005/07/24)
-
*******************************************************************************/
s4 codegen_reg_of_var(u2 opcode, varinfo *v, s4 tempregnum)
return tempregnum;
#endif
- if (!(v->flags & INMEMORY)) {
-#if defined(__ARM__) && defined(__ARMEL__)
- if (IS_2_WORD_TYPE(v->type) && (GET_HIGH_REG(v->vv.regoff) == REG_SPLIT))
- return PACK_REGS(GET_LOW_REG(v->vv.regoff),
- GET_HIGH_REG(tempregnum));
-#endif
-#if defined(__ARM__) && defined(__ARMEB__)
- if (IS_2_WORD_TYPE(v->type) && (GET_LOW_REG(v->vv.regoff) == REG_SPLIT))
- return PACK_REGS(GET_LOW_REG(tempregnum),
- GET_HIGH_REG(v->vv.regoff));
-#endif
+ if (!(v->flags & INMEMORY))
return v->vv.regoff;
- }
-
-#if defined(ENABLE_STATISTICS)
- if (opt_stat)
- count_spills_read++;
-#endif
return tempregnum;
}
+
/* codegen_reg_of_dst **********************************************************
This function determines a register, to which the result of an
spilled) this function returns tempregnum. If not already done,
regoff and flags are set in the stack location.
- On ARM we have to check if a long/double variable is splitted
- across reg/stack (HIGH_REG == REG_SPLIT). We return the actual
- register of dst.var for LOW_REG and the tempregnum for HIGH_REG in such
- cases. (michi 2005/07/24)
-
*******************************************************************************/
s4 codegen_reg_of_dst(jitdata *jd, instruction *iptr, s4 tempregnum)
}
-#if defined(ENABLE_THREADS)
-void codegen_threadcritrestart(codegendata *cd, int offset)
-{
- cd->threadcritcurrent.mcoderestart = offset;
-}
+/* codegen_emit_phi_moves ****************************************************
+
+ Emits phi moves at the end of the basicblock.
+*******************************************************************************/
-void codegen_threadcritstart(codegendata *cd, int offset)
+#if defined(ENABLE_SSA)
+void codegen_emit_phi_moves(jitdata *jd, basicblock *bptr)
{
- cd->threadcritcurrent.mcodebegin = offset;
-}
+ int lt_d,lt_s,i;
+ lsradata *ls;
+ codegendata *cd;
+ varinfo *s, *d;
+ instruction tmp_i;
+
+ cd = jd->cd;
+ ls = jd->ls;
+ MCODECHECK(512);
-void codegen_threadcritstop(codegendata *cd, int offset)
-{
- cd->threadcritcurrent.next = cd->threadcrit;
- cd->threadcritcurrent.mcodeend = offset;
- cd->threadcrit = DNEW(codegen_critical_section_t);
- *(cd->threadcrit) = cd->threadcritcurrent;
- cd->threadcritcount++;
-}
+ /* Moves from phi functions with highest indices have to be */
+ /* inserted first, since this is the order used for */
+ /* conflict resolution. */
+
+ for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
+ lt_d = ls->phi_moves[bptr->nr][i][0];
+ lt_s = ls->phi_moves[bptr->nr][i][1];
+#if defined(SSA_DEBUG_VERBOSE)
+ if (compileverbose)
+ printf("BB %3i Move %3i <- %3i ", bptr->nr, lt_d, lt_s);
#endif
+ if (lt_s == UNUSED) {
+#if defined(SSA_DEBUG_VERBOSE)
+ if (compileverbose)
+ printf(" ... not processed \n");
+#endif
+ continue;
+ }
+
+ d = VAR(ls->lifetime[lt_d].v_index);
+ s = VAR(ls->lifetime[lt_s].v_index);
+
+
+ if (d->type == -1) {
+#if defined(SSA_DEBUG_VERBOSE)
+ if (compileverbose)
+ printf("...returning - phi lifetimes where joined\n");
+#endif
+ return;
+ }
+
+ if (s->type == -1) {
+#if defined(SSA_DEBUG_VERBOSE)
+ if (compileverbose)
+ printf("...returning - phi lifetimes where joined\n");
+#endif
+ return;
+ }
+
+ tmp_i.opc = 0;
+ tmp_i.s1.varindex = ls->lifetime[lt_s].v_index;
+ tmp_i.dst.varindex = ls->lifetime[lt_d].v_index;
+ emit_copy(jd, &tmp_i);
+
+#if defined(SSA_DEBUG_VERBOSE)
+ if (compileverbose) {
+ if (IS_INMEMORY(d->flags) && IS_INMEMORY(s->flags)) {
+ /* mem -> mem */
+ printf("M%3i <- M%3i",d->vv.regoff,s->vv.regoff);
+ }
+ else if (IS_INMEMORY(s->flags)) {
+ /* mem -> reg */
+ printf("R%3i <- M%3i",d->vv.regoff,s->vv.regoff);
+ }
+ else if (IS_INMEMORY(d->flags)) {
+ /* reg -> mem */
+ printf("M%3i <- R%3i",d->vv.regoff,s->vv.regoff);
+ }
+ else {
+ /* reg -> reg */
+ printf("R%3i <- R%3i",d->vv.regoff,s->vv.regoff);
+ }
+ printf("\n");
+ }
+#endif /* defined(SSA_DEBUG_VERBOSE) */
+ }
+}
+#endif /* defined(ENABLE_SSA) */
+
/*