Fix 2 trigger corner cases. (#229)
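Two corner cases are addressed in the dispatch loop: PC_SERIALIZE_AFTER no longer terminates processor_t::step() (the new PC_SERIALIZE_WFI value does that instead), and the single-step check moves to the top of the loop so it only fires once any pending serialization has completed and no debug-mode entry is already pending. The icache fast path also stops refilling at the old miss: label; on a tag mismatch it breaks back to the slow path, and on a hit it walks to the following entry through a next pointer. A minimal sketch of the entry layout that walk assumes is below; the real definitions live in riscv/decode.h and riscv/mmu.h, which this diff does not touch, so names and field order here are illustrative only.

    // Sketch (assumption) of the icache entry the new ICACHE_ACCESS() fast
    // path relies on; see riscv/mmu.h for the actual definition.
    #include <cstdint>

    typedef uint64_t reg_t;          // stand-in for the real reg_t from decode.h
    struct insn_fetch_t { /* decoded instruction + its execute function */ };

    struct icache_entry_t {
      reg_t tag;                     // pc the entry was filled for; a mismatch
                                     // sends the fast path back to the slow path
      struct icache_entry_t* next;   // successor entry, so straight-line code
                                     // (including compressed instructions) chains
                                     // without recomputing icache_index(pc)
      insn_fetch_t data;             // handed to execute_insn() on a hit
    };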
diff --git a/riscv/execute.cc b/riscv/execute.cc
index 878893c80d18b840ee985e4c792004bd7304eb21..e639e90462cf9063404f5cfde99b712ad201b94f 100644
--- a/riscv/execute.cc
+++ b/riscv/execute.cc
@@ -2,7 +2,6 @@
 
 #include "processor.h"
 #include "mmu.h"
-#include "sim.h"
 #include <cassert>
 
 
@@ -77,7 +76,7 @@ static reg_t execute_insn(processor_t* p, reg_t pc, insn_fetch_t fetch)
 {
   commit_log_stash_privilege(p);
   reg_t npc = fetch.func(p, fetch.insn, pc);
-  if (!invalid_pc(npc)) {
+  if (npc != PC_SERIALIZE_BEFORE) {
     commit_log_print_insn(p->get_state(), pc, fetch.insn);
     p->update_histogram(pc);
   }
@@ -110,7 +109,8 @@ void processor_t::step(size_t n)
      if (unlikely(invalid_pc(pc))) { \
        switch (pc) { \
          case PC_SERIALIZE_BEFORE: state.serialized = true; break; \
-         case PC_SERIALIZE_AFTER: n = ++instret; break; \
+         case PC_SERIALIZE_AFTER: ++instret; break; \
+         case PC_SERIALIZE_WFI: n = ++instret; break; \
          default: abort(); \
        } \
        pc = state.pc; \
@@ -128,6 +128,15 @@ void processor_t::step(size_t n)
       {
         while (instret < n)
         {
+          if (unlikely(!state.serialized && state.single_step == state.STEP_STEPPED)) {
+            state.single_step = state.STEP_NONE;
+            if (state.dcsr.cause == DCSR_CAUSE_NONE) {
+              enter_debug_mode(DCSR_CAUSE_STEP);
+              // enter_debug_mode changed state.pc, so we can't just continue.
+              break;
+            }
+          }
+
           if (unlikely(state.single_step == state.STEP_STEPPING)) {
             state.single_step = state.STEP_STEPPED;
           }
@@ -136,17 +145,9 @@ void processor_t::step(size_t n)
           if (debug && !state.serialized)
             disasm(fetch.insn);
           pc = execute_insn(this, pc, fetch);
-          bool serialize_before = (pc == PC_SERIALIZE_BEFORE);
 
           advance_pc();
 
-          if (unlikely(state.single_step == state.STEP_STEPPED) && !serialize_before) {
-            state.single_step = state.STEP_NONE;
-            enter_debug_mode(DCSR_CAUSE_STEP);
-            // enter_debug_mode changed state.pc, so we can't just continue.
-            break;
-          }
-
           if (unlikely(state.pc >= DEBUG_ROM_ENTRY &&
                        state.pc < DEBUG_END)) {
             // We're waiting for the debugger to tell us something.
@@ -171,13 +172,6 @@ void processor_t::step(size_t n)
         //
         // According to Andrew Waterman's recollection, this optimization
         // resulted in approximately a 2x performance increase.
-        //
-        // If there is support for compressed instructions, the mmu and the
-        // switch statement get more complicated. Each branch target is stored
-        // in the index corresponding to mmu->icache_index(), but consecutive
-        // non-branching instructions are stored in consecutive indices even if
-        // mmu->icache_index() specifies a different index (which is the case
-        // for 32-bit instructions in the presence of compressed instructions).
 
         // This figures out where to jump to in the switch statement
         size_t idx = _mmu->icache_index(pc);
@@ -193,10 +187,10 @@ void processor_t::step(size_t n)
         // is located within the execute_insn() function call.
         #define ICACHE_ACCESS(i) { \
           insn_fetch_t fetch = ic_entry->data; \
-          ic_entry++; \
           pc = execute_insn(this, pc, fetch); \
+          ic_entry = ic_entry->next; \
           if (i == mmu_t::ICACHE_ENTRIES-1) break; \
-          if (unlikely(ic_entry->tag != pc)) goto miss; \
+          if (unlikely(ic_entry->tag != pc)) break; \
           if (unlikely(instret+1 == n)) break; \
           instret++; \
           state.pc = pc; \
@@ -210,13 +204,6 @@ void processor_t::step(size_t n)
         }
 
         advance_pc();
-        continue;
-
-miss:
-        advance_pc();
-        // refill I$ if it looks like there wasn't a taken branch
-        if (pc > (ic_entry-1)->tag && pc <= (ic_entry-1)->tag + MAX_INSN_LENGTH)
-          _mmu->refill_icache(pc, ic_entry);
       }
     }
     catch(trap_t& t)