@@ -384,6 +384,11 @@ static int cpu_restore_state_from_tb(CPUState *cpu, TranslationBlock *tb,
     return 0;
 }
 
+void tb_destroy(TranslationBlock *tb)
+{
+    qemu_spin_destroy(&tb->jmp_lock);
+}
+
 bool cpu_restore_state(CPUState *cpu, uintptr_t host_pc, bool will_exit)
 {
     TranslationBlock *tb;
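
Note, not part of the patch: tb_destroy() is the teardown half of the qemu_spin_init() performed on tb->jmp_lock when the TB's jump lists are set up in tb_gen_code(), so every path that discards a TB now has to call it. A standalone sketch of the init/destroy pairing being enforced here, using plain pthread spinlocks rather than QEMU's wrappers:

    /* Standalone illustration (not QEMU code): an object that embeds a lock
     * needs a destroy step paired with its init; that is what tb_destroy()
     * adds for tb->jmp_lock.  Build with: cc demo.c -pthread */
    #include <pthread.h>
    #include <stdlib.h>

    struct block {
        pthread_spinlock_t jmp_lock;    /* stands in for tb->jmp_lock */
    };

    static struct block *block_new(void)
    {
        struct block *b = calloc(1, sizeof(*b));
        pthread_spin_init(&b->jmp_lock, PTHREAD_PROCESS_PRIVATE);
        return b;
    }

    static void block_destroy(struct block *b)
    {
        pthread_spin_destroy(&b->jmp_lock);   /* analogue of tb_destroy() */
        free(b);
    }

    int main(void)
    {
        struct block *b = block_new();
        block_destroy(b);
        return 0;
    }
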
@@ -413,6 +418,7 @@ bool cpu_restore_state(CPUState *cpu, uintptr_t host_pc, bool will_exit)
                 /* one-shot translation, invalidate it immediately */
                 tb_phys_invalidate(tb, -1);
                 tcg_tb_remove(tb);
+                tb_destroy(tb);
             }
             r = true;
         }
@@ -1886,6 +1892,7 @@ TranslationBlock *tb_gen_code(CPUState *cpu,
 
         orig_aligned -= ROUND_UP(sizeof(*tb), qemu_icache_linesize);
         atomic_set(&tcg_ctx->code_gen_ptr, (void *)orig_aligned);
+        tb_destroy(tb);
         return existing_tb;
     }
     tcg_tb_insert(tb);
@@ -2235,6 +2242,7 @@ void cpu_io_recompile(CPUState *cpu, uintptr_t retaddr)
             tb_phys_invalidate(tb->orig_tb, -1);
         }
         tcg_tb_remove(tb);
+        tb_destroy(tb);
     }
 
     /* TODO: If env->pc != tb->pc (i.e. the faulting instruction was not
@@ -819,6 +819,7 @@ void tcg_pool_reset(TCGContext *s);
 TranslationBlock *tcg_tb_alloc(TCGContext *s);
 
 void tcg_region_init(void);
+void tb_destroy(TranslationBlock *tb);
 void tcg_region_reset_all(void);
 
 size_t tcg_code_size(void);
@@ -502,6 +502,14 @@ size_t tcg_nb_tbs(void)
     return nb_tbs;
 }
 
+static gboolean tcg_region_tree_traverse(gpointer k, gpointer v, gpointer data)
+{
+    TranslationBlock *tb = v;
+
+    tb_destroy(tb);
+    return FALSE;
+}
+
 static void tcg_region_tree_reset_all(void)
 {
     size_t i;
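
For reference, not part of the patch: the callback added above follows GLib's GTraverseFunc contract, in which returning FALSE keeps g_tree_foreach() walking the remaining nodes, so every TranslationBlock held in a region tree has its jmp_lock destroyed before the tree itself is dropped in the next hunk. A minimal standalone sketch of that contract (hypothetical keys and values, GLib API only):

    /* Standalone GLib example: g_tree_foreach() invokes the traverse function
     * for every key/value pair; returning FALSE continues the traversal,
     * returning TRUE would stop it early.  Build with:
     *   cc demo.c $(pkg-config --cflags --libs glib-2.0) */
    #include <glib.h>
    #include <stdio.h>

    static gboolean visit(gpointer key, gpointer value, gpointer data)
    {
        printf("%s -> %s\n", (const char *)key, (const char *)value);
        return FALSE;                 /* keep visiting the remaining nodes */
    }

    int main(void)
    {
        GTree *t = g_tree_new((GCompareFunc)g_strcmp0);

        g_tree_insert(t, "tb1", "first block");
        g_tree_insert(t, "tb2", "second block");
        g_tree_foreach(t, visit, NULL);   /* visits every entry */
        g_tree_destroy(t);
        return 0;
    }
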
@@ -510,6 +518,7 @@ static void tcg_region_tree_reset_all(void)
     for (i = 0; i < region.n; i++) {
         struct tcg_region_tree *rt = region_trees + i * tree_size;
 
+        g_tree_foreach(rt->tree, tcg_region_tree_traverse, NULL);
         /* Increment the refcount first so that destroy acts as a reset */
         g_tree_ref(rt->tree);
         g_tree_destroy(rt->tree);