@@ -384,6 +384,11 @@ static int cpu_restore_state_from_tb(CPUState *cpu, TranslationBlock *tb,
return 0;
}
+static void tb_destroy(TranslationBlock *tb) /* Release per-TB resources before the TB's memory is recycled. */
+{
+ qemu_spin_destroy(&tb->jmp_lock); /* jmp_lock is the only TB member needing explicit teardown */
+}
+
bool cpu_restore_state(CPUState *cpu, uintptr_t host_pc, bool will_exit)
{
TranslationBlock *tb;
@@ -413,6 +418,7 @@ bool cpu_restore_state(CPUState *cpu, uintptr_t host_pc, bool will_exit)
/* one-shot translation, invalidate it immediately */
tb_phys_invalidate(tb, -1);
tcg_tb_remove(tb);
+ tb_destroy(tb);
}
r = true;
}
@@ -1230,7 +1236,7 @@ static void do_tb_flush(CPUState *cpu, run_on_cpu_data tb_flush_count)
qht_reset_size(&tb_ctx.htable, CODE_GEN_HTABLE_SIZE);
page_flush_tb();
- tcg_region_reset_all();
+ tcg_region_reset_all(tb_destroy);
/* XXX: flush processor icache at this point if cache flush is
expensive */
atomic_mb_set(&tb_ctx.tb_flush_count, tb_ctx.tb_flush_count + 1);
@@ -1886,6 +1892,7 @@ TranslationBlock *tb_gen_code(CPUState *cpu,
orig_aligned -= ROUND_UP(sizeof(*tb), qemu_icache_linesize);
atomic_set(&tcg_ctx->code_gen_ptr, (void *)orig_aligned);
+ tb_destroy(tb);
return existing_tb;
}
tcg_tb_insert(tb);
@@ -2235,6 +2242,7 @@ void cpu_io_recompile(CPUState *cpu, uintptr_t retaddr)
tb_phys_invalidate(tb->orig_tb, -1);
}
tcg_tb_remove(tb);
+ tb_destroy(tb);
}
/* TODO: If env->pc != tb->pc (i.e. the faulting instruction was not
@@ -815,8 +815,9 @@ void *tcg_malloc_internal(TCGContext *s, int size);
void tcg_pool_reset(TCGContext *s);
TranslationBlock *tcg_tb_alloc(TCGContext *s);
+typedef void (*tb_destroy_func)(TranslationBlock *tb);
void tcg_region_init(void);
-void tcg_region_reset_all(void);
+void tcg_region_reset_all(tb_destroy_func tb_destroy);
size_t tcg_code_size(void);
size_t tcg_code_capacity(void);
@@ -502,7 +502,16 @@ size_t tcg_nb_tbs(void)
return nb_tbs;
}
-static void tcg_region_tree_reset_all(void)
+static gboolean tcg_region_tree_traverse(gpointer k, gpointer v, gpointer data) /* GTraverseFunc: destroy every TB held in a region tree */
+{
+ TranslationBlock *tb = v; /* tree value is the TranslationBlock */
+ tb_destroy_func tb_destroy = data; /* callback threaded through from tcg_region_tree_reset_all() */
+
+ tb_destroy(tb);
+ return FALSE; /* FALSE = continue traversal over all nodes */
+}
+
+static void tcg_region_tree_reset_all(tb_destroy_func tb_destroy)
{
size_t i;
@@ -510,6 +519,10 @@ static void tcg_region_tree_reset_all(void)
for (i = 0; i < region.n; i++) {
struct tcg_region_tree *rt = region_trees + i * tree_size;
+ if (tb_destroy != NULL) {
+ g_tree_foreach(rt->tree, tcg_region_tree_traverse, tb_destroy);
+ }
+
/* Increment the refcount first so that destroy acts as a reset */
g_tree_ref(rt->tree);
g_tree_destroy(rt->tree);
@@ -586,7 +599,7 @@ static inline bool tcg_region_initial_alloc__locked(TCGContext *s)
}
/* Call from a safe-work context */
-void tcg_region_reset_all(void)
+void tcg_region_reset_all(tb_destroy_func tb_destroy)
{
unsigned int n_ctxs = atomic_read(&n_tcg_ctxs);
unsigned int i;
@@ -603,7 +616,7 @@ void tcg_region_reset_all(void)
}
qemu_mutex_unlock(&region.lock);
- tcg_region_tree_reset_all();
+ tcg_region_tree_reset_all(tb_destroy);
}
#ifdef CONFIG_USER_ONLY