From: zhanghailiang
Subject: [Qemu-devel] [PATCH 3/5] colo-compare: release all unhandled packets in finalize function
Date: Wed, 15 Feb 2017 16:34:15 +0800

We should release all unhandled packets before finalizing colo-compare.
Besides, we need to free connection_track_table, or there will be a
memory leak.

Signed-off-by: zhanghailiang <address@hidden>
---
 net/colo-compare.c | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/net/colo-compare.c b/net/colo-compare.c
index a16e2d5..809bad3 100644
--- a/net/colo-compare.c
+++ b/net/colo-compare.c
@@ -676,6 +676,23 @@ static void colo_compare_complete(UserCreatable *uc, Error **errp)
     return;
 }
 
+static void colo_release_packets(void *opaque, void *user_data)
+{
+    CompareState *s = user_data;
+    Connection *conn = opaque;
+    Packet *pkt = NULL;
+
+    while (!g_queue_is_empty(&conn->primary_list)) {
+        pkt = g_queue_pop_head(&conn->primary_list);
+        compare_chr_send(&s->chr_out, pkt->data, pkt->size);
+        packet_destroy(pkt, NULL);
+    }
+    while (!g_queue_is_empty(&conn->secondary_list)) {
+        pkt = g_queue_pop_head(&conn->secondary_list);
+        packet_destroy(pkt, NULL);
+    }
+}
+
 static void colo_compare_class_init(ObjectClass *oc, void *data)
 {
     UserCreatableClass *ucc = USER_CREATABLE_CLASS(oc);
@@ -707,9 +724,12 @@ static void colo_compare_finalize(Object *obj)
     g_main_loop_quit(s->compare_loop);
     qemu_thread_join(&s->thread);
 
+    /* Release all unhandled packets after the compare thread has exited */
+    g_queue_foreach(&s->conn_list, colo_release_packets, s);
 
     g_queue_free(&s->conn_list);
 
+    g_hash_table_destroy(s->connection_track_table);
     g_free(s->pri_indev);
     g_free(s->sec_indev);
     g_free(s->outdev);
-- 
1.8.3.1
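
For context on why a bare g_hash_table_destroy() is enough to plug the
leak: connection_track_table is presumably created with
g_hash_table_new_full() and a value-destroy callback, so destroying the
table runs that callback on every Connection still tracked. The
standalone sketch below (not QEMU code; release_packets,
connection_destroy, and the packets-as-strings simplification are
illustrative) demonstrates the same teardown order with plain GLib:
drain the per-connection queues first, then destroy the table.

/*
 * Standalone sketch of the teardown pattern above, using plain GLib.
 * Not QEMU code: Connection is reduced to a single packet queue and
 * packets are plain strings. Build with:
 *   gcc sketch.c $(pkg-config --cflags --libs glib-2.0)
 */
#include <glib.h>
#include <stdio.h>

typedef struct {
    GQueue primary_list;    /* packets still queued on this connection */
} Connection;

/* GDestroyNotify run by the hash table for each remaining value. */
static void connection_destroy(gpointer data)
{
    Connection *conn = data;
    g_free(conn);           /* queues were drained before destruction */
}

/* GFunc for g_queue_foreach(): pop and free every queued packet. */
static void release_packets(gpointer data, gpointer user_data)
{
    Connection *conn = data;

    while (!g_queue_is_empty(&conn->primary_list)) {
        g_free(g_queue_pop_head(&conn->primary_list));
    }
}

int main(void)
{
    /* The value-destroy callback is what makes g_hash_table_destroy()
     * free the Connection objects, which the patch relies on. */
    GHashTable *table = g_hash_table_new_full(g_str_hash, g_str_equal,
                                              g_free, connection_destroy);
    GQueue conn_list = G_QUEUE_INIT;

    Connection *conn = g_new0(Connection, 1);
    g_queue_init(&conn->primary_list);
    g_queue_push_tail(&conn->primary_list, g_strdup("pkt0"));
    g_hash_table_insert(table, g_strdup("conn-key"), conn);
    g_queue_push_tail(&conn_list, conn);

    /* Same order as colo_compare_finalize(): 1. drain unhandled
     * packets, 2. drop the connection list's links, 3. destroy the
     * table, freeing both keys and Connection objects. */
    g_queue_foreach(&conn_list, release_packets, NULL);
    g_queue_clear(&conn_list);
    g_hash_table_destroy(table);

    printf("teardown complete\n");
    return 0;
}

As the patch's comment notes, the drain must happen only after the
compare thread has exited, so no new packets can be queued onto the
connections concurrently with the teardown.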