traffic: generate sparser descriptions of conversations
author: Douglas Bagnall <douglas.bagnall@catalyst.net.nz>
Fri, 14 Dec 2018 01:26:34 +0000 (14:26 +1300)
committer: Douglas Bagnall <dbagnall@samba.org>
Tue, 8 Jan 2019 22:55:33 +0000 (23:55 +0100)
Rather than building all the packets at this point, we stick to the
barest details of the packets (which is all the model gives us
anyway).

The advantage is that this will take a lot less memory, which matters
because this process forks into many clients that share and mutate
the conversation list.

Signed-off-by: Douglas Bagnall <douglas.bagnall@catalyst.net.nz>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
python/samba/emulate/traffic.py
python/samba/tests/blackbox/traffic_replay.py
script/traffic_replay

index 807fa8244e2c13836bb6bc46387eca0116f2859c..3310ce768bd1ce9a87ef115c1398c0479721db69 100644 (file)
@@ -1293,9 +1293,8 @@ class TrafficModel(object):
 
         return c
 
-    def generate_conversations(self, scale, duration, replay_speed=1,
-                               server=1, client=2):
-        """Generate a list of conversations from the model."""
+    def generate_conversation_sequences(self, scale, duration, replay_speed=1):
+        """Generate a list of conversation descriptions from the model."""
 
         # We run the simulation for ten times as long as our desired
         # duration, and take the section at the end.
@@ -1319,7 +1318,7 @@ class TrafficModel(object):
                % (n_packets, target_packets, len(conversations), scale)),
               file=sys.stderr)
         conversations.sort()  # sorts by first element == start time
-        return seq_to_conversations(conversations)
+        return conversations
 
 
 def seq_to_conversations(seq, server=1, client=2):
index a84d1a423e4e6c220c1e2e737b504f220e451855..4e9783ec51561fff36e579d4c397d9c6d2449fe9 100644 (file)
@@ -70,11 +70,6 @@ class TrafficLearnerTests(BlackboxTestCase):
 
         for i, opts in enumerate((["--random-seed=3"],
                                   ["--random-seed=4"],
-                                  ["--random-seed=3",
-                                   "--conversation-persistence=0.5"],
-                                  ["--random-seed=3",
-                                   "--old-scale",
-                                   "--conversation-persistence=0.95"],
                                   )):
             with temp_file(self.tempdir) as output:
                 command = ([SCRIPT, MODEL,
index 6fbbe4f44255b6c4b354375c28577026984b9c01..83b7041f63548074592aeb00970837bfe032ea88 100755 (executable)
@@ -250,9 +250,9 @@ def main():
             logger.info(("Using the specified model file to "
                          "generate conversations"))
 
-            conversations = model.generate_conversations(opts.scale_traffic,
-                                                         opts.duration,
-                                                         opts.replay_rate)
+            conversations = model.generate_conversation_sequences(opts.scale_traffic,
+                                                                  opts.duration,
+                                                                  opts.replay_rate)
         except ValueError:
             logger.error(("Could not parse %s, which does not seem to be "
                           "a model generated by script/traffic_learner."
@@ -263,7 +263,7 @@ def main():
         conversations = []
 
     if debuglevel > 5:
-        for c in conversations:
+        for c in traffic.seq_to_conversations(conversations):
             for p in c.packets:
                 print("    ", p, file=sys.stderr)
 
@@ -350,7 +350,7 @@ def main():
 
         logger.info("Writing traffic summary")
         summaries = []
-        for c in conversations:
+        for c in traffic.seq_to_conversations(conversations):
             summaries += c.replay_as_summary_lines()
 
         summaries.sort()
@@ -359,7 +359,8 @@ def main():
 
         exit(0)
 
-    traffic.replay(conversations, host,
+    traffic.replay(traffic.seq_to_conversations(conversations),
+                   host,
                    lp=lp,
                    creds=creds,
                    accounts=accounts,