[pLog-svn] r3422 - plog/trunk/tools

oscar at devel.lifetype.net oscar at devel.lifetype.net
Tue May 16 15:08:30 GMT 2006


Author: oscar
Date: 2006-05-16 15:08:29 +0000 (Tue, 16 May 2006)
New Revision: 3422

Modified:
   plog/trunk/tools/play.py
Log:
added support for logging data to .csv files via the -o parameter. It will log global stats to the file output.csv (assuming the command line was "-o output") and per-thread stats to output_X.csv, where 'X' is the thread id.


Modified: plog/trunk/tools/play.py
===================================================================
--- plog/trunk/tools/play.py	2006-05-15 04:42:30 UTC (rev 3421)
+++ plog/trunk/tools/play.py	2006-05-16 15:08:29 UTC (rev 3422)
@@ -69,17 +69,25 @@
             if req_time_diff > self.max_request_time:
                 self.max_request_time = req_time_diff
                 
+            # save some data about this request
+            line['id'] = self.num_requests
+            line['req_time'] = req_time_diff
+            line['http_response'] = response.status
+            line['timestamp'] = time.asctime(time.localtime())
+                
             # save the time it took to perform this request
             self.total_time = self.total_time + req_time_diff
                 
             # check if we have to wait
             if self.wait > 0.0:
-                time.sleep( self.wait )
+                time.sleep( self.wait )            
             
         # save the average time
         self.avg_time_per_request = self.total_time / self.num_requests
+        
+        #print self.data
             
-        print "requests = " + str( self.num_requests ) + ", total = " + str( self.total_time ) + ", avg = " + str( self.avg_time_per_request ) + ", max = " +  str( self.max_request_time ) + ", min = " + str( self.min_request_time )
+        #print "requests = " + str( self.num_requests ) + ", total = " + str( self.total_time ) + ", avg = " + str( self.avg_time_per_request ) + ", max = " +  str( self.max_request_time ) + ", min = " + str( self.min_request_time )
 
 #
 # class that loads a script file and parses it
@@ -138,7 +146,7 @@
 #parser.add_option( "-r", "--number-of-runs", type="int", dest="num_runs", default=1 )
 parser.add_option( "-w", "--wait", type="int", dest="milliseconds_wait", default=0 )
 parser.add_option( "-f", "--file", type="string", dest="filename" )
-#parser.add_option( "-o", "--outfile", type="string", dest="outfile" )
+parser.add_option( "-o", "--outfile", type="string", dest="outfile" )
 (options, args ) = parser.parse_args()
 
 if options.filename == "":
@@ -160,6 +168,12 @@
 print "  Milliseconds to wait between request: " +  str(options.milliseconds_wait)
 seconds_wait = options.milliseconds_wait / 1000.0
 
+# check if dumping thread data to a file
+logdata = False
+if options.outfile:
+    print "  Logging thread data to file: " + options.outfile
+    logdata = True
+
 # mark the time when we start
 time_start = time.time()
 
@@ -184,6 +198,16 @@
 time_end = time.time()
     
 # process statistics from all threads
+if logdata:
+    # log
+    try:
+        print "Writing global data to file " + options.outfile + ".csv"
+        f = open( options.outfile + ".csv", "w" )
+        f.write( "ThreadId,NumReqs,NumGet,NumPost,TotalTime,AvgTime,MaxTime,MinTime\n" )
+    except:
+        print "Could not log thread data to file!" 
+        logdata = False
+
 for t in pool:
     total_average_time = total_average_time + t.avg_time_per_request
     total_requests = total_requests + t.num_requests
@@ -193,7 +217,29 @@
         total_min_time =  t.min_request_time
     if t.max_request_time > total_max_time:
         total_max_time = t.max_request_time
+    
+    if logdata:
+        # log global thread data
+        line = str( t.id ) + "," + str( t.num_requests ) + "," + str( t.num_gets) + "," + \
+               str( t.num_posts ) + "," + str( t.total_time ) + "," + \
+               str( t.avg_time_per_request ) + "," + str( t.max_request_time ) + "," + \
+               str( t.min_request_time ) + "\n"
+        f.write( line )
+        # and thread-specific times
+        filename = options.outfile + "_" + str( t.id ) + ".csv"
+        print "Writing thread " + str( t.id) + " data to file " + filename
+        td = open( filename, "w" )
+        td.write( "ReqId,Timestamp,Url,HttpResponse,Type,Time\n")
+        for req in t.data:
+            string = str( req['id'] ) + "," + str( req['timestamp'] ) + "," + \
+                     str( req['url'] ) + "," + str( req['http_response'] ) + "," + \
+                     str( req['type'] ) + "," + str( req['req_time'] ) + "\n"
+            td.write( string )
+        td.close()
         
+if logdata:
+    f.close()
+        
 # calculate the total average
 total_average_time = total_average_time / options.num_threads
 
@@ -207,8 +253,8 @@
 # output final information
 print "total = " + str( total_time )
 print "req = " + str( total_requests )
-print "gets = " + str( total_gets ) + " (" + str(( total_gets / total_requests ) *  100 ) + "%)"
-print "posts = " + str( total_posts ) + " (" + str(( total_posts / total_requests ) *  100 ) + "%)"
+print "gets = " + str( total_gets ) + " (" + str(( float(total_gets) / float(total_requests)) *  100 ) + "%)"
+print "posts = " + str( total_posts ) + " (" + str(( float(total_posts) / float(total_requests)) *  100 ) + "%)"
 print "reqs/sec = " + str( reqs_per_sec )
 print "avg = " + str( total_average_time )
 print "min = " + str( total_min_time )



More information about the pLog-svn mailing list