[Erp5-report] r45941 arnaud.fontaine - /erp5/trunk/utils/erp5.utils.benchmark/src/erp5/util...

nobody at svn.erp5.org
Thu Jun 23 07:46:40 CEST 2011


Author: arnaud.fontaine
Date: Thu Jun 23 07:46:39 2011
New Revision: 45941

URL: http://svn.erp5.org?rev=45941&view=rev
Log:
Move creation/closing of Benchmark Result document to ERP5BenchmarkResult

Modified:
    erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/benchmark.py
    erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/performance_tester.py

Modified: erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/benchmark.py
URL: http://svn.erp5.org/erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/benchmark.py?rev=45941&r1=45940&r2=45941&view=diff
==============================================================================
--- erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/benchmark.py [utf8] (original)
+++ erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/benchmark.py [utf8] Thu Jun 23 07:46:39 2011
@@ -313,6 +313,7 @@ class CSVBenchmarkResult(BenchmarkResult
 from cStringIO import StringIO
 
 import xmlrpclib
+import datetime
 
 class ERP5BenchmarkResult(BenchmarkResult):
   def __init__(self, *args, **kwargs):
@@ -348,6 +349,28 @@ class ERP5BenchmarkResult(BenchmarkResul
   def __exit__(self, exc_type, exc_value, traceback):
     super(ERP5BenchmarkResult, self).__exit__(exc_type, exc_value, traceback)
 
+  @staticmethod
+  def createResultDocument(publish_url, publish_project, repeat, nb_users):
+    test_result_module = xmlrpclib.ServerProxy(publish_url,
+                                               verbose=True,
+                                               allow_none=True)
+
+    # TODO: range of users?
+    benchmark_result = test_result_module.TestResultModule_addBenchmarkResult(
+      '%d repeat with %d concurrent users' % (repeat, nb_users),
+      publish_project, ' '.join(sys.argv), datetime.datetime.now())
+
+    return benchmark_result['id']
+
+  @staticmethod
+  def closeResultDocument(publish_document_url, error_message_set):
+    result = xmlrpclib.ServerProxy(publish_document_url,
+                                   verbose=True,
+                                   allow_none=True)
+
+    result.BenchmarkResult_completed(error_message_set and 'FAIL' or 'PASS',
+                                     error_message_set)
+
 import multiprocessing
 import csv
 import traceback
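
For reference, a minimal sketch of how the two new static methods can be called directly (the ERP5-side scripts TestResultModule_addBenchmarkResult and BenchmarkResult_completed are the ones used above; the import path, publish URL and project title below are placeholders, not part of this commit):

  # Hypothetical standalone usage of the new static methods.
  from erp5.utils.benchmark.benchmark import ERP5BenchmarkResult

  # Placeholder publish URL pointing at the test_result_module of an
  # ERP5 instance (credentials embedded as expected by xmlrpclib).
  publish_url = 'https://user:password@example.org/erp5/test_result_module/'

  # Create the Benchmark Result document and get its id back
  # (createResultDocument returns benchmark_result['id']).
  document_id = ERP5BenchmarkResult.createResultDocument(
    publish_url, 'example_project', repeat=10, nb_users=4)

  # ... run the benchmark suites ...

  # Close the document, marking it FAIL if any error message was
  # collected during the run, PASS otherwise.
  ERP5BenchmarkResult.closeResultDocument(publish_url + document_id,
                                          set())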

Modified: erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/performance_tester.py
URL: http://svn.erp5.org/erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/performance_tester.py?rev=45941&r1=45940&r2=45941&view=diff
==============================================================================
--- erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/performance_tester.py [utf8] (original)
+++ erp5/trunk/utils/erp5.utils.benchmark/src/erp5/utils/benchmark/performance_tester.py [utf8] Thu Jun 23 07:46:39 2011
@@ -32,10 +32,9 @@ import argparse
 import os
 import sys
 import multiprocessing
-import datetime
 import xmlrpclib
 
-from benchmark import ArgumentType, BenchmarkProcess
+from benchmark import ArgumentType, BenchmarkProcess, ERP5BenchmarkResult
 
 class PerformanceTester(object):
   def __init__(self, namespace=None):
@@ -177,39 +176,18 @@ class PerformanceTester(object):
     if not self._argument_namespace.erp5_publish_url:
       return
 
-    test_result_module = xmlrpclib.ServerProxy(
-      self._argument_namespace.erp5_publish_url, verbose=True, allow_none=True)
-
-    # TODO: range of users?
-    benchmark_result = test_result_module.TestResultModule_addBenchmarkResult(
-      '%d repeat with %d concurrent users' % (self._argument_namespace.repeat,
-                                              self._argument_namespace.users),
-      self._argument_namespace.erp5_publish_project,
-      ' '.join(sys.argv),
-      datetime.datetime.now())
-
-    try:
-      benchmark_result_id = benchmark_result['id']
-    except:
-      raise RuntimeError, "Cannot create the benchmark result"
-
     self._argument_namespace.erp5_publish_url += \
-        'test_result_module/%s' % benchmark_result_id
+        ERP5BenchmarkResult.createResultDocument(self._argument_namespace.erp5_publish_url,
+                                                 self._argument_namespace.erp5_publish_project,
+                                                 self._argument_namespace.repeat,
+                                                 self._argument_namespace.users)          
 
   def postRun(self, error_message_set):
     if not self._argument_namespace.erp5_publish_url:
       return
 
-    if error_message_set:
-      result = 'FAIL'
-    else:
-      result = 'PASS'
-
-    benchmark_result = xmlrpclib.ServerProxy(
-      self._argument_namespace.erp5_publish_url,
-      verbose=True, allow_none=True)
-
-    benchmark_result.BenchmarkResult_completed(result, error_message_set)
+    ERP5BenchmarkResult.closeResultDocument(self._argument_namespace.erp5_publish_url,
+                                            error_message_set)
 
   def _run_constant(self, nb_users):
     process_list = []
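
After this change, the ERP5 publishing logic in the tester reduces to the flow sketched below (attribute names are taken from the diff; the run_benchmarks callable and the wrapper function are hypothetical, shown only to illustrate the preRun/postRun sequence):

  # Sketch of the preRun/postRun publishing flow after the refactoring.
  def publish_run(namespace, run_benchmarks):
    if namespace.erp5_publish_url:
      # preRun: create the Benchmark Result document on the ERP5 side
      # and extend the publish URL with the returned document id.
      namespace.erp5_publish_url += ERP5BenchmarkResult.createResultDocument(
        namespace.erp5_publish_url,
        namespace.erp5_publish_project,
        namespace.repeat,
        namespace.users)

    # Run the actual benchmark and collect error messages, if any.
    error_message_set = run_benchmarks()

    if namespace.erp5_publish_url:
      # postRun: close the document as PASS or FAIL depending on
      # whether any error message was collected.
      ERP5BenchmarkResult.closeResultDocument(namespace.erp5_publish_url,
                                              error_message_set)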


