Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

How to read the test result #102

Open
nguyenminhduc9988 opened this issue Apr 27, 2016 · 1 comment
Open

How to read the test result #102

nguyenminhduc9988 opened this issue Apr 27, 2016 · 1 comment

Comments

@nguyenminhduc9988
Copy link

I ran the spark-perf test, but how do I interpret the result? I see an array of time estimates — how do I interpret it?

"results": {  
   "testName":"scheduling-throughput",
   "options":{  
      "num-tasks":"10000",
      "num-trials":"10",
      "random-seed":"5",
      "num-jobs":"1",
      "inter-trial-wait":"3",
      "closure-size":"0"
   },
   "sparkConf":{  
      "spark.serializer":"org.apache.spark.serializer.JavaSerializer",
      "spark.driver.host":"172.16.30.5",
      "spark.driver.port":"40017",
      "spark.jars":"file:/home/zeus/temp/spark-perf/spark-tests/target/spark-perf-tests-assembly.jar",
      "spark.app.name":"TestRunner: scheduling-throughput",
      "spark.storage.memoryFraction":"0.66",
      "spark.locality.wait":"60000000",
      "spark.driver.memory":"512m",
      "spark.executor.id":"driver",
      "spark.submit.deployMode":"client",
      "spark.master":"spark://testbed5.jvn.edu.vn:7077",
      "spark.fileserver.uri":"http://172.16.30.5:49201",
      "spark.externalBlockStore.folderName":"spark-2211ef9a-d22b-41a2-a5a4-310443980c95",
      "spark.app.id":"app-20160427111045-0000"
   },
   "sparkVersion":"1.5.2",
   "systemProperties":{  
      "java.io.tmpdir":"/tmp",
      "spark.serializer":"org.apache.spark.serializer.JavaSerializer",
      "line.separator":"\n",
      "path.separator":":",
      "sun.management.compiler":"HotSpot 64-Bit Tiered Compilers",
      "SPARK_SUBMIT":"true",
      "sun.cpu.endian":"little",
      "java.specification.version":"1.8",
      "java.vm.specification.name":"Java Virtual Machine Specification",
      "java.vendor":"Oracle Corporation",
      "java.vm.specification.version":"1.8",
      "user.home":"/home/zeus",
      "file.encoding.pkg":"sun.io",
      "sun.nio.ch.bugLevel":"",
      "sun.arch.data.model":"64",
      "sun.boot.library.path":"/usr/lib/jvm/java-8-oracle/jre/lib/amd64",
      "user.dir":"/home/zeus/temp/spark-perf",
      "spark.jars":"file:/home/zeus/temp/spark-perf/spark-tests/target/spark-perf-tests-assembly.jar",
      "java.library.path":"/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib",
      "sun.cpu.isalist":"",
      "os.arch":"amd64",
      "java.vm.version":"25.66-b17",
      "spark.app.name":"spark.perf.TestRunner",
      "java.endorsed.dirs":"/usr/lib/jvm/java-8-oracle/jre/lib/endorsed",
      "java.runtime.version":"1.8.0_66-b17",
      "java.vm.info":"mixed mode",
      "sparkperf.commitSHA":"unknown",
      "java.ext.dirs":"/usr/lib/jvm/java-8-oracle/jre/lib/ext:/usr/java/packages/lib/ext",
      "spark.storage.memoryFraction":"0.66",
      "java.runtime.name":"Java(TM) SE Runtime Environment",
      "spark.locality.wait":"60000000",
      "spark.driver.memory":"512m",
      "file.separator":"/",
      "java.class.version":"52.0",
      "java.specification.name":"Java Platform API Specification",
      "sun.boot.class.path":"/usr/lib/jvm/java-8-oracle/jre/lib/resources.jar:/usr/lib/jvm/java-8-oracle/jre/lib/rt.jar:/usr/lib/jvm/java-8-oracle/jre/lib/sunrsasign.jar:/usr/lib/jvm/java-8-oracle/jre/lib/jsse.jar:/usr/lib/jvm/java-8-oracle/jre/lib/jce.jar:/usr/lib/jvm/java-8-oracle/jre/lib/charsets.jar:/usr/lib/jvm/java-8-oracle/jre/lib/jfr.jar:/usr/lib/jvm/java-8-oracle/jre/classes",
      "file.encoding":"UTF-8",
      "user.timezone":"Asia/Ho_Chi_Minh",
      "java.specification.vendor":"Oracle Corporation",
      "sun.java.launcher":"SUN_STANDARD",
      "os.version":"3.19.0-25-generic",
      "sun.os.patch.level":"unknown",
      "spark.submit.deployMode":"client",
      "java.vm.specification.vendor":"Oracle Corporation",
      "spark.master":"spark://testbed5.jvn.edu.vn:7077",
      "user.country":"US",
      "sun.jnu.encoding":"UTF-8",
      "user.language":"en",
      "java.vendor.url":"http://java.oracle.com/",
      "java.awt.printerjob":"sun.print.PSPrinterJob",
      "java.awt.graphicsenv":"sun.awt.X11GraphicsEnvironment",
      "awt.toolkit":"sun.awt.X11.XToolkit",
      "java.class.path":"/home/zeus/temp/spark-1.5.2-bin-hadoop2.6/conf/:/home/zeus/temp/spark-1.5.2-bin-hadoop2.6/lib/spark-assembly-1.5.2-hadoop2.6.0.jar:/home/zeus/temp/spark-1.5.2-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar:/home/zeus/temp/spark-1.5.2-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar:/home/zeus/temp/spark-1.5.2-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar",
      "os.name":"Linux",
      "java.vm.vendor":"Oracle Corporation",
      "java.vendor.url.bug":"http://bugreport.sun.com/bugreport/",
      "user.name":"zeus",
      "java.vm.name":"Java HotSpot(TM) 64-Bit Server VM",
      "sun.java.command":"org.apache.spark.deploy.SparkSubmit --master spark://testbed5.jvn.edu.vn:7077 --conf spark.driver.memory=512m --class spark.perf.TestRunner /home/zeus/temp/spark-perf/spark-tests/target/spark-perf-tests-assembly.jar scheduling-throughput --num-trials=10 --inter-trial-wait=3 --num-tasks=10000 --num-jobs=1 --closure-size=0 --random-seed=5",
      "java.home":"/usr/lib/jvm/java-8-oracle/jre",
      "java.version":"1.8.0_66",
      "sun.io.unicode.encoding":"UnicodeLittle"
   },
   "results":[  
      {  
         "time":5.715
      },
      {  
         "time":2.052
      },
      {  
         "time":1.531
      },
      {  
         "time":1.449
      },
      {  
         "time":1.285
      },
      {  
         "time":1.46
      },
      {  
         "time":1.33
      },
      {  
         "time":1.274
      },
      {  
         "time":1.232
      },
      {  
         "time":1.246
      }
   ]
}

@JoshRosen
Copy link
Contributor

The numbers in the results array are the raw timing measurements for the runs of the test, listed in the order the trials ran (which is why the first time is a lot slower, due to warmup / JIT).

There's some really old documentation on consuming these JSON files at https://github.com/databricks/spark-perf/wiki

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants