Using ReflectionFunction, cleanups, results in milliseconds

Jakub Míšek 2020-05-22 15:12:23 +02:00
Parent 39e39bb440
Commit cd1dc96cc3
4 changed files with 107 additions and 79 deletions

View file

@@ -1,3 +1,3 @@
-*.Dockerfile
-Run.ps1
+*.dockerfile
+run.ps1
 results.json

View file

@@ -2,7 +2,7 @@
   <PropertyGroup>
     <OutputType>Exe</OutputType>
-    <TargetFramework>netcoreapp3.0</TargetFramework>
+    <TargetFramework>netcoreapp3.1</TargetFramework>
     <StartupObject>run.php</StartupObject>
   </PropertyGroup>

View file

@@ -7,4 +7,4 @@ RUN echo "{ \"msbuild-sdks\": { \"Peachpie.NET.Sdk\": \"0.9.981\" } }" > global.
 RUN dotnet restore
 RUN dotnet build -c Release
-ENTRYPOINT ["dotnet", "run", "-c", "Release"]
+ENTRYPOINT ["dotnet", "run", "--no-build", "-c", "Release"]

View file

@@ -1,96 +1,124 @@
 <?php
-// Arguments: <benchmarkFilter> <iterationCount>
+// Arguments: <benchmarkFilter> <iterations>
 // Example: "functions/*" 1000000
-const EMPTY_BENCHMARK = 'emptyBenchmark';
-const CHUNK_COUNT = 100;
-const WARMUP_CHUNK_COUNT = 20;
-$benchmarkFilter = $argv[1];
-$iterationCount = $argv[2];
-runAll($benchmarkFilter, $iterationCount);
+class Runner
+{
+    const CHUNK = 100;
+    const EMPTY_FUNCTION = "empty_func";
+
+    static function run(string $benchmarkFilter, int $iterations): array
+    {
+        $benchmarks = self::collectBechmarks($benchmarkFilter);
+        $results = [];
+
+        // run benchmarks, collect results
+        foreach ($benchmarks as $benchmark) {
+            // warmup
+            $time = self::runSingle($benchmark, self::CHUNK);
+            //
+            $time = self::runSingle($benchmark, $iterations);
+            $times = [];
+            $blocks = $iterations / self::CHUNK;
+            for ($i = 0; $i < $blocks; $i++)
+            {
+                $times[] = self::runSingle($benchmark, self::CHUNK) * $blocks;
+            }
+            sort($times);
+
+            $results[$benchmark->getName()] =
+            [
+                "time" => $time,
+                //"avg" => $time / $iterations,
+                "min" => $times[0],
+                "max" => end($times),
+                "med" => $times[count($times) / 2],
+            ];
+        }
+
+        // Clean up the results by calculating the overhead for test runs and deducing it
+        $empty = $results[self::EMPTY_FUNCTION]["time"];
+        foreach ($results as $name => $r)
+        {
+            $results[$name] =
+            [
+                "iterations" => $iterations,
+                "time_ms" => (int)(($r["time"] - $empty) / 1000000),
+                //"avg_ms" => (int)(($r["time"] - $empty) / $iterations / 1000000 ),
+                "min_ms" => (int)(($r["min"] - $empty) / 1000000 ),
+                "max_ms" => (int)(($r["max"] - $empty) / 1000000 ),
+                "med_ms" => (int)(($r["med"] - $empty) / 1000000 ),
+            ];
+        }
+
+        //
+        return $results;
+    }
-function runAll(string $benchmarkFilter, int $iterationCount) {
-    // Include all the files with benchmarks according to the filter
-    foreach (glob($benchmarkFilter, GLOB_BRACE) as $file) {
-        if (pathinfo($file, PATHINFO_EXTENSION) == "php") {
-            require_once $file;
-        }
-    }
+    static function collectBechmarks(string $benchmarkFilter)
+    {
+        // Include all the files with benchmarks according to the filter
+        $files = [];
+        foreach (glob($benchmarkFilter, GLOB_BRACE) as $file)
+        {
+            if (pathinfo($file, PATHINFO_EXTENSION) == "php")
+            {
+                if (false !== require_once $file)
+                {
+                    $files[] = realpath($file);
+                }
+            }
+        }
-    // Find all the benchmarking functions in the included files
-    $benchmarks = [];
-    foreach (get_defined_functions()['user'] as $fnName) {
-        $fn = new ReflectionFunction($fnName);
-        // From this file, include only EMPTY_BENCHMARK
-        if ($fn->getFileName() == __FILE__ && $fn->getName() != EMPTY_BENCHMARK) {
-            continue;
-        }
-        // Exclude helper functions starting with _
-        if ($fn->getShortName()[0] == '_') {
-            continue;
-        }
-        $benchmarks[] = $fn->getName();
-    }
+        // Find all the benchmarking functions in the included files
+        $benchmarks = [
+            new ReflectionFunction(self::EMPTY_FUNCTION)
+        ];
+        foreach (get_defined_functions()['user'] as $fnName)
+        {
+            $fn = new ReflectionFunction($fnName);
+            if (in_array($fn->getFileName(), $files) && $fn->getShortName()[0] != '_')
+            {
+                $benchmarks[] = $fn;
+            }
+        }
+
+        //
+        return $benchmarks;
+    }
-    // Warm-up opcache, JIT, call-sites etc.
-    foreach ($benchmarks as $benchmark) {
-        runSingle($benchmark, 1);
-    }
-    // Run benchmark themselves and gather the rough results
-    $results = [];
-    foreach ($benchmarks as $benchmark) {
-        // Perform all the measurements in chunks
-        $chunkIterationCount = $iterationCount / CHUNK_COUNT; // TODO: Consider finding the iteration count dynamically
-        $chunkAvgs = [];
-        for ($i = 0; $i < WARMUP_CHUNK_COUNT + CHUNK_COUNT; $i++) {
-            $chunkAvgs[] = runSingle($benchmark, $chunkIterationCount) / $chunkIterationCount;
-        }
-        // Remove warmup data and find average time
-        $chunkAvgs = array_slice($chunkAvgs, WARMUP_CHUNK_COUNT);
-        $avg = array_sum($chunkAvgs) / count($chunkAvgs);
-        $results[$benchmark] = [
-            'iterations' => $iterationCount,
-            'rough_avg_ns' => $avg,
-            'rough_std_dev_ns' => getStandardDeviation($chunkAvgs, $avg)
-        ];
-    }
-    // Clean up the results by calculating the overhead for test runs and deducing it
-    $overheadAvg = $results[EMPTY_BENCHMARK]['rough_avg_ns'];
-    foreach ($benchmarks as $benchmark) {
-        $results[$benchmark]['clean_avg_ns'] = $results[$benchmark]['rough_avg_ns'] - $overheadAvg;
-    }
-    echo json_encode($results);
-}
+    private static function runSingle(ReflectionFunction $benchmark, int $iterations): int {
+        $start = hrtime(true);
+        // Perform the operation repeatively,
+        // measuring the total time of the whole batch
+        for ($i = 0; $i < $iterations; $i++) {
+            $benchmark->invoke(null);
+        }
+        return hrtime(true) - $start;
+    }
-function runSingle(string $benchmark, int $iterationCount) : float {
-    $start = hrtime(true);
-    // Perform the operation repeatively, measuring the total time of the whole batch
-    for ($i = 0; $i < $iterationCount; $i++) {
-        $benchmark();
-    }
-    return hrtime(true) - $start;
-}
+    private static function getStandardDeviation($values, $avg) {
+        $variance = 0.0;
+        foreach ($values as $value) {
+            $variance += ($value - $avg) * ($value - $avg);
+        }
+        return sqrt($variance / count($values));
+    }
+}
-function getStandardDeviation(array $values, float $avg) : float {
-    $variance = 0.0;
-    foreach ($values as $value) {
-        $variance += ($value - $avg) * ($value - $avg);
-    }
-    return sqrt($variance / count($values));
-}
-function emptyBenchmark() {}
+function empty_func()
+{
+}
+
+//
+echo json_encode(Runner::run(@$argv[1] ?? "*/*", @$argv[2] ?? 100000));
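
For context, the measurement scheme this commit introduces can be summarized outside the Runner class. The following is a minimal, standalone sketch, not the committed code: measure() and the md5() workload are illustrative names introduced here, and it assumes PHP 7.3+ for hrtime().

<?php
// Hypothetical standalone sketch of the commit's timing scheme (PHP 7.3+).
// measure() and the md5() workload are illustrative, not part of the commit.

const CHUNK = 100;

function measure(callable $workload, int $iterations): array {
    // One long run gives the total time...
    $start = hrtime(true);
    for ($i = 0; $i < $iterations; $i++) {
        $workload();
    }
    $time = hrtime(true) - $start;

    // ...and many CHUNK-sized runs, each extrapolated to the full iteration
    // count, give a min/median/max distribution over the same workload.
    $blocks = intdiv($iterations, CHUNK);
    $times = [];
    for ($b = 0; $b < $blocks; $b++) {
        $start = hrtime(true);
        for ($i = 0; $i < CHUNK; $i++) {
            $workload();
        }
        $times[] = (hrtime(true) - $start) * $blocks;
    }
    sort($times);

    return [
        'time' => $time,                            // nanoseconds
        'min'  => $times[0],
        'max'  => end($times),
        'med'  => $times[intdiv(count($times), 2)],
    ];
}

$iterations = 100000;
$empty = measure(function () {}, $iterations);            // loop overhead only
$work  = measure(function () { md5('x'); }, $iterations); // actual benchmark

// Subtract the empty run's total time and convert ns -> ms, mirroring the
// commit's cleanup step ("results in milliseconds").
$overhead = $empty['time'];
foreach (['time', 'min', 'max', 'med'] as $k) {
    echo $k, '_ms: ', (int)(($work[$k] - $overhead) / 1000000), PHP_EOL;
}

Reporting min/med/max over extrapolated chunks instead of a single average keeps outliers such as GC pauses visible in the spread rather than folding them into one mean.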