/usr/bin/bench is in ruby-active-model-serializers 0.9.7-1.
This file is owned by root:root, with mode 0o755.
The actual contents of the file can be viewed below.
#!/usr/bin/ruby
# ActiveModelSerializers Benchmark driver
# Adapted from
# https://github.com/ruby-bench/ruby-bench-suite/blob/8ad567f7e43a044ae48c36833218423bb1e2bd9d/rails/benchmarks/driver.rb
require 'bundler'
Bundler.setup
require 'json'
require 'pathname'
require 'optparse'
require 'digest'
require 'pathname'
require 'shellwords'
require 'logger'
require 'English'

class BenchmarkDriver
  ROOT = Pathname File.expand_path(File.join('..', '..'), __FILE__)
  BASE = ENV.fetch('BASE') { ROOT.join('test', 'benchmark') }
  ESCAPED_BASE = Shellwords.shellescape(BASE)

  def self.benchmark(options)
    new(options).run
  end

  def self.parse_argv_and_run(argv = ARGV, options = {})
    options = {
      repeat_count: 1,
      pattern: [],
      env: 'CACHE_ON=on'
    }.merge!(options)

    OptionParser.new do |opts|
      opts.banner = 'Usage: bin/bench [options]'

      opts.on('-r', '--repeat-count [NUM]', 'Run benchmarks [NUM] times taking the best result') do |value|
        options[:repeat_count] = value.to_i
      end

      opts.on('-p', '--pattern <PATTERN1,PATTERN2,PATTERN3>', 'Benchmark name pattern') do |value|
        options[:pattern] = value.split(',')
      end

      opts.on('-e', '--env <var1=val1,var2=val2,var3=val3>', 'ENV variables to pass in') do |value|
        options[:env] = value.split(',')
      end
    end.parse!(argv)

    benchmark(options)
  end
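
  # Illustrative invocation handled by the option parser above (the pattern name is hypothetical):
  #   bin/bench --repeat-count 3 --pattern caching --env CACHE_ON=off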

  attr_reader :commit_hash, :base

  # Based on logfmt:
  # https://www.brandur.org/logfmt
  # For a more complete implementation see:
  # https://github.com/arachnid-cb/logfmtr/blob/master/lib/logfmtr/base.rb
  # For usage see:
  # https://blog.codeship.com/logfmt-a-log-format-thats-easy-to-read-and-write/
  # https://engineering.heroku.com/blogs/2014-09-05-hutils-explore-your-structured-data-logs/
  # For a Ruby parser see:
  # https://github.com/cyberdelia/logfmt-ruby
  def self.summary_logger(device = 'output.txt')
    require 'time'
    logger = Logger.new(device)
    logger.level = Logger::INFO
    logger.formatter = proc { |severity, datetime, progname, msg|
      msg = "'#{msg}'"
      "level=#{severity} time=#{datetime.utc.iso8601(6)} pid=#{Process.pid} progname=#{progname} msg=#{msg}#{$INPUT_RECORD_SEPARATOR}"
    }
    logger
  end
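
  # A formatted line looks like this (values are illustrative):
  #   level=INFO time=2016-05-12T17:54:31.123456Z pid=12345 progname= msg='Benchmark results:'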

  def self.stdout_logger
    logger = Logger.new(STDOUT)
    logger.level = Logger::INFO
    logger.formatter = proc { |_, _, _, msg| "#{msg}#{$INPUT_RECORD_SEPARATOR}" }
    logger
  end

  def initialize(options)
    @writer = ENV['SUMMARIZE'] ? self.class.summary_logger : self.class.stdout_logger
    @repeat_count = options[:repeat_count]
    @pattern = options[:pattern]
    @commit_hash = options.fetch(:commit_hash) { `git rev-parse --short HEAD`.chomp }
    @base = options.fetch(:base) { ESCAPED_BASE }
    @env = Array(options[:env]).join(' ')
    @rubyopt = options[:rubyopt] # TODO: rename
  end
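
  # Note: setting SUMMARIZE in the environment routes results to output.txt in logfmt
  # form via summary_logger; otherwise stdout_logger writes them to STDOUT.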

  def run
    files.each do |path|
      next if !@pattern.empty? && /#{@pattern.join('|')}/ !~ File.basename(path)
      run_single(Shellwords.shellescape(path))
    end
  end

  private

  def files
    Dir[File.join(base, 'bm_*')]
  end

  def run_single(path)
    script = "RAILS_ENV=production #{@env} ruby #{@rubyopt} #{path}"
    environment = `ruby -v`.chomp.strip[/\d+\.\d+\.\d+\w+/]

    runs_output = measure(script)

    if runs_output.empty?
      results = { error: :no_results }
      return
    end

    results = {}
    results['commit_hash'] = commit_hash
    results['version'] = runs_output.first['version']
    results['rails_version'] = runs_output.first['rails_version']
    results['benchmark_run[environment]'] = environment
    results['runs'] = []

    runs_output.each do |output|
      results['runs'] << {
        'benchmark_type[category]' => output['label'],
        'benchmark_run[result][iterations_per_second]' => output['iterations_per_second'].round(3),
        'benchmark_run[result][total_allocated_objects_per_iteration]' => output['total_allocated_objects_per_iteration']
      }
    end
  ensure
    results && report(results)
  end
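
  # report pretty-prints the collected results as JSON; the output resembles
  # (values are illustrative):
  #   {
  #     "commit_hash": "abc1234",
  #     "version": "0.9.7",
  #     "rails_version": "4.2.7",
  #     "benchmark_run[environment]": "2.3.1p112",
  #     "runs": [...]
  #   }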
  def report(results)
    @writer.info { 'Benchmark results:' }
    @writer.info { JSON.pretty_generate(results) }
  end

  def summarize(result)
    puts "#{result['label']} #{result['iterations_per_second']}/ips; #{result['total_allocated_objects_per_iteration']} objects"
  end

  # FIXME: ` provides the full output but it'll return failed output as well.
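  # Each benchmark script is expected to emit one JSON document per line; keys are the
  # ones consumed below, values are illustrative:
  #   {"label":"caching on","version":"0.9.7","rails_version":"4.2.7","iterations_per_second":1234.567,"total_allocated_objects_per_iteration":8900}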
  def measure(script)
    results = Hash.new { |h, k| h[k] = [] }

    @repeat_count.times do
      output = sh(script)
      output.each_line do |line|
        next if line.nil?
        begin
          result = JSON.parse(line)
        rescue JSON::ParserError
          result = { error: line } # rubocop:disable Lint/UselessAssignment
        else
          summarize(result)
          results[result['label']] << result
        end
      end
    end

    results.map do |_, bm_runs|
      bm_runs.sort_by do |run|
        run['iterations_per_second']
      end.last
    end
  end

  def sh(cmd)
    `#{cmd}`
  end
end
BenchmarkDriver.parse_argv_and_run if $PROGRAM_NAME == __FILE__