Skip to content

Commit b32130f

Browse files
add tables, and fix bug in helper
1 parent 83779e3 commit b32130f

File tree

66 files changed

+1710
-45
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

66 files changed

+1710
-45
lines changed

.github/workflows/automated-experiment-result-checker.yml

Lines changed: 15 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -23,44 +23,48 @@ jobs:
2323
ref: ${{ github.event.pull_request.head.sha }}
2424
fetch-depth: 0
2525

26-
- name: Check for updated experiment result graphs
26+
- name: Check for updated experiment result graphs and tables
2727
run: |
2828
set -e
2929
cd "$(git rev-parse --show-toplevel)"
3030
3131
# TODO: Include lower bound windup experiment once we have a way to make it run in a reasonable time.
32-
# Find all PNGs, excluding those with "windup" in their filename
32+
# Find all PNGs and TXT files, excluding those with "windup" in their filename
3333
mapfile -t all_pngs < <(find experiments/results/main_graphs experiments/results/throughput_graphs experiments/results/duration_graphs -type f -name '*.png' ! -name '*windup*.png' | sort)
34+
mapfile -t all_txts < <(find experiments/results/tables -type f -name '*.txt' ! -name '*windup*.txt' 2>/dev/null | sort)
3435
35-
# Find all changed PNGs in the latest commit
36-
mapfile -t changed_pngs < <(git diff --name-only --diff-filter=AM HEAD~1..HEAD | grep -E '^experiments/results/(main_graphs|throughput_graphs|duration_graphs)/.*\.png$' | grep -v windup | sort)
36+
# Combine all result files
37+
all_files=("${all_pngs[@]}" "${all_txts[@]}")
3738
38-
# Report any PNGs that are not updated in the latest commit
39+
# Find all changed PNGs and TXTs in the latest commit
40+
mapfile -t changed_files < <(git diff --name-only --diff-filter=AM HEAD~1..HEAD | grep -E '^experiments/results/(main_graphs|throughput_graphs|duration_graphs)/.*\.png$|^experiments/results/tables/.*\.txt$' | grep -v windup | sort)
41+
42+
# Report any files that are not updated in the latest commit
3943
declare -a not_updated=()
40-
for file in "${all_pngs[@]}"; do
41-
if ! printf "%s\n" "${changed_pngs[@]}" | grep -qx "$file"; then
44+
for file in "${all_files[@]}"; do
45+
if ! printf "%s\n" "${changed_files[@]}" | grep -qx "$file"; then
4246
not_updated+=("$file")
4347
fi
4448
done
4549
4650
if [ ${#not_updated[@]} -gt 0 ]; then
47-
echo "❌ The following result graph PNG files have NOT been updated in the latest commit:"
51+
echo "❌ The following result files have NOT been updated in the latest commit:"
4852
for f in "${not_updated[@]}"; do
4953
echo " - $f"
5054
done
5155
echo ""
52-
echo "Every commit must update all non-windup experiment result graphs. You may be missing updates."
56+
echo "Every commit must update all non-windup experiment result graphs and tables. You may be missing updates."
5357
echo "Run:"
5458
echo ""
5559
echo " cd experiments"
5660
echo " bundle install"
5761
echo " bundle exec ruby run_all_experiments.rb"
5862
echo ""
59-
echo "Commit the updated graphs to resolve this check."
63+
echo "Commit the updated graphs and tables to resolve this check."
6064
exit 1
6165
fi
6266
63-
echo "✅ All non-windup experiment result graphs are up to date for this commit!"
67+
echo "✅ All non-windup experiment result graphs and tables are up to date for this commit!"
6468
6569
6670

experiments/experiment_helpers.rb

Lines changed: 108 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,11 @@ def initialize(
4646
@main_results_path = File.join(File.dirname(__FILE__), "results/main_graphs")
4747
@duration_results_path = File.join(File.dirname(__FILE__), "results/duration_graphs")
4848
@throughput_results_path = File.join(File.dirname(__FILE__), "results/throughput_graphs")
49+
@table_results_path = File.join(File.dirname(__FILE__), "results/tables")
4950
FileUtils.mkdir_p(@main_results_path) unless File.directory?(@main_results_path)
5051
FileUtils.mkdir_p(@duration_results_path) unless File.directory?(@duration_results_path)
5152
FileUtils.mkdir_p(@throughput_results_path) unless File.directory?(@throughput_results_path)
53+
FileUtils.mkdir_p(@table_results_path) unless File.directory?(@table_results_path)
5254
@num_threads = num_threads
5355
@requests_per_second = requests_per_second
5456
@x_axis_label_interval = x_axis_label_interval || phase_duration
@@ -62,6 +64,8 @@ def initialize(
6264
base_error_rate
6365
end
6466
@with_max_threads = with_max_threads
67+
@table_filename = graph_filename ? graph_filename.sub(/\.png$/, ".txt") : "#{resource_name}.txt"
68+
@table_outputs = []
6569
end
6670

6771
def run
@@ -137,12 +141,14 @@ def start_threads
137141
end
138142

139143
def start_request_arrival
140-
@request_queue = Concurrent::Array.new
141-
Thread.new do
144+
@request_queue = Queue.new
145+
@arrival_thread = Thread.new do
142146
until @done
143147
sleep(1.0 / @requests_per_second)
144148
@request_queue << true
145149
end
150+
# Push sentinel values to wake up all worker threads on shutdown
151+
@num_threads.times { @request_queue << :shutdown }
146152
end
147153
end
148154

@@ -155,8 +161,11 @@ def start_processing_threads
155161
thread_id = Thread.current.object_id
156162
@thread_timings[thread_id] = { samples: [] }
157163

158-
until @done
159-
@request_queue.shift
164+
loop do
165+
# Block waiting for a request token
166+
token = @request_queue.pop
167+
break if token == :shutdown || @done
168+
160169
service = @services.sample
161170
# technically, we are creating a new resource instance on every request.
162171
# But the resource class is pretty much only a wrapper around things that are longer-living.
@@ -293,6 +302,7 @@ def execute_phases
293302
def wait_for_completion
294303
@done = true
295304
puts "\nWaiting for all request threads to finish..."
305+
@arrival_thread.join if @arrival_thread # Wait for arrival thread to push shutdown tokens
296306
@request_threads.each(&:join)
297307

298308
Semian.unsubscribe(@subscription) if @subscription
@@ -309,14 +319,33 @@ def wait_for_completion
309319
end
310320

311321
def generate_analysis
312-
puts "\n\n=== Experiment Complete ==="
313-
puts "Actual duration: #{(@end_time - @start_time).round(2)} seconds"
314-
puts "\nGenerating analysis..."
322+
output_lines = []
323+
output_lines << "\n\n=== Experiment Complete ==="
324+
output_lines << "Actual duration: #{(@end_time - @start_time).round(2)} seconds"
325+
output_lines << "\nGenerating analysis..."
326+
327+
# Print to console
328+
output_lines.each { |line| puts line }
329+
330+
# Capture experiment metadata for table output
331+
@table_outputs << "=" * 80
332+
@table_outputs << "EXPERIMENT: #{@experiment_name}"
333+
@table_outputs << "=" * 80
334+
@table_outputs << "Graph Title: #{@graph_title}"
335+
@table_outputs << "Adaptive Mode: #{@is_adaptive}"
336+
@table_outputs << "Actual Duration: #{(@end_time - @start_time).round(2)} seconds"
337+
@table_outputs << "Threads: #{@num_threads}"
338+
@table_outputs << "Requests per Second: #{@requests_per_second}"
339+
@table_outputs << "Service Count: #{@service_count}"
340+
@table_outputs << ""
315341

316342
display_summary_statistics
317343
display_time_based_analysis
318344
display_thread_timing_statistics
319345
display_pid_controller_state
346+
347+
# Save table outputs to file
348+
save_table_outputs
320349
end
321350

322351
def display_summary_statistics
@@ -325,18 +354,28 @@ def display_summary_statistics
325354
total_error = @outcomes.values.sum { |data| data[:error] }
326355
total_requests = total_success + total_circuit_open + total_error
327356

328-
puts "\n=== Summary Statistics ==="
329-
puts "Total Requests: #{total_requests}"
330-
puts " Successes: #{total_success} (#{(total_success.to_f / total_requests * 100).round(2)}%)"
331-
puts " Rejected: #{total_circuit_open} (#{(total_circuit_open.to_f / total_requests * 100).round(2)}%)"
332-
puts " Errors: #{total_error} (#{(total_error.to_f / total_requests * 100).round(2)}%)"
357+
output_lines = []
358+
output_lines << "\n=== Summary Statistics ==="
359+
output_lines << "Total Requests: #{total_requests}"
360+
output_lines << " Successes: #{total_success} (#{(total_success.to_f / total_requests * 100).round(2)}%)"
361+
output_lines << " Rejected: #{total_circuit_open} (#{(total_circuit_open.to_f / total_requests * 100).round(2)}%)"
362+
output_lines << " Errors: #{total_error} (#{(total_error.to_f / total_requests * 100).round(2)}%)"
363+
364+
# Print to console and save to table output
365+
output_lines.each do |line|
366+
puts line
367+
@table_outputs << line.sub(/^\n/, "") # Remove leading newline for table output
368+
end
369+
@table_outputs << ""
333370
end
334371

335372
def display_time_based_analysis
336373
bucket_size = @phase_duration
337374
num_buckets = (@experiment_duration / bucket_size.to_f).ceil
338375

339376
puts "\n=== Time-Based Analysis (#{bucket_size}-second buckets) ==="
377+
@table_outputs << "=== Time-Based Analysis (#{bucket_size}-second buckets) ==="
378+
340379
(0...num_buckets).each do |bucket_idx|
341380
bucket_start = @outcomes.keys[0] + (bucket_idx * bucket_size)
342381
bucket_data = @outcomes.select { |time, _| time >= bucket_start && time < bucket_start + bucket_size }
@@ -355,14 +394,18 @@ def display_time_based_analysis
355394
phase_error_rate = degradation_phase.error_rate || @base_error_rate
356395
phase_label = "[Target: #{(phase_error_rate * 100).round(1)}%]"
357396

358-
puts "#{status} #{bucket_time_range} #{phase_label}: #{bucket_total} requests | Success: #{bucket_success} | Errors: #{bucket_errors} (#{error_pct}%) | Rejected: #{bucket_circuit} (#{circuit_pct}%)"
397+
output_line = "#{status} #{bucket_time_range} #{phase_label}: #{bucket_total} requests | Success: #{bucket_success} | Errors: #{bucket_errors} (#{error_pct}%) | Rejected: #{bucket_circuit} (#{circuit_pct}%)"
398+
puts output_line
399+
@table_outputs << output_line
359400
end
401+
@table_outputs << ""
360402
end
361403

362404
def display_thread_timing_statistics
363405
return if @thread_timings.empty?
364406

365407
puts "\n=== Thread Timing Statistics ==="
408+
@table_outputs << "=== Thread Timing Statistics ==="
366409

367410
total_times = @thread_timings.values.map { |t| t[:samples].sum { |s| s[:duration] } }
368411
request_counts = @thread_timings.values.map { |t| t[:samples].size }
@@ -378,18 +421,25 @@ def display_thread_timing_statistics
378421
# Calculate utilization (time spent in requests vs wall clock time)
379422
avg_utilization = (avg_thread_time / total_wall_time * 100)
380423

381-
puts "Total threads: #{@thread_timings.size}"
382-
puts "Experiment wall clock duration: #{total_wall_time.round(2)}s"
383-
puts "\nTime spent making requests per thread:"
384-
puts " Min: #{min_thread_time.round(2)}s"
385-
puts " Max: #{max_thread_time.round(2)}s"
386-
puts " Average: #{avg_thread_time.round(2)}s"
387-
puts " Total (all threads): #{sum_thread_time.round(2)}s"
388-
puts "\nThread utilization:"
389-
puts " Average: #{avg_utilization.round(2)}% (time in requests / wall clock time)"
390-
puts "\nRequests per thread:"
391-
puts " Average: #{avg_requests.round(0)} requests"
392-
puts " Average time per request: #{(avg_thread_time / avg_requests).round(4)}s" if avg_requests > 0
424+
output_lines = []
425+
output_lines << "Total threads: #{@thread_timings.size}"
426+
output_lines << "Experiment wall clock duration: #{total_wall_time.round(2)}s"
427+
output_lines << "\nTime spent making requests per thread:"
428+
output_lines << " Min: #{min_thread_time.round(2)}s"
429+
output_lines << " Max: #{max_thread_time.round(2)}s"
430+
output_lines << " Average: #{avg_thread_time.round(2)}s"
431+
output_lines << " Total (all threads): #{sum_thread_time.round(2)}s"
432+
output_lines << "\nThread utilization:"
433+
output_lines << " Average: #{avg_utilization.round(2)}% (time in requests / wall clock time)"
434+
output_lines << "\nRequests per thread:"
435+
output_lines << " Average: #{avg_requests.round(0)} requests"
436+
output_lines << " Average time per request: #{(avg_thread_time / avg_requests).round(4)}s" if avg_requests > 0
437+
438+
output_lines.each do |line|
439+
puts line
440+
@table_outputs << line
441+
end
442+
@table_outputs << ""
393443
end
394444

395445
def display_pid_controller_state
@@ -430,17 +480,24 @@ def display_pid_controller_state
430480
end
431481

432482
puts "\n=== PID Controller State Per Second (Aggregated across threads) ==="
433-
puts format("%-8s %-10s %-20s %-15s %-20s %-20s %-20s %-15s", "Window", "# Threads", "Err % (min-max)", "Ideal Err %", "Reject % (min-max)", "Integral (min-max)", "Derivative (min-max)", "Total Req Time")
434-
puts "-" * 150
483+
@table_outputs << "=== PID Controller State Per Second (Aggregated across threads) ==="
484+
485+
header = format("%-8s %-10s %-22s %-15s %-22s %-25s %-25s %-15s", "Window", "# Threads", "Err % (min-max)", "Ideal Err %", "Reject % (min-max)", "Integral (min-max)", "Derivative (min-max)", "Total Req Time")
486+
separator = "-" * 150
487+
488+
puts header
489+
puts separator
490+
@table_outputs << header
491+
@table_outputs << separator
435492

436493
aggregated_snapshots.each_with_index do |snapshot, idx|
437494
error_rate_str = format_metric_range(snapshot[:error_rate_avg], snapshot[:error_rate_min], snapshot[:error_rate_max], is_percent: true)
438495
reject_rate_str = format_metric_range(snapshot[:rejection_rate_avg], snapshot[:rejection_rate_min], snapshot[:rejection_rate_max], is_percent: true)
439496
integral_str = format_metric_range(snapshot[:integral_avg], snapshot[:integral_min], snapshot[:integral_max])
440497
derivative_str = format_metric_range(snapshot[:derivative_avg], snapshot[:derivative_min], snapshot[:derivative_max])
441498

442-
puts format(
443-
"%-8d %-10d %-20s %-15s %-20s %-20s %-20s %-15s",
499+
row = format(
500+
"%-8d %-10d %-22s %-15s %-22s %-25s %-25s %-15s",
444501
idx + 1,
445502
snapshot[:thread_count],
446503
error_rate_str,
@@ -450,13 +507,22 @@ def display_pid_controller_state
450507
derivative_str,
451508
"#{(snapshot[:total_request_time] || 0).round(2)}s",
452509
)
510+
puts row
511+
@table_outputs << row
453512
end
454513

455-
puts "\n📊 Key Observations:"
456-
puts " - Timestamps captured: #{aggregated_snapshots.length}"
457-
puts " - Max avg rejection rate: #{(aggregated_snapshots.map { |s| s[:rejection_rate_avg] }.max * 100).round(2)}%"
458-
puts " - Avg integral range: #{aggregated_snapshots.map { |s| s[:integral_avg] }.min.round(4)} to #{aggregated_snapshots.map { |s| s[:integral_avg] }.max.round(4)}"
459-
puts " - Thread counts per timestamp: min #{aggregated_snapshots.map { |s| s[:thread_count] }.min}, max #{aggregated_snapshots.map { |s| s[:thread_count] }.max}"
514+
observations = []
515+
observations << "\n📊 Key Observations:"
516+
observations << " - Timestamps captured: #{aggregated_snapshots.length}"
517+
observations << " - Max avg rejection rate: #{(aggregated_snapshots.map { |s| s[:rejection_rate_avg] }.max * 100).round(2)}%"
518+
observations << " - Avg integral range: #{aggregated_snapshots.map { |s| s[:integral_avg] }.min.round(4)} to #{aggregated_snapshots.map { |s| s[:integral_avg] }.max.round(4)}"
519+
observations << " - Thread counts per timestamp: min #{aggregated_snapshots.map { |s| s[:thread_count] }.min}, max #{aggregated_snapshots.map { |s| s[:thread_count] }.max}"
520+
521+
observations.each do |line|
522+
puts line
523+
@table_outputs << line
524+
end
525+
@table_outputs << ""
460526
end
461527

462528
def format_metric_range(avg, min, max, is_percent: false)
@@ -594,6 +660,14 @@ def add_state_transition_markers(graph, bucketed_data, bucket_size, num_buckets)
594660
}
595661
end
596662
end
663+
664+
def save_table_outputs
665+
table_path = File.join(@table_results_path, @table_filename)
666+
File.open(table_path, "w") do |file|
667+
@table_outputs.each { |line| file.puts(line) }
668+
end
669+
puts "\nTable outputs saved to #{table_path}"
670+
end
597671
end
598672
end
599673
end
-178 KB
Loading
714 Bytes
Loading
108 KB
Loading
-14.6 KB
Loading
-39.8 KB
Loading
1.13 KB
Loading
-124 KB
Loading
1.63 KB
Loading

0 commit comments

Comments (0)