#!/usr/bin/env ruby
require 'date'
require 'json'
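# Usage (the script file name here is arbitrary; input is read via ARGF, i.e.
# from stdin or from file arguments remaining after the mode):
#
#   ruby cstimer.rb per-session cstimer_export.txt
#   ruby cstimer.rb 333-sessions < cstimer_export.txt
#   ruby cstimer.rb merge-333-sessions cstimer_export.txt > merged.txt
#
# Modes: per-session, 333-sessions, 333-pb, merge-333-sessions, tweak, debug.
# See main() at the bottom.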
# cstimer data format:
#
# {
#   "session1": [Solve, ...],
#   "session2": [Solve, ...],
#   ...
#   "session{N}": [Solve, ...],
#   "properties": {
#     "sessionData": {
#       "1": SessionMeta,
#       "2": SessionMeta,
#       ...
#       "{N}": SessionMeta
#     },
#     "sessionN": {N}, # Number of sessions. (*A)
#     "session": 3,    # Rank of the current session. (*A)
#     <global options...>
#   }
# }
#
# (*A) Must be recalculated after merging sessions.
#
# SessionMeta = {
#   "name": "2.23 444wca", # Session name.
#   "opt": {
#     "scrType": "444wca", # scrType does not exist for WCA 3×3×3.
#     <per-session options...>
#   },
#   "rank": 1, # Display order. Might differ from data order.
#              # (For example, the rank of session3 might be 5.)
#   "date": [
#     1637158953, # When the first solve was started. (*B) (*C)
#     1640939074  # When the last solve was started. (*B) (*C)
#   ],
#   "stat": [
#     2457,  # Number of solves. (*B)
#     36,    # Number of DNFs. (*B)
#     37128  # Mean of all solves, in milliseconds. (*B) (*D)
#   ]
# }
#
# (*B) Must be recalculated after merging/tweaking solves.
# (*C) Null if the session has no solves.
# (*D) -1 if the session has no solves.
#
# Solve = [
#   [
#     -1,   # Flag. 0 = OK, 2000 = +2, -1 = DNF.
#     66914 # Time in milliseconds.
#   ],              # ... so this solve is a DNF (1:06.914).
#   "B2 L2 U' ...", # Scramble.
#   "",             # Comment. Empty string in most cases.
#   1637158953      # When this solve (not inspection) was started. Unix time.
# ]
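#
# Note that in an actual export, "properties"["sessionData"] is stored as a
# JSON-encoded *string*, which is why the code below applies JSON.load to it
# a second time. A minimal, hypothetical export matching the shape above:
#
# {
#   "session1": [
#     [[0, 12345], "R U R' U' ...", "", 1637158953]
#   ],
#   "properties": {
#     "sessionData": "{\"1\":{\"name\":\"example\",\"opt\":{},\"rank\":1,
#                       \"date\":[1637158953,1637158953],\"stat\":[1,0,12345]}}",
#     "sessionN": 1,
#     "session": 1
#   }
# }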
DNF = Float::INFINITY

class Array
  # Solve accessors.
  def xtime
    self[0] # [flag, time]
  end

  def scramble
    self[1]
  end

  def comment
    self[2]
  end

  def started_at
    self[3]
  end

  # xtime accessors.
  def flag
    self[0]
  end

  def time
    self[1]
  end
end
def is_dnf(solve)
  solve.xtime.flag == -1
end

def normalize_time_to_int(solve)
  flag, time = solve.xtime
  if flag == -1
    DNF
  else
    time + flag
  end
end
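# For example (hypothetical solves; only the [flag, time] pair matters here):
#   normalize_time_to_int([[0, 12345], '', '', 0])    # => 12345 (plain solve)
#   normalize_time_to_int([[2000, 12345], '', '', 0]) # => 14345 (+2 penalty)
#   normalize_time_to_int([[-1, 12345], '', '', 0])   # => DNF (Float::INFINITY)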
def normalize_time_to_float(solve)
  int_time = normalize_time_to_int(solve)
  return DNF if int_time == DNF
  int_time / 1000.0
end

def format_int_time(int_time)
  sprintf('%.3f', int_time.to_f / 1000)
end
def ao(solves, n)
  return DNF if solves.count < n
  best = DNF
  cut = (n * 0.05).ceil
  (0..(solves.count - n)).each do |i|
    target_times = solves.slice(i, n)
      .map { |solve| normalize_time_to_int(solve) }
      .sort
      .slice(cut, n - cut * 2)
    average = target_times.sum.to_f / (n - cut * 2)
    average = average.round unless average == DNF
    best = average if average < best
  end
  best
end
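# ao(solves, n) is a rolling WCA-style trimmed average: for every window of n
# consecutive solves, the best and worst 5% (rounded up) are dropped and the
# rest are averaged; the best window wins. A DNF that survives the trim turns
# the whole window into DNF, because Float::INFINITY propagates through sum.
# Hypothetical example, in milliseconds:
#   solves = [10000, 12000, 11000, 99999, 10500].map { |t| [[0, t], '', '', 0] }
#   ao(solves, 5) # => 11167 (mean of 10500, 11000, 12000 after trimming)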
def single(solves)
  solves.map { |solve| normalize_time_to_int(solve) }.min
end

def session_mean(solves)
  valid_solves = solves.filter { |solve| !is_dnf(solve) }
  return -1 if valid_solves.count == 0
  sum = valid_solves.map { |solve| normalize_time_to_int(solve) }.sum
  (sum.to_f / valid_solves.count).round
end

def count_within(solves, range)
  solves.map { |solve| normalize_time_to_float(solve) }
    .filter { |time| range.include?(time) }
    .count
end
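# count_within buckets by seconds (via normalize_time_to_float), so e.g.
#   count_within(solves, (10...12)) # solves with 10.000 <= time < 12.000 s
# DNFs (Float::INFINITY) only ever land in an endless range such as (40..).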
def summarize_per_session(data)
  puts %w[
    Session
    single
    ao5
    ao12
    ao25
    ao50
    ao100
    Note
  ].join("\t")
  session_metas = JSON.load(data['properties']['sessionData'])
  session_metas.keys.sort_by { |key| session_metas[key]['rank'] }.each do |key|
    solves = data["session#{key}"]
    name = session_metas[key]['name']
    date, _, note = name.split(' ', 3)
    puts [
      date
    ].concat([
      single(solves),
      ao(solves, 5),
      ao(solves, 12),
      ao(solves, 25),
      ao(solves, 50),
      ao(solves, 100)
    ].map { |time|
      if time == DNF
        'DNF'
      else
        format_int_time(time)
      end
    }).concat([
      '',
      note
    ]).join("\t")
  end
end
def is_valid_333_session?(session_meta)
  name = session_meta['name']
  !session_meta['opt'].has_key?('scrType') &&
    !name.match?(/\btry\b/) &&
    !name.match?(/\bImpossible\b/) &&
    !name.match?(/\bOH\b/)
end
def summarize_333_sessions_by_solve_date(data)
  session_meta_map = JSON.load(data['properties']['sessionData'])
  solves = session_meta_map.filter { |_, session_meta|
    is_valid_333_session?(session_meta)
  }.flat_map { |key, session_meta|
    p [key, session_meta] if data["session#{key}"] == nil
    data["session#{key}"]
  }
  puts %w[
    Date
    single
    ao5
    ao12
    ao25
    ao50
    ao100
    total
    sub-10
    10+
    12+
    14+
    16+
    18+
    20+
    22+
    24+
    26+
    28+
    30+
    32+
    34+
    36+
    38+
    40+
  ].join("\t")
  solves
    .sort_by { |solve| solve.started_at }
    .group_by { |solve| Time.at(solve.started_at).strftime('%Y-%m-%d') }
    .each do |date, solves|
      next if solves.count < 50
      puts [
        date
      ].concat([
        single(solves),
        ao(solves, 5),
        ao(solves, 12),
        ao(solves, 25),
        ao(solves, 50),
        ao(solves, 100),
      ].map { |time|
        if time == DNF
          ''
        else
          format_int_time(time)
        end
      }).concat([
        solves.count,
        count_within(solves, (0...10)),
        count_within(solves, (10...12)),
        count_within(solves, (12...14)),
        count_within(solves, (14...16)),
        count_within(solves, (16...18)),
        count_within(solves, (18...20)),
        count_within(solves, (20...22)),
        count_within(solves, (22...24)),
        count_within(solves, (24...26)),
        count_within(solves, (26...28)),
        count_within(solves, (28...30)),
        count_within(solves, (30...32)),
        count_within(solves, (32...34)),
        count_within(solves, (34...36)),
        count_within(solves, (36...38)),
        count_within(solves, (38...40)),
        count_within(solves, (40..)),
      ]).join("\t")
    end
end
def summarize_333_pb_progress_by_solve_date(data)
  session_meta_map = JSON.load(data['properties']['sessionData'])
  solves = session_meta_map.filter { |_, session_meta|
    is_valid_333_session?(session_meta)
  }.flat_map { |key, session_meta|
    p [key, session_meta] if data["session#{key}"] == nil
    data["session#{key}"]
  }
  puts %w[
    Date
    single
    ao5
    ao12
    ao25
    ao50
    ao100
  ].join("\t")
  best_times_per_day = solves
    .sort_by { |solve| solve.started_at }
    .group_by { |solve| Time.at(solve.started_at).strftime('%Y-%m-%d') }
    .filter { |date, solves| solves.count >= 50 }
    .to_h { |date, solves|
      [
        date,
        {
          single: single(solves),
          ao5: ao(solves, 5),
          ao12: ao(solves, 12),
          ao25: ao(solves, 25),
          ao50: ao(solves, 50),
          ao100: ao(solves, 100),
        }
      ]
    }
  dates = if false
    # Print PB only for actual session dates.
    best_times_per_day.keys
  else
    # Always print PB even for dates without solves.
    first_date = Date.parse(best_times_per_day.keys.first)
    last_date = Date.parse(best_times_per_day.keys.last)
    (0..).lazy
      .map { |i| first_date + i }
      .take_while { |d| d <= last_date }
      .map { |d| d.iso8601 }
  end
  categories = %i[single ao5 ao12 ao25 ao50 ao100]
  current_pb_hash = categories
    .to_h { |category|
      [
        category,
        # best_times_per_day.map { |x| x[category] }.filter { |time| time != DNF }.first
        40000 # Fallback PB time for better rendering.
      ]
    }
  dates.each do |date|
    x = best_times_per_day[date]
    puts [
      if true
        # Always print the date.
        date
      else
        # Print only the first date of each month.
        if date.end_with?('-01')
          date
        else
          ''
        end
      end
    ].concat(
      categories
        .map { |category|
          if false
            # Print only updated PBs.
            if x && x[category] < current_pb_hash[category]
              current_pb_hash[category] = x[category]
              format_int_time(x[category])
            else
              ''
            end
          else
            # Always print the current PB.
            if x && x[category] < current_pb_hash[category]
              current_pb_hash[category] = x[category]
            end
            format_int_time(current_pb_hash[category])
          end
        }
    ).join("\t")
  end
end
def pretty_time(epoch)
  Time.at(epoch).strftime('%Y-%m-%d %H:%M')
end

def inspect_session_period(data)
  session_meta_map = JSON.load(data['properties']['sessionData'])
  session_meta_map
    .sort_by { |_, session_meta| session_meta['rank'] }
    .each do |key, session_meta|
      solves = data["session#{key}"].sort_by { |solve| solve.started_at }
      if true
        printf(
          "%-28s meta[%s/%s] solves[%s/%s]\n",
          session_meta['name'],
          pretty_time(session_meta['date'][0]),
          pretty_time(session_meta['date'][1]),
          pretty_time(solves.first.started_at),
          pretty_time(solves.last.started_at)
        )
      else
        printf(
          "%-28s rank[%2d] key[%2d] meta[%s/%s] solves[%s/%s]\n",
          session_meta['name'],
          session_meta['rank'],
          key,
          pretty_time(session_meta['date'][0]),
          pretty_time(session_meta['date'][1]),
          pretty_time(solves.first.started_at),
          pretty_time(solves.last.started_at)
        )
      end
    end
end
TWEAK_TARGET_SESSION_NAMES = [
  '3.12 333 rs3m ml t1',
]
TWEAK_DELTA = 1 * 60 * 60
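# Shifting every start time in the listed sessions back by TWEAK_DELTA (one
# hour) moves the solves onto the calendar date the session logically belongs
# to (presumably a session that ran just past midnight).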
def tweak_sessions_to_match_logical_date(data)
  session_meta_map = JSON.load(data['properties']['sessionData'])
  tweaked_pairs = session_meta_map
    .sort_by { |_, session_meta| session_meta['rank'] }
    .map { |key, session_meta|
      solves = data["session#{key}"].sort_by { |solve| solve.started_at }
      if !TWEAK_TARGET_SESSION_NAMES.include?(session_meta['name'])
        [session_meta, solves]
      else
        tweaked_solves = solves.map { |xtime, scramble, comment, started_at|
          [xtime, scramble, comment, started_at - TWEAK_DELTA]
        }
        [
          session_meta.merge({
            'date' => [
              tweaked_solves.first.started_at,
              tweaked_solves.last.started_at
            ]
            # No need to recalculate 'stat' in this case.
          }),
          tweaked_solves
        ]
      end
    }
  tweaked_data = {}
  tweaked_pairs.each do |session_meta, solves|
    tweaked_data["session#{session_meta['rank']}"] = solves
  end
  tweaked_data['properties'] = data['properties'].merge({
    'sessionData' => JSON.dump(
      tweaked_pairs
        .map { |session_meta, _| [session_meta['rank'], session_meta] }
        .to_h
    )
  })
  print JSON.dump(tweaked_data)
end
def is_333_session_to_be_merged(session_meta, now)
  return false if session_meta['opt'].has_key?('scrType')
  session_started_at_epoch = session_meta['date'][0]
  return false if session_started_at_epoch == nil
  session_started_at = Time.at(session_started_at_epoch)
  session_started_at.year < now.year ||
    (session_started_at.year == now.year && session_started_at.month < now.month)
end
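# For example, with now = 2022-03-15, a 3×3×3 session whose first solve was in
# 2022-02 is merged, while one first solved in 2022-03 is left untouched.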
def extract_cube_info(session_name)
  # '333 2021'        => ''
  # '2.28 333 {cube}' => '{cube}'
  tokens = session_name.strip.split(/ +/, 3)
  tokens[2] || ''
end
def merge_333_sessions_by_solved_year(data)
  session_meta_map = JSON.load(data['properties']['sessionData'])
  now = Time.new
  untouched_sorted_session_meta_map = session_meta_map
    .filter { |_, session_meta| !is_333_session_to_be_merged(session_meta, now) }
    .sort_by { |_, session_meta| session_meta['rank'] }
  tweaked_solve_groups = session_meta_map
    .filter { |_, session_meta| is_333_session_to_be_merged(session_meta, now) }
    .flat_map { |key, session_meta|
      cube_info = extract_cube_info(session_meta['name'])
      solves = data["session#{key}"]
      solves.map { |xtime, scramble, comment, started_at|
        [
          xtime,
          scramble,
          if comment != '' then comment else cube_info end,
          started_at
        ]
      }
    }
    .sort_by { |solve| solve.started_at }
    .group_by { |solve| Time.at(solve.started_at).year }
  tweaked_session_data = {}
  tweaked_solve_groups.each_with_index do |g, index|
    year, solves = g
    tweaked_session_data["#{index + 1}"] = {
      'name' => "333 #{year}",
      'opt' => {},
      'rank' => index + 1,
      'stat' => [
        solves.count,
        solves.filter { |solve| is_dnf(solve) }.count,
        session_mean(solves)
      ],
      'date' => [
        solves.first.started_at,
        solves.last.started_at
      ]
    }
  end
  untouched_sorted_session_meta_map.each_with_index do |kv, index|
    key, session_meta = kv
    new_rank = index + 1 + tweaked_solve_groups.count
    tweaked_session_data["#{new_rank}"] = {
      'name' => session_meta['name'],
      'opt' => session_meta['opt'],
      'rank' => new_rank,
      'stat' => session_meta['stat'],
      'date' => session_meta['date']
    }
  end
  tweaked_data = {}
  tweaked_solve_groups.each_with_index do |g, index|
    year, solves = g
    tweaked_data["session#{index + 1}"] = solves
  end
  untouched_sorted_session_meta_map.each_with_index do |kv, index|
    key, session_meta = kv
    new_rank = index + 1 + tweaked_solve_groups.count
    tweaked_data["session#{new_rank}"] = data["session#{key}"]
  end
  tweaked_data['properties'] = data['properties'].merge({
    'sessionData' => JSON.dump(tweaked_session_data),
    'sessionN' => tweaked_session_data.count,
    'session' => tweaked_session_data.count
  })
  print JSON.dump(tweaked_data)
end
def main()
  if ARGV.count == 0
    fail 'Specify mode'
  end
  mode = ARGV.shift
  data = JSON.load(ARGF.read)
  if mode == '333-sessions'
    summarize_333_sessions_by_solve_date(data)
  elsif mode == '333-pb'
    summarize_333_pb_progress_by_solve_date(data)
  elsif mode == 'merge-333-sessions'
    merge_333_sessions_by_solved_year(data)
  elsif mode == 'per-session'
    summarize_per_session(data)
  elsif mode == 'debug'
    inspect_session_period(data)
  elsif mode == 'tweak'
    tweak_sessions_to_match_logical_date(data)
  else
    fail 'Invalid mode'
  end
end

main()