Advent of Code 2022 https://adventofcode.com/2022/
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/1 Parts 1 and 2 with constant memory
# Mark Jenkins
from heapq import nlargest
def gen_elf_lines():
"""a generator of consecutive elf lines, converted to int
call multiple times to get different elves, won't yield at all
(StopIteration exception) when there are no more.
Elf line items with value zero are included, so don't assume
sum(gen_elf_lines())==0 means end of file
"""
while True:
try:
yield int(input())
# catching ValueError covers the blank lines, EOFError covers
# when we reach end of file
except (ValueError, EOFError):
break # while True
def gen_elf_sums():
"""A generator of elf sums.
Uses constant memory, one elf with many items is fine
"""
while True: # StopIteration exception will stop this loop
# we test for more input by starting with one line
# StopIteration will be raised if we're all out
#
# this approach supports 0 as an allowable input
# otherwise we would just sum(gen_elf_lines()) and stop when a 0 sum is hit
elf_line_gen = iter(gen_elf_lines())
try:
first_elf_line = next(elf_line_gen) # raises StopIteration
except StopIteration:
break
yield sum(elf_line_gen, first_elf_line)
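# (the second argument to sum() above seeds the running total, so the first
# elf line pulled out by next() isn't lost; e.g. sum(iter([2, 3]), 1) == 6)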
def part_1():
return max(gen_elf_sums())
# Part 2 code starts here
def n_largest_elves(n):
"""Find the n largest elves
Uses constant memory by using a heap priority queue of size n"""
return nlargest(n, gen_elf_sums())
def sum_n_largest_elves(n):
"""Find the sum of the n largest elves.
Uses constant memory regardless of how many elves there are and
how many items they have"""
return sum(n_largest_elves(n))
def part_2():
return sum_n_largest_elves(3)
def print_part_1_and_2_combined():
top_3_elves = n_largest_elves(3)
# use max for the largest elf (part 1) rather than relying on the
# ordering of the list returned by nlargest
print( "largest elf for part one: %d" % max(top_3_elves) )
print( "top three elves sum for part two: %d" % sum(top_3_elves) )
if __name__ == "__main__":
#print(part_1())
#print(part_2())
# comment this line and uncomment one of the above if you just
# want part 1 or 2
print_part_1_and_2_combined()
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/1 part one
def gen_elf_lines():
while True:
try:
yield int(input())
except (ValueError, EOFError):
break
def gen_elf_sums():
while True:
elf_line_gen = iter(gen_elf_lines())
try:
first_elf_line = next(elf_line_gen)
except StopIteration:
break
yield sum(elf_line_gen, first_elf_line)
if __name__ == "__main__":
print(max(gen_elf_sums()))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/1 part one
(use-modules (ice-9 rdelim) (srfi srfi-1))
;;; we read all lines backwards because we can use
;;; this bare bones definition of unfold-right
;;; (simplified version of the one in the guile manual)
;;;(define (unfold-right p f g seed)
;;; (let lp ((seed seed) (lis '() ))
;;; (if (p seed) lis
;;; (lp (g seed)
;;; (cons (f seed) lis)))))
;;;
;;; The order of the lines in this problem isn't relevant
(define (read_all_lines_backwards)
(unfold-right eof-object? identity (lambda (x) (read-line)) (read-line)) )
(define (not_empty_line_or_end_of_list x)
(not (or (equal? "" x) (null? x)) ))
(define (sum_until_blankline_or_end_of_list l)
(reduce + 0 (map string->number
(take-while not_empty_line_or_end_of_list l))))
(define (skip_until_after_blankline_or_end_of_list l)
(let ((l_after_skip (drop-while not_empty_line_or_end_of_list l)))
(if (null? l_after_skip)
l_after_skip
(cdr l_after_skip) )))
(define (sum_elfs elf_lines)
;; again, unfold-right, no need for the greater sophistication of fold
;; or reversing the list from unfold-right
(unfold-right null? ; predicate p
sum_until_blankline_or_end_of_list ; next list item f
skip_until_after_blankline_or_end_of_list ; seed updater g
elf_lines )) ; seed
(define elf_sums (sum_elfs (read_all_lines_backwards)) )
(write-line (reduce max "no elves found" elf_sums) )
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/1 part two
from aoc2022_day01_p1 import gen_elf_sums
from heapq import nlargest
print( sum(nlargest(3, gen_elf_sums())) )
#!/usr/bin/env guile
!#
(use-modules (srfi srfi-1))
;;; https://adventofcode.com/2022/day/1 part two
(include "aoc2022_d01p1.scm")
(define (merge_item_into_list_and_drop_lowest i l)
(cdr (merge l (list i) <)))
(define (nlargest n l)
(fold merge_item_into_list_and_drop_lowest
(sort (take l n) <) ; initial list, first n items sorted
(drop l n) ) ) ; remaining items after n
(write-line (reduce + "no elves found" (nlargest 3 elf_sums) ) )
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/2 part one
from sys import stdin
ROCK, PAPER, SCIS = (1, 2, 3)
OP_TABLE = {"A": ROCK,
"B": PAPER,
"C": SCIS,
}
REPLY_TABLE = {"X": ROCK,
"Y": PAPER,
"Z": SCIS,
}
LOSS, DRAW, WIN = (0, 3, 6)
def decode_strat_line(line, left_table=OP_TABLE, right_table=REPLY_TABLE):
op, reply = line.strip().split(" ")
return (left_table[op], right_table[reply])
def gen_input_pairs(left_table=OP_TABLE, right_table=REPLY_TABLE):
for line in stdin:
yield decode_strat_line(
line, left_table=left_table, right_table=right_table)
def is_draw(op, reply):
return op==reply
def is_win(op, reply):
return ( (op==SCIS and reply==ROCK) or
(not (op==ROCK and reply==SCIS) and op<reply ))
def score_outcome(op, reply):
return ( DRAW if is_draw(op, reply)
else (WIN if is_win(op, reply) else LOSS) )
def score_round(op,reply):
return score_outcome(op,reply) + reply
if __name__ == "__main__":
print( sum( score_round(op, reply)
for op, reply in gen_input_pairs() ) )
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/2 part one
;;; based on this solution:
;;; https://dev.to/nickymeuleman/advent-of-code-2022-day-2-3b18
(use-modules (ice-9 rdelim) (srfi srfi-1))
;;; we read all lines backwards because we can use
;;; this bare bones definition of unfold-right
;;; (simplified version of the one in the guile manual)
;;;(define (unfold-right p f g seed)
;;; (let lp ((seed seed) (lis '() ))
;;; (if (p seed) lis
;;; (lp (g seed)
;;; (cons (f seed) lis)))))
;;;
;;; The order of the lines in this problem isn't relevant
(define (read_all_lines_backwards)
(unfold-right eof-object? identity (lambda (x) (read-line)) (read-line)) )
(define (convert_line_to_int_pair line)
(let ((line_parts (string->list line)))
(cons (- (char->integer (car line_parts) ) (char->integer #\A))
(- (char->integer (caddr line_parts) ) (char->integer #\X)) )))
(define (lines_as_int_pairs l)
(map convert_line_to_int_pair l) )
(define (outcome opponent my_reply)
(modulo (+ my_reply (- opponent) 1) 3) )
(define (outcome_score opponent my_reply)
(* 3 (outcome opponent my_reply)))
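;;; a worked example of the formula above (shapes encoded 0/1/2 for
;;; rock/paper/scissors in this file): (my_reply - opponent + 1) mod 3 is
;;; 0 for a loss, 1 for a draw and 2 for a win, so tripling it yields the
;;; 0/3/6 outcome scores; e.g. paper (1) vs rock (0): (1 - 0 + 1) mod 3 = 2 -> 6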
(define (score_outcome_and_my_play opponent my_reply)
(+ (+ 1 my_reply) (outcome_score opponent my_reply) ))
(define input_lines
(lines_as_int_pairs (read_all_lines_backwards) ))
(define (perform_scoring_with f input_line_pairs)
(map (lambda (x) (f (car x) (cdr x)))
input_line_pairs) )
(write-line (reduce
+
"no lines"
(perform_scoring_with score_outcome_and_my_play input_lines) ))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/2 part two
#from sys import stdin
from aoc2022_d02p1 import \
LOSS, DRAW, WIN, \
ROCK, PAPER, SCIS, \
gen_input_pairs, score_outcome, score_round
RPS = (ROCK, PAPER, SCIS)
LDW = (LOSS, DRAW, WIN)
OUTCOME_TABLE = {"X": LOSS,
"Y": DRAW,
"Z": WIN}
def find_reply_for_outcome(op, outcome):
for reply in RPS:
if outcome == score_outcome(op, reply):
return reply
# find_reply_for_outcome(op, outcome) is fast enough, but we build
# a cache of nested lists WINNING_REPLY_CACHE[op][outcome]
# to show how such a thing can be done
#
# because ROCK, PAPER, SCIS are 1-3, we put None in the 0th entry
WINNING_REPLY_CACHE = [None]
# then we add entries 1-3
WINNING_REPLY_CACHE.extend( [
# our inner list is indexed with all the outcomes
# because these are 0, 3, and 6 we put None in the other spots
[(None if outcome not in LDW # not 0, 3, or 6
else find_reply_for_outcome(op, outcome) )
for outcome in range(max(LDW)+1)]
for op in RPS ])
# WINNING_REPLY_CACHE[1] is [3, None, None, 1, None, None, 2] for example
print( sum( score_round(op, WINNING_REPLY_CACHE[op][outcome] )
for op, outcome in gen_input_pairs(right_table=OUTCOME_TABLE) ) )
# without the cache, the approach is similar
#print( sum( score_round(op, find_reply_for_outcome(op,outcome) )
# for op, outcome in gen_input_pairs(right_table=OUTCOME_TABLE) ) )
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/2 part two
;;; based on this solution:
;;; https://dev.to/nickymeuleman/advent-of-code-2022-day-2-3b18
(include "aoc2022_d02p1.scm")
(define (compute_my_play_and_score_outcome opponent outcome)
(score_outcome_and_my_play
opponent
(modulo (+ opponent -1 outcome) 3) ))
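;;; inverting the part one formula: with the desired outcome encoded 0/1/2
;;; (lose/draw/win, from the X/Y/Z column), the shape to play is
;;; (opponent + outcome - 1) mod 3; e.g. opponent rock (0) and a required
;;; win (2) gives (0 + 2 - 1) mod 3 = 1, i.e. paper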
(write-line (reduce
+
"no lines"
(perform_scoring_with
compute_my_play_and_score_outcome
input_lines) ))
#!/usr/bin/env python3
from sys import stdin
def problem_input():
for line in stdin:
line = line.strip()
p = len(line)//2 # partition_index
first_h, second_h = set(line[0:p]), set(line[p:])
r = first_h.intersection(second_h)
assert(len(r)==1) # saved me during my "speed"-run
yield r.pop()
def prio(common_item):
return ord(common_item) + (
1-ord("a") if common_item.islower() else 27-ord("A") )
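# e.g. prio("a") == 1, prio("z") == 26, prio("A") == 27, prio("Z") == 52
# (the offsets map lowercase to 1-26 and uppercase to 27-52 per the puzzle)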
if __name__ == "__main__":
print( sum( prio(i) for i in problem_input() ) )
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/3 part one
(use-modules (ice-9 rdelim) (srfi srfi-1))
(define (read_all_lines_backwards)
(unfold-right eof-object? identity (lambda (x) (read-line)) (read-line)) )
;;; TODO, re-implement without (define-values) (split-at) and
;;; multiple return values concept
(define (split_at_to_pair l n)
(define-values (x y) (split-at l n))
(cons x y) )
(define (split_line_list_in_half line_as_list)
(split_at_to_pair line_as_list (/ (length line_as_list) 2)))
(define (lines_to_lists lines)
(map string->list lines) )
(define (partition_all_lines lines)
(map split_line_list_in_half (lines_to_lists lines)) )
(define (find_unique_char_to_pair p)
(let ((p_intersect (char-set-intersection (list->char-set (car p))
(list->char-set (cdr p)) )) )
(char-set-ref p_intersect (char-set-cursor p_intersect) )))
(define (find_chars_uniq_to_partition partitioned_lines)
(map find_unique_char_to_pair partitioned_lines) )
;;; offsets chosen so that after subtraction a-z scale to 1-26 and A-Z to 27-52
(define LOWER_CASE_FACTOR (- (char->integer #\a) 1) ) ; 0-25 -> 1-26
(define UPPER_CASE_FACTOR (- (char->integer #\A) 27) ) ; 0-25-> 27-52
(define (priority c)
(- (char->integer c)
(if (char-lower-case? c) LOWER_CASE_FACTOR UPPER_CASE_FACTOR)))
(define input_lines (read_all_lines_backwards) )
(write-line
(reduce + "no lines found"
(map priority
(find_chars_uniq_to_partition
(partition_all_lines input_lines) ) )) )
#!/usr/bin/env python3
from aoc2022_d03p1 import prio
from functools import reduce
from operator import and_ as set_intersection
from sys import stdin
def problem_input():
input_sets_iter = iter(set(line.strip()) for line in stdin)
# instead of line at a time, triplet at a time
return zip(input_sets_iter, input_sets_iter, input_sets_iter)
# original version
#def problem_input():
# while True:
# try:
# yield [set(input()) for i in range(3)]
# except EOFError:
# break # end while True
def find_common_item_in_sets(l):
return reduce(set_intersection, l).pop()
print( sum(
prio(find_common_item_in_sets(l)) for l in problem_input() ) )
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/3 part two
(include "aoc2022_d03p1.scm")
(define (combine_line_triplet t)
(let ((cs (reduce char-set-intersection #\nul
(map string->char-set t) )))
(char-set-ref cs (char-set-cursor cs) )))
(define (collapse_line_list_by_threes lines)
(unfold-right null? ; p predicate
;; f map each seed to a list element
(lambda (x)
(combine_line_triplet (take x 3) ) )
(lambda (x) (drop x 3)) ; g map each seed to next seed
lines ) ) ; initial seed
(write-line
(reduce + "no lines found"
(map priority (collapse_line_list_by_threes input_lines ) )))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/4 part 1
from sys import stdin
def problem_input():
return (
# each line of input results in a pair of pairs (nested lists)
# achieved below with nested list comprehensions
[ [int(id_num) for id_num in elf_range.split("-")]
for elf_range in line.strip().split(",") ]
for line in stdin )
def prepend_length_to_pair(id_pair):
return (id_pair[1] - id_pair[0], id_pair[0], id_pair[1])
def length_tagged_id_ranges(problem_input):
return (
sorted( (prepend_length_to_pair(line_pair[0]),
prepend_length_to_pair(line_pair[1])) )
for line_pair in problem_input )
def gen_one_for_fully_overlapping_pairs(length_tagged_pairs):
return (
1
for ( (e1_len, e1_start, e1_end),
(e2_len, e2_start, e2_end) ) in length_tagged_pairs
if e1_start >= e2_start and e1_end <= e2_end )
if __name__ == "__main__":
print( sum( gen_one_for_fully_overlapping_pairs(
length_tagged_id_ranges(problem_input()) ) ) )
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/4 part 2
from aoc2022_d04p1 import problem_input
def gen_one_for_overlapping_pairs(pairs):
return (
1
for ( (e1_start, e1_end),
(e2_start, e2_end) ) in pairs
if ( (e2_start <= e1_start <= e2_end) or
(e1_start <= e2_start <= e1_end) ) )
if __name__ == "__main__":
print( sum( gen_one_for_overlapping_pairs(problem_input()) ) )
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/5 part one
(use-modules (ice-9 rdelim) (srfi srfi-1))
(define (read_all_lines_backwards)
(unfold-right eof-object? identity (lambda (x) (read-line)) (read-line)) )
;;; TODO, re-implement without (define-values) (span) and
;;; multiple return values concept
(define (span_to_pair pred l)
(define-values (x y) (span pred l))
(cons x y) )
;;; TODO, re-implement without (define-values) (split-at) and
;;; multiple return values concept
(define (split-at-to-pair l n)
(define-values (x y) (split-at l n))
(cons x y) )
(define (convert_move_param_and_subtract move subval)
(- (string->number move) subval))
;;; parse a move line into a triplet (magnitude start end)
;;; with each of the numbers converted and 1 subtracted from the indices
(define (parse_move_line line)
(let ((split_move_line (string-split line #\space)))
(cons (string->number (second split_move_line))
(map (lambda (x) (- (string->number x) 1))
(list (fourth split_move_line)
(sixth split_move_line) )))))
(define move_t_magnitude first)
(define move_t_start second)
(define move_t_end third)
(define (parse_state_line state_line)
(unfold null? ; p
second ; f, function to get list value(aka cadr)
(lambda (x) (drop x 4)) ; g to get next
;; after converting our line to a list of char
;; we add a space so as to be able to iterate 4 chars at a time
(append (string->list state_line) (list #\space) )))
(define (n_empty_lists n)
;; unfold-right used on the assumption unfold-right is more performant
;; on bad implementations where unfold is defined as a reverse of unfold-right
(unfold-right (lambda (i) (= i n)) ; p
(lambda (i) '() ) ; f, function to create value
(lambda (i) (+ i 1) ); g function taking us to the next step
0 )); seed
(define (reconstruct_initial_stacks state_line stacks)
(unfold (lambda (x) (null? (car x))) ; p
;; function f
;; extend an individual stack with the relevant line component
;; as long as it is not a space
(lambda (x)
(let* ((line_prog (car x))
(line_prog_c (car line_prog))
(old_stacks (cdr x)) )
(if (equal? line_prog_c #\space)
(car old_stacks)
(cons line_prog_c
(car old_stacks)) )))
;; function g
;; advance our unfold state pair to have the next line component
;; and the next piece of old stack
(lambda (x)
(let ((line_prog (car x))
(old_stacks (cdr x)) )
(cons (cdr line_prog)
(cdr old_stacks) ))) ; g
;; seed pair
(cons state_line ; line_prog
stacks) )) ; old_stacks
(define (construct_initial_stacks state_lines_backwards num_col)
(fold reconstruct_initial_stacks ; proc
(n_empty_lists num_col) ; init
state_lines_backwards )) ; lst1
(define stack_adding_function append-reverse)
(define (simulate_drop_on stacks target items)
(let* ((pair_before_push (split-at-to-pair stacks target))
(first_half_of_stacks (car pair_before_push))
(second_half_of_stacks (cdr pair_before_push))
(selected_stack (car second_half_of_stacks))
(rest_of_second_half_of_stacks (cdr second_half_of_stacks)) )
(append first_half_of_stacks
(list (stack_adding_function items selected_stack))
rest_of_second_half_of_stacks) ))
(define (simulate_pop_off stacks magnitude start end)
(let* ((pair_before_pop (split-at-to-pair stacks start))
(first_half_of_stacks (car pair_before_pop))
(second_half_of_stacks (cdr pair_before_pop))
(selected_stack (car second_half_of_stacks))
(rest_of_second_half_of_stacks (cdr second_half_of_stacks))
(items_from_selected_stack (take selected_stack magnitude))
(remainder_of_selected_stack (drop selected_stack magnitude)) )
(list
; stacks
(append first_half_of_stacks
(list remainder_of_selected_stack)
rest_of_second_half_of_stacks)
end
items_from_selected_stack) ))
(define (simulate_move stacks magnitude start end)
(apply simulate_drop_on (simulate_pop_off stacks magnitude start end) ))
(define input_pair (span_to_pair (lambda (x) (string-prefix? "move" x))
(read_all_lines_backwards) ) )
;;; by using unfold-right we restore the move order
;;; could have also done
;;; (map parse_move_line (reverse (car input_pair)))
;;; to achieve the same result
(define moves (unfold-right null? ; p
(lambda (x) (parse_move_line (car x))) ; f
cdr ; g
(car input_pair) )) ; seed
;;; use cddr to drop the bottom two lines (labels and blank)
(define initial_state_lines_backwards (cddr (cdr input_pair)))
(define label_line (cadr (cdr input_pair)))
(define num_col
(string->number (cadr (reverse (string-split label_line #\space)))))
(define parsed_initial_state_lines_backwards
(map parse_state_line initial_state_lines_backwards))
(define initial_stacks (construct_initial_stacks
parsed_initial_state_lines_backwards num_col))
(define final_stacks
(fold
;; procedure proc generates a new set of stacks for a given 3-item move
(lambda (move stacks) (apply simulate_move stacks move ))
initial_stacks ; init
moves ) )
(write-line (list->string (map car final_stacks) ))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/5 part two
(include "aoc2022_d05p1.scm")
(define (simulate_drop_on stacks target items)
(let* ((pair_before_push (split-at-to-pair stacks target))
(first_half_of_stacks (car pair_before_push))
(second_half_of_stacks (cdr pair_before_push))
(selected_stack (car second_half_of_stacks))
(rest_of_second_half_of_stacks (cdr second_half_of_stacks)) )
(append first_half_of_stacks
(list (append items selected_stack))
rest_of_second_half_of_stacks) ))
;; this is the change from part one to two, use of append
;; in building the new stack (add list on top)
;; instead of append-reverse which reverses the items before
;; putting them on top
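;;; e.g. (append-reverse '(a b) '(x)) => (b a x)
;;; while (append '(a b) '(x)) => (a b x)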
(define stack_adding_function append)
(define final_stacks
(fold
;; procedure proc generates a new set of stacks for a given 3-item move
(lambda (move stacks) (apply simulate_move stacks move ))
initial_stacks ; init
moves ) )
(write-line (list->string (map car final_stacks) ))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/6 part one
(use-modules (ice-9 rdelim) (srfi srfi-1))
(define problem_input (string->list (read-line)))
(define (start_condition? so_far)
(= (length so_far)
(char-set-size (list->char-set so_far) )))
(define (recursive_check so_far l count)
(if (start_condition? so_far)
count
(recursive_check (append (cdr so_far) (list (car l)))
(cdr l)
(+ count 1) )))
(write-line (recursive_check (take problem_input 4)
(drop problem_input 4)
4))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/6 part two
(include "aoc2022_d06p1.scm")
(write-line (recursive_check (take problem_input 14)
(drop problem_input 14)
14))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/7 part 1
# Mark Jenkins <mark@parit.ca>
from collections import namedtuple
from sys import stdin
Node = namedtuple('Node', ['dirname', 'contents'])
Leaf = namedtuple('Leaf', ['filename', 'size'])
def init_empty_node(dirname):
return Node(dirname, {})
def tokenize_line(line):
parts = line.split(" ")
return tuple( (p if not p.isdigit() else int(p))
for p in parts )
problem_input = [tokenize_line(line.strip()) for line in stdin]
problem_input.reverse()
# SantaFS traversal must start at the root
assert( ("$", "cd", "/") == problem_input.pop() ) # first line is cd /
root_node = init_empty_node('')
def parse_ls_output(problem_input):
while(len(problem_input)>0):
p = problem_input.pop()
if p[0] == "$":
problem_input.append(p)
break
elif p[0] == "dir":
n = init_empty_node(p[1])
yield (n.dirname, n)
else:
l = Leaf(*reversed(p)) # reverse for filename, size
yield (l.filename, l)
def recursively_build_santa_fs(current_node, problem_input):
while(len(problem_input)>0):
p = problem_input.pop()
prompt, command = p[:2]
assert(prompt=="$")
if command == "ls":
current_node.contents.update( parse_ls_output(problem_input) )
elif ("cd", "..") == p[1:3]:
break
# a request to cd to / would break the whole recursion,
# though this appears to be only on the first line
# so we could have just asserted this wasn't so
elif ("cd", "/") == p[1:3]:
problem_input.append(p)
break
else:
assert( command == "cd" )
recursively_build_santa_fs( current_node.contents[p[2]],
problem_input )
# this function isn't used, but it helps illustrate things
# hoarding it in case SantaFS comes back
def recursively_pretty_print_santa_fs(current_node=root_node, pad=""):
print( pad + "dir", current_node.dirname )
for blah in current_node.contents.values():
if isinstance(blah, Node):
recursively_pretty_print_santa_fs(blah, pad + " ")
else:
assert( isinstance(blah, Leaf) )
print( pad + "file", blah)
print( pad + "done with", current_node.dirname, "going back up")
def recursively_find_dir_sums(current_node=root_node):
files_in_this_dir = [stuff
for stuff in current_node.contents.values()
if isinstance(stuff, Leaf) ]
sum_of_files_in_this_dir = sum( stuff.size
for stuff in current_node.contents.values()
if isinstance(stuff, Leaf) )
return sum( recursively_find_dir_sums(dirnode)
for dirnode in current_node.contents.values()
if isinstance(dirnode, Node) ) + sum_of_files_in_this_dir
def recursively_find_dirs_with_criteria(criteria, current_node=root_node):
if criteria(current_node):
yield current_node
for dirnode in current_node.contents.values():
if isinstance(dirnode, Node):
yield from (recursively_find_dirs_with_criteria(criteria, dirnode))
santa_fs = recursively_build_santa_fs(root_node, problem_input)
if __name__ == "__main__":
print(sum(
recursively_find_dir_sums(dirnode)
for dirnode in recursively_find_dirs_with_criteria(
# criteria
lambda node: recursively_find_dir_sums(node) <= 100000 ) ))
from aoc2022_d07p1 import (
recursively_find_dirs_with_criteria,
recursively_find_dir_sums,
)
SPACE_AVAILABLE = 70000000
UNUSED_TARGET = 30000000
CURRENT_USED = recursively_find_dir_sums()
CURRENT_UNUSED = SPACE_AVAILABLE - CURRENT_USED
FREE_TARGET = UNUSED_TARGET - CURRENT_UNUSED
print(min(
recursively_find_dir_sums(dirnode)
for dirnode in recursively_find_dirs_with_criteria(
# criteria
lambda node: recursively_find_dir_sums(node) >= FREE_TARGET ) ))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/8 part one
(use-modules (ice-9 rdelim) (srfi srfi-1))
(define (read_all_lines_backwards)
(unfold-right eof-object? identity (lambda (x) (read-line)) (read-line)) )
(define input_lines_backwards (read_all_lines_backwards) )
(define ASCII_ZERO (char->integer #\0))
(define (input_char_to_number x)
(- (char->integer x) ASCII_ZERO))
(define (replace_list_with_empty_lists l)
(map (lambda (x) '()) l) )
(define (build_cols_out_of_row row cols_so_far)
(map (lambda (pair) (apply cons pair))
(zip row cols_so_far) ))
(define (matrix_transpose matrix)
;; use fold-right because each final row (original cols) is built backwards
;; (map reverse (fold ... )) would also work
(fold-right build_cols_out_of_row ; proc
(replace_list_with_empty_lists matrix) ; init
matrix )) ; lst
(define row_col
(unfold-right null? ; p
;; f
(lambda (seed)
(map input_char_to_number
(string->list (car seed) ) ))
cdr ;; g
input_lines_backwards )) ; seed
(define col_row (matrix_transpose row_col))
(define (item_on_edge? surroundings)
(any null? surroundings) )
(define (clear_path_to_edge? item path)
(every (lambda (x) (< x item )) path) )
(define (analyze_item item paths)
(if (item_on_edge? paths)
1
(if (any (lambda (path) (clear_path_to_edge? item path))
paths)
1
0)))
(define (analyse_row row all_cols_above all_cols_below)
(unfold (lambda (seed) (null? (fourth seed))) ; p
;; f
(lambda (seed)
(analyze_item
(car (fourth seed)) ; item
(list
(third seed) ; items left
(cdr (fourth seed)) ; items_right, exclude current
(car (first seed)) ; items_above
(car (second seed))) )) ; items_below
;; g
(lambda (seed)
(let ((cols_above (first seed))
(cols_below (second seed))
(items_before (third seed))
(item_and_items_after (fourth seed)))
;; rebuild seed
(list (cdr cols_above)
(cdr cols_below)
(cons (car item_and_items_after) items_before)
(cdr item_and_items_after) ) ))
;; seed
(list all_cols_above all_cols_below '() row) ))
(define (analyze_forest)
(unfold (lambda (seed) (null? (first seed)) ) ; p
;; f
(lambda (seed)
(analyse_row (car (first seed) ) ; row
(second seed) ; all_cols_above
(map cdr (third seed)) ) ) ; all_cols_below
;; g
(lambda (seed)
(list (cdr (first seed))
(map (lambda (x y)
(cons (car y) x)
) (second seed) (third seed) )
(map cdr (third seed)) ))
;; seed
(list row_col ; rows
(replace_list_with_empty_lists col_row)
col_row) )) ; current_cols_and_after
;;; left over from debugging
(define (pretty_print matrix)
(for-each
(lambda (row)
(write-line (string-reverse
(reduce string-append "" (map number->string row)))))
matrix ))
;;; left over from debugging
(define (pretty_print_with_spaces matrix)
(for-each write-line matrix))
(define visibility_matrix (analyze_forest) )
;;;(pretty_print visibility_matrix)
;;;(pretty_print_with_spaces visibility_matrix)
(write-line
(reduce + "no input"
(map (lambda (row) (reduce + "no input" row))
visibility_matrix)))
#!/usr/bin/env guile
!#
;;; https://adventofcode.com/2022/day/8 part two
(include "aoc2022_d08p1.scm")
(define (scenic_score item path)
(+
;; count the number of trees in the path within criteria
(length (take-while
(lambda (x) (< x item ) )
path ) )
;; add one extra if scoring iteration is stopped by a tree out of criteria
(if (find (lambda (x) (>= x item)) path) 1 0) ))
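;;; e.g. an item of height 5 looking down a path (3 5 2): take-while counts
;;; (3) -> 1, and the blocking 5 found by find adds 1, for a viewing distance of 2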
;; redefine the analysis function
(define (analyze_item item paths)
;; compute scenic scores for each path away from the item and multiply them
(reduce * 0 (map (lambda (path) (scenic_score item path))
paths) ))
(define scenic_matrix (analyze_forest) )
;;;(pretty_print scenic_matrix)
;;;(pretty_print_with_spaces scenic_matrix)
(write-line
(reduce max "no input"
(map (lambda (x) (reduce max "no input" x))
scenic_matrix)))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/9 part one
# Mark Jenkins <mark@parit.ca>
from collections import Counter, namedtuple
from sys import stdin
Move = namedtuple('Move', ('dir', 'mag'))
def parse_line(l):
return Move({'U': (0,-1), 'D':(0,1), 'L':(-1,0), 'R':(1,0)}[l[0]],
int(l[1]))
def problem_input():
return (parse_line(line.strip().split()) for line in stdin)
def calc_move(a, b):
return tuple(sum(x) for x in zip(a,b))
SIDEWAYS_MOVES = ( (1,0), (-1,0), # right, left
(0, 1), (0,-1) ) # down, up
DIAG_MOVES = ( (1,1), (1,-1), (-1,1), (-1,-1) )
POSSIBLE_MOVES = SIDEWAYS_MOVES + DIAG_MOVES
def touching(a, b):
if a==b:
return True
return any(
a==calc_move(b,c)
for c in POSSIBLE_MOVES)
def sideways_off(head, tail):
return (head[0] == tail[0]) or (head[1]==tail[1])
def correct_tail(head, tail):
FIX = SIDEWAYS_MOVES if sideways_off(head, tail) else DIAG_MOVES
for c in FIX:
attempt = calc_move(tail,c)
if touching(head, attempt):
return attempt
else:
assert(False)
def calc_tail(head, tail):
return (tail if touching(head,tail)
else correct_tail(head, tail) )
def simulate():
head, tail = (0,0),(0,0)
tail_history = Counter( (tail,) )
for move in problem_input():
for i in range(move.mag):
head = calc_move(head, move.dir)
new_tail = calc_tail(head, tail)
if tail!=new_tail:
tail = new_tail
tail_history[tail]+=1
return head, tail, tail_history
if __name__ == "__main__":
head, tail, counts = simulate()
print(len(counts))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/9 part two
# Mark Jenkins <mark@parit.ca>
from aoc2022_d09p1 import \
problem_input, calc_move, calc_tail as calc_follow
from collections import Counter
knots = [ (0,0) ] * 10
tail = knots[-1]
tail_history = Counter( (tail,) )
for move in problem_input():
for m in range(move.mag):
knots[0] = calc_move( knots[0], move.dir )
for i, k in enumerate(knots[1:], 1):
knots[i] = calc_follow(knots[i-1], k)
if tail != knots[-1]:
tail = knots[-1]
tail_history[ tail ] += 1
print( len(tail_history) )
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/10 parts 1 and 2
# Mark Jenkins <mark@parit.ca>
from sys import stdin
SCREEN_WIDTH = 40
def parse_lines(lines):
for line in lines:
line = line.strip()
if line=="noop":
yield (line,None)
else:
sp = line.split()
yield sp[0], int(sp[1])
def emulate(instructions):
x = 1
for inst, operand in instructions:
if inst == "noop":
yield x
else: # addx
yield x
yield x
x += operand
def gen_compute_signal_str(instructions):
for i, x in enumerate(emulate(instructions),1):
yield i, i*x
def p1answer(inst):
return sum( s for i, s in gen_compute_signal_str(inst)
if i in {20,60,100,140,180,220})
def drawscreen(instructions):
# 40 (SCREEN_WIDTH) None values
output = [None] * SCREEN_WIDTH
for i, x in enumerate(emulate(instructions), 1):
pos = (i-1) % SCREEN_WIDTH
output[pos] = "#" if (pos-1<=x<=pos+1) else "."
if (i % SCREEN_WIDTH)==0:
print(''.join(output))
if __name__ == "__main__":
instructions = list(parse_lines(stdin.readlines()))
print(p1answer(instructions))
drawscreen(instructions)
from sys import stdin
from collections import namedtuple
from operator import add, mul
from itertools import zip_longest
import heapq
# https://adventofcode.com/2022/day/11 part one
# Mark Jenkins
Mon = namedtuple('Monkey', ('id', 'starting', 'op', 'divtest',
'truethrow', 'falsethrow') )
def compile_new_assignment(expr):
operand1, operator, operand2 = expr.strip().split(" ")
if operand2.isdigit():
operand2 = int(operand2)
return operand1, operator, operand2
def parse_lines_to_mon(lines):
assert( len(lines) == 6 )
return Mon( id=int(lines[0].split(" ")[1].split(":")[0]),
starting=[int(s) for s in lines[1].split(":")[1].split(",")],
op=compile_new_assignment( lines[2].split("=")[1] ),
divtest=int(lines[3].split("by ")[1]),
truethrow=int(lines[4].split(" ")[-1]),
falsethrow=int(lines[5].split(" ")[-1]) )
def parse_lines(lines):
line_iter = iter(lines)
line_iters_to_zip = [line_iter] * 7
return [ parse_lines_to_mon(monkey[0:6]) # slice 0:6 excludes 7th line
for monkey in zip_longest( *line_iters_to_zip ) ]
def square_first_arg_ignore_second(val_to_square, ignore):
return val_to_square**2
RECALC_OPERATORS = {
("+", False): add,
("*", False): mul,
("*", True): square_first_arg_ignore_second
}
def recalc_worry_score(old, operand1, operator, operand2):
operand2_is_old_str = operand2=="old"
return RECALC_OPERATORS[operator, operand2_is_old_str](
old, operand2)
def simulate_part1(mons):
inventory = [ mon.starting for mon in mons ]
inspections = [ 0 for mon in mons ]
for r in range(20):
for i, mon in enumerate(mons):
mon_items = [ recalc_worry_score(mi, *mon.op)
for mi in inventory[i] ]
mon_items = [ mi//3 for mi in mon_items]
inventory[i].clear() # all thrown away
inspections[i] += len(mon_items)
for mi in mon_items:
if 0==(mi % mon.divtest):
inventory[mon.truethrow].append(mi)
else:
inventory[mon.falsethrow].append(mi)
return inspections
def monkey_bussiness(inspections):
# a,b = heapq.nlargest(2, inspections)
# return a*b
return mul(*heapq.nlargest(2, inspections) )
if __name__ == "__main__":
print(
monkey_bussiness(simulate_part1(parse_lines(stdin) ) ) )
from sys import stdin
from aoc2022_d11p1 import parse_lines, recalc_worry_score, monkey_bussiness
# https://adventofcode.com/2022/day/11 part two
# Mark Jenkins
# conceive of each item worry score as existing in parallel universes where
# we need to track the divisibility for each monkey, because any item could
# end up with any monkey.
#
# so for example, if there are 10 items and 4 monkeys, track the divisibility
# of forty scores throughout.
#
# with that kind of setup, we can set up modular arithmetic congruences for
# each true score and each monkey-item score pair mod monkey divisibility
#
# e.g. with 10 items (0-9) and 4 monkeys (0-3) we can say
# item a(sub i0 m0) is congruent to b(sub i0 m0) mod div(sub 0)
# item a(sub i0 m1) is congruent to b(sub i0 m1) mod div(sub 1)
# ...
# item a(sub i0 m3) is congruent to b(sub i0 m3) mod div(sub 3)
# item a(sub i1 m0) is congruent to b(sub i1 m0) mod div(sub 0)
# ...
# item a(sub i9 m3) is congruent to b(sub i9 m3) mod div(sub 3)
#
# we can add, multiply, or square the right hand side of these congruences
# as long as we perform a mod operation with the matching monkey divisor;
# the actual scores (a) remain congruent mod div(sub m) to their right hand
# scores (b)
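# a minimal, self-contained illustration of that idea (arbitrary example
# numbers, not part of the solution below): keeping only the residues modulo
# each divisor and reducing after every operation preserves the divisibility tests
def _congruence_example():
    score = 79
    divisors = (23, 19)
    residues = [score % d for d in divisors]
    # apply the same "new = old * old" operation to the residues, reducing again
    residues = [(r * r) % d for d, r in zip(divisors, residues)]
    assert residues == [(score * score) % d for d in divisors]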
def simulate_part2(mons):
inspections = [ 0 ] * len(mons)
inventory = [[] for mon in mons]
scores = []
for si, (s, m_index, monout) in enumerate( ((st, mi, mon)
for mi, mon in enumerate(mons)
for st in mon.starting) ):
scores.append( [s % mon2.divtest for mon2 in mons] )
inventory[m_index].append( si )
for r in range(10000):
for i, mon in enumerate(mons):
inspections[i] += len( inventory[i] )
for item_index in inventory[i]:
for score_index, mon_4_score in enumerate(mons):
scores[item_index][score_index] = (
recalc_worry_score(
scores[item_index][score_index], *mon.op)
% mon_4_score.divtest)
inv_update_index = (
mon.truethrow
# no need to check % mon.divtest as that reduction
# was already performed above
if scores[item_index][i]==0
else mon.falsethrow )
inventory[inv_update_index].append(item_index)
inventory[i].clear() # all thrown away
return inspections
if __name__ == "__main__":
print(
monkey_bussiness(simulate_part2(parse_lines(stdin) ) ) )
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/12 parts one and two
# Mark Jenkins
# This is a recursive depth-first search with a few minor optimizations
# * we cache the shortest path to each node found so far,
# cutting off the search if it's not going to be worth continuing
#
# * priority is given to reaching the end and going up in height,
# though this heuristic may not provide much help in a depth first
# context because found solutions may not be optimal, so we have to keep
# searching. (though TODO we could do a better job pruning)
#
# The end result performs very poorly. I blew my stack using cpython on a
# full sized (non-sample) input, but not with PyPy.
#
# There are better algorithms for shortest path in unweighted,
# undirected graphs, such as breadth first search (a minimal sketch follows
# below this comment block)
# https://en.wikipedia.org/wiki/Shortest_path_problem
#
# Part two would be a lot better with an approach that starts at E and finds
# the shortest path to an "a". Instead, I simply re-used the part one setup
# and looked at the shortest path to E (if any) from each "a". Very lazy, but it worked.
#
# TODO: the best paths to (y,x) cache doesn't need to be returned through
# the recursion as it is updated in place on each call.
# In fact, there's no reason a dict needs to be used for that; another
# nested list-in-list would be fine in this co-ordinate space
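# a minimal sketch of that breadth-first alternative, assuming the same
# input_grid / start / end conventions and the find_legal_neighbours helper
# defined further down in this file (kept here for reference, never called)
def _bfs_shortest_path(input_grid, start, end):
    from collections import deque
    queue = deque([(start, 0)])  # (position, cost so far)
    seen = {start}
    while queue:
        current, cost = queue.popleft()
        if current == end:
            return cost
        for n in find_legal_neighbours(input_grid, current):
            if n not in seen:
                seen.add(n)
                queue.append((n, cost + 1))
    return -1  # no path found, mirroring shortest_path() below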
from sys import stdin
def convert_map_char(c):
if c in ("S", "E"):
return c
else:
return ord(c) - ord('a')
def parse_input():
result = [
[ convert_map_char(c) for c in line.strip() ]
for line in stdin ]
# find the start and end markers, and replace them with 0 and 26
# so we can mostly treat them the same (particularly for legal travel)
start, end = None, None
for rownum,row in enumerate(result):
for colnum, c in enumerate(row):
if c=="S":
start = rownum, colnum
result[rownum][colnum] = 0
elif c=="E":
end = rownum, colnum
result[rownum][colnum] = 26
if start!=None and end!=None:
break
if start!=None and end!=None:
break
return tuple(tuple(row) for row in result), start, end
def pretty_print_input(input_chart, start, end):
for row in input_chart:
print( ' '.join(str(c).rjust(2) for c in row ) )
print(start)
print(end)
def find_neighbours(current):
if len(current)!=2:
assert(False)
cur_row, cur_col = current
yield from ( (cur_row+1, cur_col),
(cur_row-1, cur_col),
(cur_row, cur_col+1),
(cur_row, cur_col-1) )
def filter_in_boundary_neighbours(input_grid, neigh):
row_bound = len(input_grid)
col_bound = len(input_grid[0])
return (n for n in neigh
if (0<=n[0]<row_bound and 0<=n[1]<col_bound) )
def acceptable_cost(input_grid, current, n):
current_h = input_grid[current[0]][current[1]]
n_h = input_grid[n[0]][n[1]]
return ( (n_h < current_h) # step down
or
(current_h == n_h)
or
( (current_h+1) == n_h ) )
def find_legal_neighbours(input_grid, current):
return (
n
for n in
filter_in_boundary_neighbours(input_grid, find_neighbours(current))
if acceptable_cost(input_grid, current, n)
)
def find_ranked_legal_neighbours(input_grid, current, end):
return (
# start tuple
( (1 if n==end else 1), # prioritize using end
input_grid[current[0]][current[1]], # height
n, # actual point
)
for n in find_legal_neighbours(input_grid, current)
)
def recursively_find_shortest(
input_grid, start, end,
best_paths,
current,
cost_prior_to_current,
additional_cost=1):
new_cost = cost_prior_to_current + additional_cost
if current in best_paths:
if new_cost < best_paths[current]:
best_paths[current] = new_cost
else:
return best_paths, False
else:
best_paths[current] = new_cost
if current==end:
return best_paths, True
neigh = find_ranked_legal_neighbours(input_grid, current, end)
found = False
for r1, r2, n in neigh:
if n==end:
# call to cache result with final cost
best_paths, found = recursively_find_shortest(
input_grid, start, end,
best_paths,
n,
new_cost)
assert(found)
found = True
continue
best_paths, result = recursively_find_shortest(
input_grid, start, end,
best_paths,
n,
new_cost)
if result:
found = True
else:
return best_paths, found
assert(False) # should not reach
def shortest_path(input_grid, start, end):
best_paths, shortest_found = recursively_find_shortest(
input_grid,
start,
end,
{}, # initial empty best paths
start, # current
0, # cost prior
additional_cost=0,
)
if not shortest_found:
return -1
return best_paths[end]
def part1(input_grid, start, end):
return shortest_path( input_grid, start, end )
def part2(input_grid, end):
return min(
# filter out return value -1 meaning no path found
(x for x in (
shortest_path(input_grid, (y,x), end)
for y, row in enumerate(input_grid)
for x, c in enumerate(row)
if input_grid[y][x]==0 )
if x != -1 ) )
if __name__ == "__main__":
input_grid, start, end = parse_input()
#pretty_print_input(input_grid, start, end)
print("part one", part1(input_grid, start, end))
print("part two", part2(input_grid, end) )
from sys import stdin
from itertools import zip_longest
# https://adventofcode.com/2022/day/13 part one
# Mark Jenkins
# lexical analysis
def tokenize_line(line):
charbuf = []
for c in line:
if c in (",", "[","]"): # special characters
if len(charbuf)>0:
yield int(''.join(charbuf))
charbuf.clear()
yield c
else: # integer characters
assert(c.isdigit())
charbuf.append(c)
# parsing
def parse_line_recursively(tokens_iter):
try:
next_token = next(tokens_iter)
except StopIteration:
assert( False )
return
if next_token=="[":
yield list(parse_line_recursively(tokens_iter))
yield from parse_line_recursively(tokens_iter)
elif next_token=="]":
return
elif next_token==",":
yield from parse_line_recursively(tokens_iter)
else:
yield next_token
yield from parse_line_recursively(tokens_iter)
def parse_line(line):
tokens_iter = iter(tokenize_line(line.strip()))
open_bracket = next(tokens_iter)
assert(open_bracket=="[")
ret_list = list(parse_line_recursively(tokens_iter))
return ret_list
def parse_input(linesource=stdin):
input_iter = iter(linesource)
return ( (parse_line(line_trip[0]), parse_line(line_trip[1]))
for line_trip in zip_longest(input_iter, input_iter, input_iter) )
# interpreting
def elem_pair_right_ordered(left_elem, right_elem):
if isinstance(left_elem, list) and isinstance(right_elem, list):
return list_pair_right_ordered(left_elem, right_elem)
# just left is a list
elif isinstance(left_elem, list):
return list_pair_right_ordered(left_elem, [right_elem])
elif isinstance(right_elem, list):
return list_pair_right_ordered([left_elem], right_elem)
else:
assert( isinstance(left_elem, int) and isinstance(right_elem, int) )
return (None if left_elem == right_elem
else left_elem < right_elem)
def list_pair_right_ordered(left_list, right_list):
assert( isinstance(left_list, list) and
isinstance(right_list, list) )
for left_elem, right_elem in zip_longest(left_list, right_list):
if left_elem==None:
return True
elif right_elem==None:
return False
else:
elem_pair_result = elem_pair_right_ordered(left_elem, right_elem)
if None==elem_pair_result:
continue
return elem_pair_result
else: # for loop completes
return None
if __name__ == "__main__":
print( sum(
i
for i, (left, right) in enumerate(parse_input(), 1)
if list_pair_right_ordered(left, right) ))
#!/usr/bin/env python3
# https://adventofcode.com/2022/day/13 part two
# Mark Jenkins
from sys import stdin
from operator import mul
from functools import reduce, cmp_to_key
from aoc2022_d13p1 import parse_line, list_pair_right_ordered
START_PACKET = [[2]]
END_PACKET = [[6]]
DIVIDER_PACKETS = (START_PACKET, END_PACKET)
# when python3 sort functions are customized, they use a key= argument
# instead of a cmp one. The key= functions take one argument and they convert
# a complex object into some other kind of object where <, >, >=, <=, and ==
# can be used
#
# the old cmp style functions take two arguments of things being compared
# and return negative, zero and positive ints
#
# fortunately for us, python3 includes compatibility for cmp style functions
# by way of the cmp_to_key decorator function
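# for illustration only (arbitrary values unrelated to the puzzle): a classic
# cmp function returning a - b sorts integers ascending once wrapped this way
assert sorted([3, 1, 2], key=cmp_to_key(lambda a, b: a - b)) == [1, 2, 3]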
@cmp_to_key
def list_pair_key_style_comparison(x, y):
return (0 if None==list_pair_right_ordered(x, y)
else (-1 if list_pair_right_ordered(x, y) else 1) )
# read all packets into a list
packets = [ parse_line(line) for line in stdin
if not line.isspace() ]
packets.extend(DIVIDER_PACKETS) # add the divider packets
packets.sort(key=list_pair_key_style_comparison) # sort
# extract indices (1-indexed) in the above list
# for the divider packets and multiply them
print( reduce( mul, (i
for i, packet in enumerate(packets, 1)
if packet in DIVIDER_PACKETS) ) )
from sys import stdin
from itertools import tee, islice
def tokenize_line(line):
return ( tuple(map( int, pair.split(",") ))
for pair in line.strip().split(" -> ") )
def parse_line(line):
tokens_1, tokens_2 = tee(tokenize_line(line))
return zip( tokens_1, islice(tokens_2, 1, None) )
def parse_input(linesource=stdin):
return( parse_line(line)
for line in linesource )
def pt_add(pt1, pt2):
return (pt1[0] + pt2[0], pt1[1] + pt2[1])
def build_line(points):
# FIXME x and y are backwards
points = tuple(sorted(points))
(x1, y1), (x2, y2) = points
assert( (y1 == y2) ^ (x1 == x2) )
direction = ( (0,1) if y1<y2 else (1,0) )
pt = (x1, y1)
yield pt
while pt != (x2, y2):
pt = pt_add(pt, direction)
yield pt
def build_initial_structure(linesource=stdin):
return {
pt
for line_pairs in parse_input(linesource)
for points in line_pairs
for pt in build_line(points) }
def max_y(solids):
return max( y for x,y in solids)
def simulate_part1(solids, start=(500, 0)):
count = 0
MAX_Y = max_y(solids)
pt = start
while pt[1] <= MAX_Y:
down = pt_add(pt, (0,1))
if down in solids:
left = pt_add(pt, (-1, 1))
if left in solids:
right = pt_add(pt, (1, 1))
if right in solids:
solids.add(pt)
pt = start
count+=1
continue
else:
pt = right
else:
pt = left
else:
pt = down
return count
if __name__ == "__main__":
print(simulate_part1(build_initial_structure()))
from aoc2022_d14p1 import build_initial_structure, max_y, pt_add
def simulate_part2(solids, start=(500, 0)):
count = 0
FLOOR_Y = max_y(solids) + 2
pt = start
while True:
down = pt_add(pt, (0,1))
if down in solids or down[1]==FLOOR_Y:
left = pt_add(pt, (-1, 1))
if left in solids or left[1]==FLOOR_Y:
right = pt_add(pt, (1, 1))
if right in solids or right[1]==FLOOR_Y:
solids.add(pt)
if pt==start:
return count+1
pt = start
count+=1
continue
else:
pt = right
else:
pt = left
else:
pt = down
if __name__ == "__main__":
print(simulate_part2(build_initial_structure()))
from sys import argv
from aoc2022_d15p1 import (
parse_input,
sensor_range_includes_y,
ranges_with_same_y_overlap_on_x,
merge_two_ranges_same_y_ranges,
sensor_range_in_y,
distance
)
# https://adventofcode.com/2022/day/15 part two
# a near brute force solution that luckily worked for me
# not even sure if it's entirely correct
#
# Doesn't actually output tuning_frequency(x, y), just the x, y
#
# I split the workload on a 4-core processor by invoking as follows:
# pypy3 aoc2022_d15p2.py 0 4000000 0 1000000 < d15.input.txt
# pypy3 aoc2022_d15p2.py 0 4000000 1000001 2000000 < d15.input.txt
# pypy3 aoc2022_d15p2.py 0 4000000 2000001 3000000 < d15.input.txt
# pypy3 aoc2022_d15p2.py 0 4000000 3000001 4000000 < d15.input.txt
#
# nice that this can be split into parallel work
#
# 3 out of 4 ran fast (11 minutes, 3, and 4), the other quarter ran for hours
# but spat out a solution before finishing. Ran 10 hours in the end
#
# I wonder if splitting that quarter into quarters would have helped...
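# for reference: the puzzle's answer for a found x, y is tuning_frequency(x, y)
# as defined below; e.g. the sample answer x=14, y=11 gives
# 4000000*14 + 11 == 56000011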
def ranges_adjasant(r1, r2):
r1, r2 = sorted( (r1, r2) )
return (r1[1][0]+1) == r2[0][0]
# rebuilt from the part one implementation to also merge adjacent, non-overlapping neighbours
def find_exclusion_ranges_for_y(y, sensors):
ranges = { sensor_range_in_y(s, y)
for s in sensors
if sensor_range_includes_y(s, y) }
prev_ranges_len = -1
ranges_to_remove = set()
ranges_to_add = set()
# merge ranges until the set of ranges stabilizes
while len(ranges) != prev_ranges_len:
for r1 in ranges:
for r2 in ranges:
# don't bother trying to merge a range with itself
# or to re-merge two ranges that are both already slated for removal
if ( (r1 is not r2) and
not (r1 in ranges_to_remove and r2 in ranges_to_remove) ):
if ranges_with_same_y_overlap_on_x(r1, r2):
ranges_to_add.add(
merge_two_ranges_same_y_ranges(r1, r2) )
ranges_to_remove.add(r1)
ranges_to_remove.add(r2)
elif ranges_adjasant(r1, r2):
r1a, r2a = sorted( (r1, r2) )
ranges_to_add.add(
( (r1a[0][0], r1a[0][1]),
(r2a[1][0], r1a[0][1] ) ) )
ranges_to_remove.add(r1)
ranges_to_remove.add(r2)
prev_ranges_len = len(ranges)
ranges = ranges - ranges_to_remove
ranges = ranges | ranges_to_add
ranges_to_add.clear()
ranges_to_remove.clear()
return ranges
# was developed for part 1 but later removed
# may or may not be useful here...
# def generate_boundaries(s):
# x1_bound, x2_bound = sensor_x_wide_bounds(s)
# yield ( (x1_bound, s.y), (x2_bound, s.y) )
# for i in range(1, s.distance+1):
# yield ( (x1_bound + i, s.y+i), (x2_bound - i, s.y+i) )
# yield ( (x1_bound + i, s.y-i), (x2_bound - i, s.y-i) )
def tuning_frequency(x, y):
return 4000000*x + y
def clamp_val(v, min_val, max_val):
return min(max(v, min_val), max_val)
def clamp_range_on_x(min_cord, max_cord, r):
return ( (clamp_val(r[0][0], min_cord, max_cord), r[0][1]),
(clamp_val(r[1][0], min_cord, max_cord), r[1][1] ) )
def find_bounded_gap_in_ranges(min_cord, max_cord, ranges_s):
ranges_l = list(ranges_s)
ranges_l.sort()
for i, r in enumerate(ranges_l):
ranges_l[i] = clamp_range_on_x(min_cord, max_cord, r)
y = ranges_l[0][0][1]
minx = ranges_l[0][0][0]
if minx > min_cord:
yield ( (min_cord, y), (minx-1, y) )
maxx = ranges_l[-1][1][0]
if maxx < max_cord:
yield ( (maxx+1, y), (max_cord, y) )
if len(ranges_l)>1:
for r1, r2 in zip( ranges_l, ranges_l[1:]):
yield ( (r1[1][0]+1,y), (r2[0][0]-1,y) )
def get_sensor_set(sensors):
return { (s.x, s.y) for s in sensors }
def get_beacon_set(sensors):
return { (s.cbx, s.cby) for s in sensors }
if __name__ == "__main__":
min_cord = int(argv[1])
max_cord= int(argv[2])
min_y = int(argv[3])
max_y = int(argv[4])
if (min_y < min_cord or min_y > max_cord or
max_y < min_cord or max_y > max_cord ):
raise Exception("input args do not work")
sensors = list(parse_input())
sensor_and_beacon_set = get_sensor_set(sensors) | get_beacon_set(sensors)
for y in range(min_y, max_y+1):
for ( (x1, y1), (x2, y2) ) in find_bounded_gap_in_ranges(
min_cord, max_cord,
find_exclusion_ranges_for_y(y, sensors)):
for x in range(x1, x2+1):
if (x, y) not in sensor_and_beacon_set:
# the part I don't yet understand, here I am filtering
# candidate answers against criteria, but I don't grasp
# why all the work in find_bounded_gap_in_ranges
# doesn't already limit this to workable results and
# instead yields me answers that I need to re-check
if all( distance( x, y, s.x, s.y) > s.distance
for s in sensors):
print( x, y )
from sys import stdin
from collections import namedtuple
from functools import lru_cache
from heapq import nlargest # needed by the (unused) neighbour_bound_condition below
# https://adventofcode.com/2022/day/16 part one
# Mark Jenkins
#
# the shortest path between all pairs of locations/valves/vertices is found
# using the Floyd-Warshall algorithm
# https://en.wikipedia.org/wiki/Floyd%E2%80%93Warshall_algorithm
#
# From there I pay attention to just the useful valves (rate>0) and use
# the shortest paths from the first step to convert what was originally
# an unweighted graph to a smaller, fully connected weighted graph
#
# I had 15 non-zero valves in my original graph, which implied over a
# trillion permutations [15 factorial (15!) ]
#
# But, by using backtracking and a greedy heuristic, I got my runtime down
# to 3 seconds. A counter shows I only considered 1.3 million calls
# to my backtracking recursion.
#
# I wrote a bound limiting function as well to backtrack sooner, but didn't
# end up using it
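# for reference, a minimal sketch of the textbook iterative Floyd-Warshall
# triple loop (the code below instead memoizes the same recurrence with
# lru_cache); dist would be an NxN matrix of direct edge costs, with
# float("inf") where there is no edge and 0 on the diagonal
def _floyd_warshall_sketch(dist):
    n = len(dist)
    for k in range(n):
        for i in range(n):
            for j in range(n):
                if dist[i][k] + dist[k][j] < dist[i][j]:
                    dist[i][j] = dist[i][k] + dist[k][j]
    return dist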
V = namedtuple('V', ('name', 'rate', 'neighbours'))
def parse_line(line):
parts = line.strip().split(" ")
return V( parts[1], # name
int(parts[4].split("=")[1][:-1]), # rate
tuple( (v if not v.endswith(",") # neighbours
else v[:-1])
for v in parts[9:] ) )
VALVES_LIST = [parse_line(line) for line in stdin]
VALVES_LIST.insert( 0, None ) # so we can number valves 1-N
NAMES_TO_NUMS = { v.name: i for i, v in enumerate(VALVES_LIST)
if i!= 0
}
# reverse is VALVES_LIST
VALVES = { v.name:v for v in VALVES_LIST if v is not None}
ZERO_VALVES = { v.name for v in VALVES_LIST if v is not None and v.rate == 0}
NON_ZERO_VALVES = set(VALVES) - ZERO_VALVES
def pair_directly_connected(i, j):
return VALVES_LIST[j].name in VALVES_LIST[i].neighbours
@lru_cache(maxsize=None)
def shortest_path(i, j, k):
if k==0:
return None if not pair_directly_connected(i, j) else 1
path1 = shortest_path(i, j, k-1)
path2 = shortest_path(i, k, k-1)
path3 = shortest_path(k, j, k-1)
if None is path2 or None is path3:
return path1
if path1 is None:
return path2+path3
return min( path1,
path2+path3 )
N = len(VALVES)
SHORTEST_PATHS = [ [None]*(N+1)
for i in range(N+1) ]
for i in range(1, N+1):
i_name = VALVES_LIST[i].name
for j in range(1, N+1):
j_name = VALVES_LIST[j].name
if i!=j and SHORTEST_PATHS[i][j] is None:
assert( SHORTEST_PATHS[j][i] is None )
SHORTEST_PATHS[i][j] = shortest_path(i, j, N)
#print("shortest from", i_name, "to", j_name, SHORTEST_PATHS[i][j] )
SHORTEST_PATHS[j][i] = SHORTEST_PATHS[i][j]
def rebuild_neighbours(v):
return V(
name=v.name,
rate=v.rate,
neighbours = tuple(
(n, SHORTEST_PATHS[ NAMES_TO_NUMS[v.name] ][NAMES_TO_NUMS[n] ])
for n in NON_ZERO_VALVES
if n!= v.name
)
)
NON_ZERO_VALVES_LIST = [
rebuild_neighbours(v)
for v in VALVES_LIST[1:] # skip the [0] None placeholder; the rate filter below drops the start valve
if v.name in NON_ZERO_VALVES
]
START = rebuild_neighbours( VALVES['AA'])
#print(START)
#for v in NON_ZERO_VALVES_LIST:
# print(v)
VALVES_OF_INTEREST = { START.name: START }
VALVES_OF_INTEREST.update(
(v.name, v) for v in NON_ZERO_VALVES_LIST )
# return True if a better solution is still possible, False if
# it's hopeless
def neighbour_bound_condition(
p, time_remaining, v_closed, best_solution):
valves_possible = time_remaining//2+1
best_case_scenario = p + sum(
rate * (time_remaining-i)
for i, rate in
enumerate(nlargest(
valves_possible,
(VALVES_OF_INTEREST[name].rate for name in v_closed) ), 1))
return best_case_scenario > best_solution
def prio_neighbours( neighbours, v_open_so_far, time_remaining):
return ( c for a,b,c in sorted( (
( n.rate * (time_remaining-distance),
-distance,
(n, distance) )
for (n, distance) in (
(VALVES_OF_INTEREST[n],d) for n,d in neighbours) ),
reverse=True
) )
def max_pressure_in_time(
v_open_so_far, valve,
p, t, time_remaining, best_solution, path_debug, first=False):
#time_remaining_2 = 30 - t
assert( time_remaining == 30-t )
path_debug.append( (valve.name, t, time_remaining) )
#print("enter", valve.name)
#time_remaining = 30-t
# could also
if t >= 30:
return max(best_solution, p)
if not first:
assert( valve.rate!=0 ) # should never return to rate 0
t+=1
time_remaining -= 1
p += valve.rate*time_remaining
v_open_so_far.add(valve.name)
# allow the current value to be the end of a solution
best_solution = max(p, best_solution)
for n, distance in prio_neighbours(
( (neigh, distance_inner)
for neigh, distance_inner in valve.neighbours
if neigh not in v_open_so_far),
v_open_so_far, time_remaining):
assert( n.rate != 0 ) # not returning to the start
new_p = max_pressure_in_time(
v_open_so_far,
n, # valve
p,
t+distance, time_remaining-distance, # t, time_remaining
best_solution, path_debug)
best_solution = max(new_p, best_solution)
if not first:
v_open_so_far.remove(valve.name)
#print("bottom exit from", valve.name, "with value", best_solution,
# time_remaining, path_debug)
path_debug.pop()
return best_solution
print( max_pressure_in_time(
set(), START,
0, # p
0, 30, # t, time_remaining
0, # best solution
[], # path_debug
first=True
) )