Mercurial > repos > other > adventofcode2023
comparison day22.rb @ 31:47dc75915e91
Implement falling/destroying blocks
Implemented part 1 with supports/supported by logic.
Implemented part 2 with cascades and summing counts. Code has
caching so it is quick _but_ it gives an answer that's too low.
author | IBBoard <dev@ibboard.co.uk> |
---|---|
date | Wed, 03 Jan 2024 17:00:56 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
30:6de4f4d5404d | 31:47dc75915e91 |
---|---|
#! /usr/bin/env ruby

require 'set'

# Usage: day22.rb <input-file>
abort("Incorrect arguments - needs input file") if ARGV.length != 1
abort("File #{ARGV[0]} did not exist") unless File.exist?(ARGV[0])

file = ARGV[0]
# A block occupies the cuboid described by three inclusive integer ranges.
Block = Struct.new(:id, :x, :y, :z)
# Candidate landing spot: the blocks at the top height z a faller rests on.
SupportedBy = Struct.new(:blocks, :z)
supports = Hash.new
supported_by = Hash.new

# Parse "x1,y1,z1~x2,y2,z2" lines into Blocks, one id per input line.
# File.foreach closes the file when iteration completes; the previous
# File.open(...).each_line enumerator never closed its handle (leak).
blocks = File.foreach(file, chomp: true).with_index.map do |line, id|
  block_start, block_end = line.split("~")
  start_x, start_y, start_z = block_start.split(",")
  end_x, end_y, end_z = block_end.split(",")
  supports[id] = []
  Block.new(id, start_x.to_i..end_x.to_i, start_y.to_i..end_y.to_i, start_z.to_i..end_z.to_i)
end
25 | |
# Comparator for settling order: lowest bottom face (z.first) first.
# Equal heights fall back to y then x purely to keep the sort deterministic.
def height_sort(a, b)
  by_z = a.z.first <=> b.z.first
  return by_z unless by_z.zero?
  # Same height - tie-break so the sort is stable across runs.
  by_y = a.y.first <=> b.y.first
  by_y.zero? ? a.x.first <=> b.x.first : by_y
end
34 | |
35 blocks.sort! {|a,b| height_sort(a, b)} | |
36 | |
# True when the two inclusive ranges share at least one value, i.e. either
# range contains an endpoint of the other.
def overlap?(range_1, range_2)
  [range_2.first, range_2.last].any? { |bound| range_1.include?(bound) } ||
    [range_1.first, range_1.last].any? { |bound| range_2.include?(bound) }
end
40 | |
# Settle every block under gravity, lowest first. Each block lands on top of
# the highest already-settled block(s) it overlaps in the x/y plane (or the
# ground at z = 0), and we record the supports / supported_by relationships.
# (The old unused `drop` local has been removed.)
blocks.reduce([]) do |settled, block|
  support = settled.reduce(SupportedBy.new([], 0)) do |best, lower|
    if overlap?(block.x, lower.x) && overlap?(block.y, lower.y)
      if lower.z.last > best.z
        # Strictly higher resting surface - it replaces all previous candidates.
        SupportedBy.new([lower], lower.z.last)
      elsif lower.z.last == best.z
        # Same height - this block shares the load.
        best.blocks << lower
        best
      else
        best
      end
    else
      best
    end
  end
  # Drop the block so its bottom face sits one above the supporting surface.
  block.z = (support.z + 1)..(support.z + block.z.size)
  support.blocks.each { |s| supports[s.id] << block }
  supported_by[block.id] = support.blocks
  settled << block
end

# Part 1: a block can be disintegrated iff everything resting on it has at
# least one other supporter.
disintegratable = supports.filter { |block_id, supports_list| supports_list.all? { |supported| supported_by[supported.id].length > 1 } }

puts "#{disintegratable.length} disintegratable blocks"
67 | |
# Part 2 helper: given that block `id` is disintegrated, work out which other
# blocks fall as a chain reaction. A block falls once *all* of its direct
# supporters have fallen.
#
# id           - integer id of the block being disintegrated
# supports     - Hash of id => [blocks resting directly on that block]
# supported_by - Hash of id => [blocks directly holding that block up]
# dropping     - Set of ids already treated as fallen (seeded with `id`)
# cache        - retained for interface compatibility but unused: the old
#                memoisation keyed only on a block's *immediate* supporters
#                intersected with `dropping`, so results computed inside one
#                cascade were replayed in a different cascade context and the
#                totals came out too low.
#
# Returns a Set of ids of every block that falls, excluding `id` itself.
def find_droppers(id, supports, supported_by, dropping, cache)
  fallen = Set.new(dropping)
  queue = [id]
  until queue.empty?
    current = queue.shift
    (supports[current] || []).each do |candidate|
      next if fallen.include?(candidate.id)
      # Only falls when every block holding it up has already fallen.
      next unless supported_by[candidate.id].all? { |s| fallen.include?(s.id) }
      fallen << candidate.id
      queue << candidate.id
    end
  end
  fallen - [id]
end
87 | |
# Part 2: for every block, count how many others fall if it is disintegrated,
# then report the grand total over all blocks.
cascade = Hash.new
cache = Hash.new

blocks.reverse_each do |block|
  id = block.id
  cascade[id] = find_droppers(id, supports, supported_by, Set.new([id]), cache)
end

puts "Dropping a total of #{cascade.values.sum(&:length)} blocks"