Compare commits

..

112 Commits

Author SHA1 Message Date
efim
572bffc845 removing inputs, didn't know they are secret 2023-12-25 09:39:34 +00:00
efim
b879b6541a day25: wow, part 1 2023-12-25 09:37:10 +00:00
efim
a9caa4c8f1 day25: removing cycles 2023-12-25 08:44:16 +00:00
efim
dce2d06602 day25: cleanup 2023-12-25 07:53:45 +00:00
efim
9177d35caf day25: initial graph stuff,
with cycles removal not working
2023-12-25 07:53:17 +00:00
efim
665987395f day24: input system for python z3 2023-12-24 12:36:08 +00:00
efim
5b03b8f156 day24: adding python z3, example 2023-12-24 11:47:22 +00:00
efim
bea82cb548 day24: why slope&shift work, while points dont? 2023-12-24 09:01:27 +00:00
efim
d749979aae day24, already bad 2023-12-24 08:15:18 +00:00
efim
b6a56554af day24, example 2023-12-24 07:42:04 +00:00
efim
2f6120fbd8 day23, part2 2023-12-23 15:56:25 +00:00
efim
0c31596018 day23, still bad 2023-12-23 12:54:55 +00:00
efim
28cf35e0e4 day23, example second 2023-12-23 12:00:48 +00:00
efim
7ebb6dee2c day23: started hardcode of part2, way too slow 2023-12-23 10:19:25 +00:00
efim
44de1377ca day23, part1 2023-12-23 09:16:04 +00:00
efim
c3acf211c3 day22, rewrite with Set 2023-12-22 13:01:58 +00:00
efim
29528f23ac day22: bug: not using Set - duplicate supports 2023-12-22 12:45:51 +00:00
efim
8be2fa3844 day22, part2 2023-12-22 12:21:49 +00:00
efim
45d03e5ab3 day22, part1, struggle 2023-12-22 12:19:12 +00:00
efim
2b3c7f4ca6 day22, example 2023-12-22 09:59:41 +00:00
efim
3ede691333 day22: initial block setting 2023-12-22 09:24:43 +00:00
efim
7b34b52e5e day22, simple block code 2023-12-22 08:44:10 +00:00
efim
99c2269df8 day21, part2, did not like 2023-12-21 13:44:01 +00:00
efim
b10a6250b1 day21: wrong answer AND slow. need to scrap 2023-12-21 12:36:07 +00:00
efim
4cb35dca33 day21: saturation logic, but removing points to early 2023-12-21 12:13:01 +00:00
efim
840773fd16 day21: factor for same coord struct 2023-12-21 10:19:37 +00:00
efim
9a22efd4b3 day21: choking on example 2023-12-21 09:27:41 +00:00
efim
f5ea9e725e day21: yuck 2023-12-21 08:39:27 +00:00
efim
6a7378c265 day1, part1 2023-12-21 08:30:44 +00:00
efim
5b0f1ab750 day21: example 2023-12-21 08:29:08 +00:00
efim
53930e66ac day20: more diagramming 2023-12-20 13:52:29 +00:00
efim
727099dbd9 day20, small cleanup 2023-12-20 13:26:34 +00:00
efim
98206fe6d4 day20, part2 done with online LCM calculator 2023-12-20 13:16:13 +00:00
efim
57fdfb01cb day20: whelp. 2023-12-20 11:14:45 +00:00
efim
1e32ec0988 day20, part1 2023-12-20 10:19:24 +00:00
efim
6c061375ce day20, examples work in test 2023-12-20 10:16:43 +00:00
efim
f538945dff day20, modules state comparations 2023-12-20 09:58:40 +00:00
efim
00e60657fa day20, example 2 first four steps pass 2023-12-20 09:46:22 +00:00
efim
1d7a0ef7b8 day20, receive functions 2023-12-20 08:54:12 +00:00
efim
4974127cef day20: more reading 2023-12-20 08:10:10 +00:00
efim
9dbc2ca205 day20, starting to read in data
with using tests as entry points for checking things
2023-12-20 07:35:51 +00:00
efim
e771ac9d9b day19, part2, finally 2023-12-19 11:46:16 +00:00
efim
52beb4196f day19, struggling part2, not quite yet 2023-12-19 11:00:59 +00:00
efim
5f62ea45f7 day19, example 2023-12-19 09:03:03 +00:00
efim
6efd55ae6a day18, example second parallel 2023-12-19 06:53:49 +00:00
efim
568fdd9a70 day18, example for part 2 2023-12-18 19:46:38 +00:00
efim
6dabe8bc66 day18, another example working 2023-12-18 19:37:17 +00:00
efim
86c9ad7653 day18, struggling 2023-12-18 14:50:51 +00:00
efim
4fc5caf228 day18: example optimized even more 2023-12-18 12:10:22 +00:00
efim
b831e92e1f day18: example optimized 2023-12-18 11:41:55 +00:00
efim
d799b122ce day18: reading instructions & hanging 2023-12-18 10:48:27 +00:00
efim
1d027d57fc day18, part1 2023-12-18 10:30:40 +00:00
efim
49fc57029f day18, example 2023-12-18 09:41:22 +00:00
efim
955bdc78c1 day17, example2 2023-12-17 13:58:34 +00:00
efim
a7e06e7a6e day17, part 1 wow. 2023-12-17 13:29:22 +00:00
efim
abca885f20 day17, so many tries for part 1 2023-12-17 13:28:18 +00:00
efim
81b8ddc8b0 day17, example attempt 2 2023-12-17 11:18:48 +00:00
efim
41e32d405b day17, example 2023-12-17 10:48:51 +00:00
efim
08c20ea6e0 day17, dijkstra doesn't work
right? just because if found a shorter path, doesn't mean i don't need
to check other directions.
i have, because maybe they are not blocked and will provide better
solution.
so, yikes
2023-12-17 09:22:31 +00:00
efim
ed4abd2d7e day16: part 2 2023-12-16 07:34:26 +00:00
efim
ee9c2c1ca0 day16, example two 2023-12-16 07:33:28 +00:00
efim
8436426d3a day16, part1 2023-12-16 07:11:15 +00:00
efim
cb973d60cc day16, example 2023-12-16 07:09:42 +00:00
efim
d09a8af5db day15, part2 2023-12-15 15:05:19 +00:00
efim
6e623b617e day15, example two 2023-12-15 15:04:50 +00:00
efim
bd45c334b0 day15, part1 2023-12-15 14:15:18 +00:00
efim
192ff3878e day15, example 2023-12-15 14:13:36 +00:00
efim
b5cb827be2 day14, part2 2023-12-14 11:57:31 +00:00
efim
149b753d22 day14, example two 2023-12-14 11:56:42 +00:00
efim
709f4c0532 day14, too slow. need memo? 2023-12-14 11:47:03 +00:00
efim
fde1415f34 day14, example 2023-12-14 11:30:03 +00:00
efim
3128ab673f day13, part2
had a bug in 'recording' the failed lines for Vertical.
debugged at second field in input, which was marking only 1 column as
having 1 smudge
2023-12-13 15:05:46 +00:00
efim
85dd9a31e3 day13, example two 2023-12-13 14:33:14 +00:00
efim
d4c162db33 day13, part1 2023-12-13 14:11:02 +00:00
efim
adcb2417ff day13, example 2023-12-13 14:09:58 +00:00
efim
4d346a23db day12, part2 2023-12-13 02:41:08 +00:00
efim
6398a4d468 day12, counts but too slow 2023-12-13 02:35:41 +00:00
efim
42b587918e day12, example part 2 2023-12-12 15:47:48 +00:00
efim
c187a03076 day12, part1 2023-12-12 15:33:56 +00:00
efim
cdf5a38512 day12, example optimized 2023-12-12 15:07:31 +00:00
efim
3a43f90431 day12, example 2023-12-12 15:05:09 +00:00
efim
7032666476 day11, part2 2023-12-11 11:23:06 +00:00
efim
65d6c13016 day1, example 2023-12-11 11:15:19 +00:00
efim
37ee3e99da day10, part2 2023-12-10 17:49:26 +00:00
efim
69cf6b9aaf day10, example last 2023-12-10 17:44:47 +00:00
efim
71262cabe7 day10, still not working 2023-12-10 17:03:40 +00:00
efim
742786af47 day10, doesn't work again.
pipes go into filling all 3x3 and all is lost
2023-12-10 15:54:01 +00:00
efim
e4afe55a1f day10, All Is Broken 2023-12-10 14:47:40 +00:00
efim
0b6c521b5b day10, marking the path 2023-12-10 12:39:41 +00:00
efim
76be5c45c3 day10, part1 2023-12-10 12:07:32 +00:00
efim
1626bd0be9 day10, example1 2023-12-10 12:03:57 +00:00
efim
c2091b49fd day10, reading & printing 2023-12-10 09:01:59 +00:00
efim
3919a70d09 day9, part2 2023-12-09 16:16:18 +00:00
efim
162d0d9ebf day9, part1 2023-12-09 15:59:51 +00:00
efim
554a3cb389 day9, example 2023-12-09 15:57:51 +00:00
efim
77d7355eb7 day8, part2 2023-12-09 06:39:43 +00:00
efim
3300a8bf3e day8, example3 new approach 2023-12-08 09:53:26 +00:00
efim
dea9d15c66 day8, example3 2023-12-08 07:18:59 +00:00
efim
414757f3ea day8, part1 2023-12-08 07:00:39 +00:00
efim
f0c7f9815e day8, example 2023-12-08 06:57:24 +00:00
efim
d959473a29 day7, part2 2023-12-07 07:37:43 +00:00
efim
28aef4c64c day7, input 2023-12-07 07:00:18 +00:00
efim
b112460875 day7, example 2023-12-07 06:57:20 +00:00
efim
85c2000591 day6, part2 2023-12-06 12:59:20 +00:00
efim
06b555d7f9 day6, part1 2023-12-06 12:47:37 +00:00
efim
a7d13ce84f day6, example 2023-12-06 12:46:02 +00:00
efim
7e6a543790 day5: parallelized 2023-12-05 11:37:55 +00:00
efim
7dbc09d3f8 day5, part2 2023-12-05 11:21:23 +00:00
efim
02abc9d0e8 day5, part2. making map efficient 2023-12-05 09:50:19 +00:00
efim
cebdd72171 day5, example 2023-12-05 09:33:03 +00:00
efim
e05226f6aa day4, part2. here come off-by-one errors 2023-12-04 09:14:51 +00:00
efim
3253a8f963 day4, example 2023-12-04 08:18:34 +00:00
130 changed files with 12701 additions and 1249 deletions

2
.gitignore vendored
View File

@@ -1,2 +1,4 @@
/.direnv/
/.go
input
*.png

1000
day1/input

File diff suppressed because it is too large Load Diff

450
day10/dayTen.go Normal file
View File

@@ -0,0 +1,450 @@
package day10
import (
"errors"
"fmt"
"log"
"os"
"slices"
"strings"
)
// Run solves day 10: it reads the pipe map, finds the closed loop through
// the start tile 'S' ("beast"), marks every loop cell, classifies cells
// outside the loop via a top-down intersection scan, and returns the count
// of cells strictly inside the loop (part 2 answer).
func Run() int {
	fmt.Println("hello day 10")
	// filename := "day10/example2noisy"
	filename := "day10/input"
	fieldMap := Read(filename)
	fmt.Println(fieldMap.BeastCoord)
	// fmt.Println(fieldMap.String())
	// fmt.Printf("%+v\n", fieldMap.Cells)
	// log.Printf(">> does Equals work? {1,2} == {1,2} is %t\n", (Coord{1,2} == Coord{1,2}))
	// log.Printf(">> does Index work? {1,2} in [{2,2}, {1,2}] is %d \n", slices.Index([]Coord{{2,2}, {1,2}}, Coord{1,2}))
	// fieldMap.checkDirectionFromBeast(Coord{1,2})
	// try each neighbor of 'S' until one walk loops back to 'S'; the
	// successful call also rewrites the beast cell's neighbor list to the
	// two loop coordinates (side effect used by markMainLoop below)
	beastNeighborCoords := fieldMap.Cells[fieldMap.BeastCoord].Neighbords
	// len := 0
	for _, coord := range beastNeighborCoords {
		log.Printf("checking neighbor %v\n", coord)
		isCycle, _ := fieldMap.checkDirectionFromBeast(coord)
		if isCycle {
			log.Printf("found cycle through %v\n", coord)
			// len = curLen
			break
		}
	}
	// fmt.Println("beore marking:")
	fieldMap.markMainLoop()
	// fmt.Println("after marking loop:")
	// fmt.Println(fieldMap.String())
	fmt.Println("beore marking closest Outer:")
	// now main loop is closed with regards to 'S' neighbors
	// TODO Hardcode change S to correct Title
	fixedBeast := fieldMap.Cells[fieldMap.BeastCoord]
	// fmt.Printf("figuring out how to fix beast:\n%+v", fixedBeast)
	// NOTE(review): '|' is hardcoded for this specific puzzle input (see
	// notes.org "i will change the S myself. by hardcode") — not general.
	fixedBeast.Tile = '|'
	fieldMap.Cells[fieldMap.BeastCoord] = fixedBeast
	fieldMap.countIntersectionsTopDown()
	// fieldMap.initialMarkOuter()
	fmt.Println("after marking closest Outer:")
	fmt.Println(fieldMap.String())
	// now let's coiunt the inner
	// anything not on the loop and not marked outer is inside
	result := 0
	for _, cell := range fieldMap.Cells {
		if !cell.IsOnMainPath && !cell.IsOuter {
			result += 1
		}
	}
	// prevResult := (len / 2) + (len % 2)
	return result
}
// so do i work with just [][]rune ?
// func Next(from Coord, through Coord) (Coord, error) ?
// and here check that 'from' has exit into 'through'
// and check that 'through' has entrance from 'from'
// so, i guess i could do 'exit direction' and 'entrance direction'
// then compare 'exit direction' with what's available on 'from'
//
// or i can just have function 'canExit(from, to Coord)' and canEnter(from, to Coord)
// i suppose it would be nice to just create Cell(Coord, Type) and
// cell would map 'from' to 'to'

// Cell is one tile of the pipe map plus the marks accumulated by the
// solving passes.
type Cell struct {
	Coord        Coord   // position of this cell on the map
	Tile         rune    // raw input symbol: one of | - L J F 7 . S
	Neighbords   []Coord // coords this tile connects to (field name kept as-is; used throughout the file)
	IsOnMainPath bool    // set by markMainLoop for cells on the closed loop
	IsOuter      bool    // set by markOuter for cells known to be outside the loop
}
// String renders the cell as a single display glyph: "S" for the start,
// "O" for marked-outer cells, "I" for unmarked non-path cells, box-corner
// glyphs for bends, a space for ground, and the raw tile otherwise.
func (c *Cell) String() string {
	switch {
	case c.Tile == 'S':
		return "S"
	case c.IsOuter:
		return "O"
	case !c.IsOnMainPath:
		// not outer (handled above) and not on the path: inner candidate
		return "I"
	}
	glyphs := map[rune]string{
		'7': "⌝",
		'J': "⌟",
		'F': "⌜",
		'L': "⌞",
		'.': " ",
	}
	if glyph, ok := glyphs[c.Tile]; ok {
		return glyph
	}
	return string(c.Tile)
}
// Coord is an (X, Y) position on the map; X grows rightward, Y downward.
type Coord struct {
	X, Y int
}

// Equal reports whether two coordinates are the same point.
// Coord has only comparable fields, so plain struct equality suffices.
func (c Coord) Equal(other Coord) bool {
	return c == other
}
// Direction is one of the four cardinal movement directions on the map.
type Direction int

const (
	UP Direction = iota
	DOWN
	LEFT
	RIGHT
)

// String returns the direction's name for logging.
// Like the original, it panics on an out-of-range value.
func (d Direction) String() string {
	return [...]string{UP: "UP", DOWN: "DOWN", LEFT: "LEFT", RIGHT: "RIGHT"}[d]
}
// Shift returns the coordinate one step away from c in direction d.
// An unrecognized direction yields the zero Coord, matching the original.
func (c Coord) Shift(d Direction) Coord {
	switch d {
	case UP:
		return Coord{c.X, c.Y - 1}
	case DOWN:
		return Coord{c.X, c.Y + 1}
	case LEFT:
		return Coord{c.X - 1, c.Y}
	case RIGHT:
		return Coord{c.X + 1, c.Y}
	}
	return Coord{}
}
// Map is the parsed pipe field.
type Map struct {
	Cells         map[Coord]Cell // all cells keyed by coordinate; values are copies — write cells back after mutating
	Height, Width int            // grid dimensions in cells
	BeastCoord    Coord          // location of the 'S' start tile
}
// String renders the map as a header line with its dimensions followed by
// one row of display glyphs per grid row.
func (m *Map) String() string {
	// strings.Builder avoids the quadratic cost of += concatenation over
	// the whole grid. Header typo fixed: "and with" -> "and width".
	var b strings.Builder
	fmt.Fprintf(&b, "map of height %d and width %d\n", m.Height, m.Width)
	for y := 0; y < m.Height; y++ {
		for x := 0; x < m.Width; x++ {
			cell := m.Cells[Coord{x, y}]
			b.WriteString(cell.String())
		}
		b.WriteByte('\n')
	}
	return b.String()
}
// markMainLoop walks the closed loop once, starting from the beast ('S')
// cell, and sets IsOnMainPath on every visited cell.
// Precondition: checkDirectionFromBeast has already found a cycle and
// trimmed the beast's Neighbords to exactly the two loop coordinates.
func (m *Map) markMainLoop() {
	start := m.Cells[m.BeastCoord]
	start.IsOnMainPath = true
	m.Cells[m.BeastCoord] = start // map holds values, so write the modified cell back
	previous := start
	currentCell := m.Cells[previous.Neighbords[0]]
	// log.Printf("starting marking of main loop from %+v through %+v\n", start, currentCell)
	for currentCell.Tile != 'S' {
		currentCell.IsOnMainPath = true
		m.Cells[currentCell.Coord] = currentCell
		// log.Printf("marking loop on %+v (%s)\n", currentCell, currentCell.String())
		nextCoord, _, err := currentCell.Next(previous.Coord)
		// log.Printf("next coord will be %v %s\n", nextCoord, err)
		if err != nil {
			// should not happen on a valid closed loop; bail out quietly
			return
		}
		previous = currentCell
		currentCell = m.Cells[nextCoord]
	}
}
// initialMarkOuter was an experimental pass for seeding 'outer' marks by
// walking the main loop from its topmost cell. It is currently unused — the
// only call site in Run is commented out — and its loop body is mostly
// commented-out scaffolding; kept for reference.
func (m *Map) initialMarkOuter() {
	// for start point let's take my highest on main path and one above
	// and will have a runner pointer to the cell on the outside
	var pathCunner Cell
outer:
	for y := 0; y < m.Height; y++ {
		for x := 0; x < m.Width; x++ {
			if cell := m.Cells[Coord{x, y}]; cell.IsOnMainPath {
				pathCunner = cell
				// the cell directly above the topmost path cell is outside
				m.markOuter(Coord{x, y - 1})
				break outer
			}
		}
	}
	startPoint := pathCunner
	previous := startPoint
	firstDirection := startPoint.OutDirections()[0]
	nextCoord := previous.Coord.Shift(firstDirection)
	currentCell := m.Cells[nextCoord]
	// walk the loop once; the intended marking work never got implemented
	for currentCell.Coord != startPoint.Coord {
		// looping once. and need to operate on the outer runner
		// and i don't have the direction? well, i guess i could use direction
		// outerRunner = m.markOuterAndMove(previous, outerRunner, exitingPreviousBy)
		// m.markOuterAroundPathElem(currentCell)
		var err error
		nextCoord, _, err = currentCell.Next(previous.Coord)
		if err != nil {
			panic("initial mark cycle can't get next")
		}
		previous = currentCell
		currentCell = m.Cells[nextCoord]
	}
}
// markOuter flags the cell at the given coordinate as outside the loop.
// Out-of-bounds coordinates are logged and ignored; cells on the main path
// are never marked.
func (m *Map) markOuter(outerPointerCoord Coord) {
	if !m.isValidCoord(outerPointerCoord) {
		log.Printf("non valid %+v to mark as Outer", outerPointerCoord)
		return
	}
	cell := m.Cells[outerPointerCoord]
	if cell.IsOnMainPath {
		return
	}
	cell.IsOuter = true
	m.Cells[cell.Coord] = cell // write the value copy back into the map
}
// call for each direction from beast.
// will run the path until it loops back at best, or terminates
//
// checkDirectionFromBeast walks the pipe starting at the beast ('S') cell
// through the given neighbor coordinate. It returns isCycle=true with the
// step count when the walk returns to 'S'; otherwise isCycle=false (walk
// dead-ended, left the map, or hit a non-connecting pipe).
//
// Side effect: on success the beast cell's Neighbords is rewritten to the
// two loop coordinates, which markMainLoop relies on.
func (m *Map) checkDirectionFromBeast(through Coord) (isCycle bool, len int) {
	// defer log.Printf("about to return check from beast %v, isCycle : %t. len is %d", through, isCycle, len)
	len = 1
	previous := m.Cells[m.BeastCoord]
	currentCell, found := m.Cells[through]
	// log.Printf("check direction init for %+v\n", currentCell)
	for found && currentCell.Tile != 'S' {
		// log.Printf("check direction loop for %+v (%s)\n", currentCell, currentCell.String())
		len += 1
		nextCoord, _, err := currentCell.Next(previous.Coord)
		// log.Printf("next coord will be %v %s\n", nextCoord, err)
		if err != nil {
			// pipe does not connect onward: not a cycle through this neighbor
			return
		}
		previous = currentCell
		currentCell, found = m.Cells[nextCoord]
	}
	if currentCell.Tile == 'S' {
		// log.Printf("found cycle, len is %d\n", len)
		isCycle = true
		// let's close the loop now.
		beastCell := m.Cells[m.BeastCoord]
		beastCell.Neighbords = []Coord{previous.Coord, through}
		m.Cells[m.BeastCoord] = beastCell
		// log.Printf("cells are not %+v", m.Cells)
	}
	return
}
// isValidCoord reports whether c lies within the map bounds
// (0 <= X < Width and 0 <= Y < Height).
func (m *Map) isValidCoord(c Coord) bool {
	// collapsed the if/return-false/return-true form into one expression
	return c.X >= 0 && c.Y >= 0 && c.X < m.Width && c.Y < m.Height
}
// Read parses the puzzle input file into a Map. It panics if the file
// cannot be read. Width is the byte length of the first line (the input is
// ASCII), and the 'S' tile's position is recorded as BeastCoord. Each
// cell's Neighbords is precomputed from its tile shape.
func Read(filename string) Map {
	result := Map{}
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("cannot read file ", filename))
	}
	text := string(bytes)
	text = strings.TrimSpace(text)
	lines := strings.Split(text, "\n")
	result.Height = len(lines)
	result.Width = len(lines[0])
	result.Cells = map[Coord]Cell{}
	for y, line := range lines {
		for x, symb := range line {
			coord := Coord{X: x, Y: y}
			if symb == 'S' {
				result.BeastCoord = coord
			}
			cell := Cell{
				Coord: coord,
				Tile:  symb,
			}
			// neighbor coords may point off-map; consumers check validity
			cell.Neighbords = cell.GetNeighbors()
			result.Cells[coord] = cell
		}
	}
	return result
}
// GetNeighbors returns the coordinates this cell's tile connects to,
// one per exit direction of the tile shape.
func (c *Cell) GetNeighbors() []Coord {
	dirs := c.OutDirections()
	neighbors := make([]Coord, len(dirs))
	for i, d := range dirs {
		neighbors[i] = c.Coord.Shift(d)
	}
	return neighbors
}
// doesn't check whether 'from' has exit into c
// only whether c can accept connection from that direction
// - check if 'from' is in neighbors
// if it is - then take another neighbor
// wouldn't work for 'S' but we don't need it to
//
// Next returns the coordinate the pipe continues to when entered from
// 'from', plus the direction of that continuation. It errors when the cell
// does not have exactly two neighbors or 'from' is not one of them.
func (c *Cell) Next(from Coord) (to Coord, cameFrom Direction, err error) {
	if len(c.Neighbords) != 2 {
		// BUG FIX: the %c verb was applied to *Cell (it expects a rune),
		// producing garbage in the error text; use %+v instead.
		// errors.New is kept (rather than fmt.Errorf) so the file's
		// "errors" import remains used.
		return Coord{}, 0, errors.New(fmt.Sprintf("not 2 neighbors: cannot get next from %v through %+v", from, c))
	}
	i := slices.Index(c.Neighbords, from)
	if i == -1 {
		return Coord{}, 0, errors.New(fmt.Sprintf("cannot find next from %v through %+v", from, c))
	}
	// the outgoing direction is whichever exit does not lead back to 'from'
	var nextDirection Direction
	for _, direction := range c.OutDirections() {
		if c.Coord.Shift(direction) != from {
			nextDirection = direction
		}
	}
	otherIndex := 1 - i
	return c.Neighbords[otherIndex], nextDirection, nil
}
// x from left to right; y from top to bottom
//
// OutDirections returns the exit directions of this cell's tile shape.
// 'S' connects in all four directions; ground and unknown tiles have none.
func (c *Cell) OutDirections() []Direction {
	exits := map[rune][]Direction{
		'|': {UP, DOWN},
		'-': {LEFT, RIGHT},
		'L': {UP, RIGHT},
		'J': {UP, LEFT},
		'F': {RIGHT, DOWN},
		'7': {LEFT, DOWN},
		'S': {UP, DOWN, LEFT, RIGHT}, // all
	}
	if dirs, ok := exits[c.Tile]; ok {
		return dirs
	}
	return []Direction{}
}
// countIntersectionsTopDown scans each column top to bottom keeping a
// per-column stack of loop tiles seen so far. Cancelling tile pairs (see
// isOpposite / isScrunching) pop the stack, so a non-path cell whose column
// stack is empty has crossed the loop an even number of times above it and
// is marked Outer. The final stacks are printed for debugging.
func (m *Map) countIntersectionsTopDown() {
	stacks := make([][]rune, m.Width) // stack for each X
	for y := 0; y < m.Height; y++ {
		for x := 0; x < m.Width; x++ {
			stack := stacks[x]
			// FIX: local was named `len`, shadowing the builtin
			depth := len(stack)
			cell := m.Cells[Coord{x, y}]
			if cell.IsOnMainPath {
				if depth == 0 {
					stack = append(stack, cell.Tile)
				} else {
					top := stack[depth-1]
					if isOpposite(top, cell.Tile) {
						stack = stack[:depth-1]
					} else if isScrunching(top, cell.Tile) {
						// a scrunching pair collapses into a horizontal run
						stack = stack[:depth-1]
						stack = append(stack, '-')
						stack, _ = popTwoHorizontals(stack)
					} else if cell.Tile != '|' {
						stack = append(stack, cell.Tile)
					}
				}
				stacks[x] = stack
			} else {
				if depth == 0 {
					// even number of loop crossings above: outside
					m.markOuter(cell.Coord)
				}
			}
		}
	}
	for x := 0; x < m.Width; x++ {
		stack := stacks[x]
		fmt.Println(string(stack))
	}
	fmt.Print(stacks)
}
// popTwoHorizontals removes the top two stack elements when both are '-',
// reporting whether the pop happened; otherwise the stack is returned
// unchanged. (Local renamed from `len`, which shadowed the builtin.)
func popTwoHorizontals(stack []rune) ([]rune, bool) {
	n := len(stack)
	if n >= 2 && stack[n-1] == '-' && stack[n-2] == '-' {
		return stack[:n-2], true
	}
	return stack, false
}
// isScrunching reports whether the incoming tile "scrunches" against the
// stack top: an F followed by a J, or a 7 followed by an L.
func isScrunching(pipe, other rune) bool {
	return (pipe == 'F' && other == 'J') || (pipe == '7' && other == 'L')
}
// isOpposite reports whether the incoming tile cancels the stack top:
// '-' cancels '-', and each bend cancels its vertical mirror
// (L/F and J/7 in either order).
func isOpposite(pipe, other rune) bool {
	cancels := map[rune]rune{
		'-': '-',
		'L': 'F',
		'J': '7',
		'F': 'L',
		'7': 'J',
	}
	want, ok := cancels[pipe]
	return ok && want == other
}

5
day10/example1 Normal file
View File

@@ -0,0 +1,5 @@
.....
.S-7.
.|.|.
.L-J.
.....

5
day10/example2noisy Normal file
View File

@@ -0,0 +1,5 @@
-L|F7
7S-7|
L|7||
-L-J|
L|-JF

5
day10/example3 Normal file
View File

@@ -0,0 +1,5 @@
..F7.
.FJ|.
SJ.L7
|F--J
LJ...

5
day10/example4noisy Normal file
View File

@@ -0,0 +1,5 @@
7-F7-
.FJ|7
SJLL7
|F--J
LJ.LJ

7
day10/example5 Normal file
View File

@@ -0,0 +1,7 @@
.......
...F7..
..FJ|..
.SJ.L7.
.|F--J.
.LJ....
.......

10
day10/example6 Normal file
View File

@@ -0,0 +1,10 @@
FF7FSF7F7F7F7F7F---7
L|LJ||||||||||||F--J
FL-7LJLJ||||||LJL-77
F--JF--7||LJLJ7F7FJ-
L---JF-JLJ.||-FJLJJ7
|F|F-JF---7F7-L7L|7|
|FFJF7L7F-JF7|JL---7
7-L-JL7||F7|L7F-7F7|
L.L7LFJ|||||FJL7||LJ
L7JLJL-JLJLJL--JLJ.L

168
day10/notes.org Normal file
View File

@@ -0,0 +1,168 @@
#+title: Notes
* part 2.
how would i even try to check of 'in' or 'out' of the loop?
i guess i could mark things while i go through the loop?
or, i could count manually?
i could change neighbors of the beast point.
then do one more pass around, setting bool 'onMainPath' to true
and then change all else to .
i guess i could then do one more pass, and mark as 'I' and 'O' closest line of .
another idea i don't really want to pursue - 'color fill from some corner'
this is also painful due to pipes touching still need to let the 'O' filling in.
but yeah, if i make initial filling in of the I, then i could just fill in all . with I until nothing changes.
and count I
sounds like a plan
** allright, i found main path, display all not on main path as ' '
and the thing is a mess
i don't know how to select a point to mark as 'I'
but! if i select a point to mark as 'O'
and then go around the main path, marking things on the side as 'O'
then i would be able to fill in all which are neighbors of 'O' as 'O'
and that would leave only 'I' to be counted
so.
how would i monitor a consistent 'side' of the pipe during the walkthrough?
if we go down - we color as 'O' one down.
if we go angle - color two - one down and one to the right.
and only color if it's not already on main path.
i suppose so
** so a new method for initial pass of 'O'
well, i'll need access to the direction, in which the neighbor is taken?
nooo. not direction, but the symbol. ok, this is easier
but i'll need to mutate the field
no, i do need direction.
ok, let's go lunch maybe, and maybe it will be a place with a power outlet as well
** all is bad.
i guess my model is unfit.
what if. i had only 'forward' movement,
and then Next would sometimes return 'forward', and sometimes 'right', 'forward'
then.
i'll need to store current directoin,
have function to rotate it.
and do 'forward' on coords based on the current direction
and that would be NextPathCell()
but from same how would i distinguish between being on top or bottom for -J
i'll need to remember whether outer is to the 'left' or 'right' of the path
and then if path turns into outer - just turn, don't do 'forward'
and if path torns out of outer - do "big turn"
maybe there's an easy check here?
if path
** and one more idea.
i could do without 'walking' the outer pointer.
i could mark all outer around the path element.
based on where the outer is located
and there are two possibilities.
i could even maybe mark them as 3x3
i stay on the path element.
i check what are the 'O' in it's 3x3
and fill in other of same area as 'O' and move on to next
yeah, i think i like that.
i will have a function that takes the path element.
select the template
then go through 3x3 around the path element, if sees element with .isOuter == true saves the name of the area,
then for each found out area mark them all as 'O'
AAA
.-.
BBB
A.B
A|B
A.B
A.B
A⌞.
AAA
B.A
.⌟A
AAA
AAA
.⌝A
B.A
AAA
A⌜.
A.B
i guess i could just code that.
** it also doesn't work. pipes go into places where the whole 3x3 is path pipes, and the info on 'outer' is lost
** will it work to count intersections between a point and the Top \ Left direction?
** hypothesis - from the point, count up, down, left & right - intersections with the golden path.
if into U&D or L&R both are odd - then inside.
short circuit - if into 1 direction number is 0 - definitely outside
and that's square? could kube? well, there shouldn't be too many points without the pipe already
maybe i can somehow count for the whole map?
if element on path - increase by 1 value from top
*** also, what if i could consturct couter example before i start coding, you know
with the point that is outside, but has odd in two sides?
yes, i think there's a counter example with a point which has 3 on all sides because of path going directly to that side
but maybe we don't count that as 'crossing'? only if we what? cross non-parallel line?
that could be it
** ok, last idea, yes, count intersections
one additional [][]int
from top to bottom. take the value from the top; if it's a pipe (and doesn't have down movement) increase by 1
if it's not on path and has Even number, already count as Outer
** oh, come on .
7
|
L is 1
but
7
|
J is 0
how to account for that?
monitor the left and right somehow.
have another map, store left or right in previous cell if we are in top thingy?
if we're on path, and previous is empty - store left or right?
so, copy previous
and if we're counting up - store nothing.
or just monitor angles?
maybe have stack? of seen path elements?
and some elements take off?
** ok, one more attempt?
not counting, but maintaining a stack?
and opposite angles take the element off the stack?
so - takes off -
and any left braket takes off any right bracket
and two bottom angles shouldn't be allowed to counteract one another, but top & bottom should be balanced anyway
** uh. so many more updates.
and i'll need to overwrite S to its value somehow
i will change the S myself. by hardcode

109
day11/dayEleven.go Normal file
View File

@@ -0,0 +1,109 @@
package day11
import (
"fmt"
"log"
"os"
"strings"
)
// Run solves day 11 part 2 (cosmic expansion): reads the star field,
// builds prefix counts of fully-empty rows and columns, then sums the
// Manhattan distances over all star pairs with every empty row/column
// counted 1,000,000 times.
func Run() int64 {
	fmt.Println("hello day 11")
	filename := "day11/input"
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("cannot read file ", filename))
	}
	text := strings.TrimSpace(string(bytes))

	starCoords := make([]StarCoord, 0)
	lines := strings.Split(text, "\n")
	nonEmptyRows := make(map[int]any)
	nonEmptyCols := make(map[int]any)
	for rowNum, line := range lines {
		for colNum, symb := range line {
			if symb == '#' {
				starCoords = append(starCoords, StarCoord{rowNum, colNum})
				nonEmptyCols[colNum] = struct{}{}
				nonEmptyRows[rowNum] = struct{}{}
			}
		}
	}

	// prefix sums: emptyRowsAbove[r] = count of fully-empty rows in [0, r]
	emptyRowsAbove := make([]int, len(lines))
	// BUG FIX: must be sized by the grid WIDTH (len(lines[0])), not the
	// height; the original used len(lines), which panics on column
	// indexing whenever the grid is wider than it is tall.
	emptyColsToTheLeft := make([]int, len(lines[0]))
	for rowNum := range lines {
		if rowNum > 0 {
			emptyRowsAbove[rowNum] = emptyRowsAbove[rowNum-1]
		}
		if _, isRowNonempty := nonEmptyRows[rowNum]; !isRowNonempty {
			emptyRowsAbove[rowNum] += 1
		}
	}
	for colNum := range lines[0] {
		if colNum > 0 {
			emptyColsToTheLeft[colNum] = emptyColsToTheLeft[colNum-1]
		}
		if _, isColNonempty := nonEmptyCols[colNum]; !isColNonempty {
			emptyColsToTheLeft[colNum] += 1
		}
	}

	// each empty row/column expands to 1,000,000 rows/columns,
	// i.e. contributes an extra (1,000,000 - 1) per crossing
	const multiplier int64 = 1000000 - 1
	var distanceSum int64
	for i := 0; i < len(starCoords); i++ {
		for j := i + 1; j < len(starCoords); j++ {
			// calc distance between stars i and j
			starA := starCoords[i]
			starB := starCoords[j]
			maxRow, minRow := starA.Row, starB.Row
			if maxRow < minRow {
				maxRow, minRow = minRow, maxRow
			}
			emptyRowsBetween := int64(emptyRowsAbove[maxRow]) - int64(emptyRowsAbove[minRow])
			rowDistance := int64(maxRow) - int64(minRow) + emptyRowsBetween*multiplier
			maxCol, minCol := starA.Col, starB.Col
			if maxCol < minCol {
				maxCol, minCol = minCol, maxCol
			}
			emptyColsBetween := int64(emptyColsToTheLeft[maxCol]) - int64(emptyColsToTheLeft[minCol])
			colDistance := int64(maxCol) - int64(minCol) + emptyColsBetween*multiplier
			distance := rowDistance + colDistance
			// BUG FIX: argument order now matches the verbs — %d gets the
			// indices, %+v gets the StarCoord structs (was i, j, starA, starB)
			log.Printf("between stars %d %+v and %d %+v distance is %d. emptyColsBetween %d ; emptyRowsBetween %d\n", i, starA, j, starB, distance, emptyColsBetween, emptyRowsBetween)
			distanceSum += distance
		}
	}
	// oh, i have list of all stars, i can just iterate over them and
	// only keep rowNums for which there are stars. yeah
	fmt.Println(starCoords)
	fmt.Println(emptyRowsAbove)
	fmt.Println(emptyColsToTheLeft)
	return distanceSum
}
// StarCoord is a star ('#') position in the grid: Row is the line index,
// Col the byte offset within the line.
type StarCoord struct {
	Row, Col int
}

10
day11/example Normal file
View File

@@ -0,0 +1,10 @@
...#......
.......#..
#.........
..........
......#...
.#........
.........#
..........
.......#..
#...#.....

184
day12/dayTwelve.go Normal file
View File

@@ -0,0 +1,184 @@
package day12
import (
"fmt"
"log"
"os"
"strconv"
"strings"
"sync"
"time"
)
// Run solves day 12 (unfolded spring records): every input line is counted
// in its own goroutine, results are funneled through the matches channel,
// and the channel is closed once the WaitGroup drains. Returns the total
// arrangement count.
func Run() int {
	fmt.Println("hello day 12.")
	start := time.Now()
	filename := "day12/input"
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprintf("error reading file %s\n", filename))
	}
	result := 0
	text := string(bytes)
	text = strings.TrimSpace(text)
	// testMask, testBlocks := ReadLine(".??..??...?##. 1,1,3")
	// blocksSum := 0
	// for _, block := range testBlocks {
	// 	blocksSum += block
	// }
	// testVariants := generatePermutations("", len(testMask), testBlocks, blocksSum, testMask)
	// fmt.Printf("for mask %s and blocks %+v\n", testMask, testBlocks)
	// for _, variant := range testVariants {
	// 	fmt.Println(variant)
	// }
	var wg sync.WaitGroup
	lines := strings.Split(text, "\n")
	wg.Add(len(lines))
	matches := make(chan int)
	// closer goroutine: the range over matches below terminates only after
	// every worker has called wg.Done and the channel is closed here
	go func() {
		wg.Wait()
		close(matches)
	}()
	for i, line := range lines {
		// line/i passed as arguments to avoid loop-variable capture
		go func(line string, lineNum int) {
			mask, blockLengs := ReadLine(line)
			blockLengthSum := 0
			for _, blockLen := range blockLengs {
				blockLengthSum += blockLen
			}
			// memo is per-line: keys depend only on that line's state
			memo := make(map[string]int)
			variantsCount := generatePermutations("", len(mask), blockLengs, blockLengthSum, mask, memo)
			log.Printf("%d : for line %s blocks %+v matches %d\n", lineNum, mask, blockLengs, variantsCount)
			matches <- variantsCount
			wg.Done()
		}(line, i)
	}
	num := 0
	for match := range matches {
		num += 1
		result += match
		log.Printf("%d. intermediate: %d\n", num, result)
		fmt.Printf("%d\n", result)
	}
	end := time.Now()
	diff := end.Sub(start)
	log.Printf("> calculated for %s", diff.String())
	return result
}
// myRepeat returns line repeated amount times with sep between repetitions,
// e.g. myRepeat("ab", ",", 3) == "ab,ab,ab". amount <= 0 yields "".
func myRepeat(line, sep string, amount int) string {
	if amount <= 0 {
		return ""
	}
	// strings.Repeat replaces the original's += loop + CutPrefix dance
	return line + strings.Repeat(sep+line, amount-1)
}
// ???.### 1,1,3
//
// ReadLine parses one spring record of the form "mask lengths" and unfolds
// both halves five-fold (part 2 rules): the mask joined by '?', the length
// list joined by ','. Panics on malformed input.
func ReadLine(line string) (string, []int) {
	parts := strings.Split(line, " ")
	if len(parts) != 2 {
		panic(fmt.Sprintf("error splitting %s into 2", line))
	}
	mask := myRepeat(parts[0], "?", 5)
	unfoldedBlocks := myRepeat(parts[1], ",", 5)
	// log.Printf(">> repeating blocks %s", unfoldedBlocks)
	pieces := strings.Split(unfoldedBlocks, ",")
	lengths := make([]int, len(pieces))
	for i, piece := range pieces {
		num, err := strconv.Atoi(piece)
		if err != nil {
			panic(fmt.Sprintf("error extracting num %s from %s\n", piece, line))
		}
		lengths[i] = num
	}
	return mask, lengths
}
// generatePermutations counts mask-compatible arrangements by growing
// curString one decision at a time: either append a '.' gap or append the
// next '#' block (followed by a mandatory '.' unless it is the last block).
// Results are memoized by (remaining blocks, current length) — valid
// because any two prefixes of equal length with the same remaining blocks
// have identical completion counts once they pass the mask check.
//
// Parameters: targetLength is the full mask length; blockLengthsSum is the
// sum of blockLengths (kept as a running argument to avoid recomputing);
// memo is shared across one line's whole recursion.
func generatePermutations(curString string, targetLength int, blockLengths []int, blockLengthsSum int, mask string, memo map[string]int) int {
	memoKey := fmt.Sprintf("%+v|%d", blockLengths, len(curString))
	memoized, memoFound := memo[memoKey]
	if memoFound {
		return memoized
	}
	// fmt.Printf("> entering with \n%s\nfor map \n%s\n\n", curString, mask)
	// time.Sleep(time.Second)
	// prune any prefix that already contradicts the mask
	// (note: a failing prefix is not memoized — only successes are)
	if !isVariantMatchesMask(curString, mask) {
		return 0
	}
	// log.Printf("> entering with %s\n", curString)
	if len(blockLengths) == 0 {
		// no blocks left: pad with '.' to full length and validate
		if len(curString) > targetLength {
			return 0
		}
		variant := curString + strings.Repeat(".", targetLength-len(curString))
		if !isVariantMatchesMask(variant, mask) {
			return 0
		}
		memo[memoKey] = 1
		return 1
	}
	nextBlock := blockLengths[0]
	restBlocks := blockLengths[1:]
	// minimum room needed: all remaining blocks plus one gap between each
	if len(curString) + blockLengthsSum + len(blockLengths) - 1 > targetLength {
		return 0
	}
	isLast := len(restBlocks) == 0
	// every placed block except the last is followed by a forced '.' gap
	rightPointRepeat := 1
	if isLast {
		rightPointRepeat = 0
	}
	whenPass := curString + "."
	whenAdd := curString + strings.Repeat("#", nextBlock) + strings.Repeat(".", rightPointRepeat)
	passCount := generatePermutations(whenPass, targetLength, blockLengths, blockLengthsSum, mask, memo)
	addCount := generatePermutations(whenAdd, targetLength, restBlocks, blockLengthsSum-nextBlock, mask, memo)
	memo[memoKey] = passCount + addCount
	return passCount + addCount
}
// isVariantMatchesMask reports whether the (possibly partial) variant is
// compatible with the mask: every non-'?' mask position must equal the
// variant's character at that index. Input is ASCII ('#', '.', '?'), so
// byte indices and rune indices coincide.
func isVariantMatchesMask(variant, mask string) bool {
	if len(mask) < len(variant) {
		log.Printf("mask %s is less than variant %s\n", mask, variant)
		// BUG FIX: the original only logged here and then panicked on the
		// out-of-range index below; an over-long variant can never match.
		return false
	}
	maskRunes := []rune(mask)
	for i, symb := range variant {
		if maskRunes[i] == '?' {
			continue
		}
		if maskRunes[i] != symb {
			return false
		}
	}
	return true
}

6
day12/example Normal file
View File

@@ -0,0 +1,6 @@
#.#.### 1,1,3
.#...#....###. 1,1,3
.#.###.#.###### 1,3,1,6
####.#...#... 4,1,1
#....######..#####. 1,6,5
.###.##....# 3,2,1

6
day12/example1 Normal file
View File

@@ -0,0 +1,6 @@
???.### 1,1,3
.??..??...?##. 1,1,3
?#?#?#?#?#?#?#? 1,3,1,6
????.#...#... 4,1,1
????.######..#####. 1,6,5
?###???????? 3,2,1

104
day12/notes.org Normal file
View File

@@ -0,0 +1,104 @@
#+title: Notes
* i guess let's generate all possible? and short circuit when they are not matching mask
how do i generate all possible?
i take length of the mask, that's max size
then for each step, either put . or put n # from the input.
add to current string, and do 2 recursive calls, one with diminished 'queue', one with same
* wrong answer on input
it's too high
and log shows:
2023/12/12 15:07:52 for line ???#?.?#?#.?#???#..? blocks [4 4 5 1] matches 2
and it should be 0
** huh, nope this looks good:
testMask := "???#?.?#?#.?#???#..?"
testBlocks := []int{4,4,5,1}
testVariants := generatePermutations("", len(testMask), testBlocks, 14, testMask)
fmt.Printf("for mask %s and blocks %+v\n", testMask, testBlocks)
fmt.Println(testVariants)
for mask ???#?.?#?#.?#???#..? and blocks [4 4 5 1]
[####..####..#####..# .####.####..#####..#]
** let's check this : for line ??????#???????? blocks [7 2] matches 21
** or this for line ?????.??#????? blocks [3 3 2 1] matches 3
looks ok
** this for line ??..??#?????#?##? blocks [1 1 1 1 4] matches 15
looks good
** for line ?#??#??#???.??.??.? blocks [1 2 3 1 1 1] matches 20
seems ok
** for line ???????#??.????####? blocks [1 1 1 1 1 6] matches 58
bingo?
for mask ???????#??.????####? and blocks [1 1 1 1 1 6]
#.#.#..#.#.######...
#.#.#..#.#..######..
#.#.#..#.#...######.
#.#.#..#.#....######
#.#.#..#...#.######.
#.#.#..#...#..######
#.#.#..#....#.######
#.#..#.#.#.######...
#.#..#.#.#..######..
#.#..#.#.#...######.
#.#..#.#.#....######
#.#..#.#...#.######.
#.#..#.#...#..######
#.#..#.#....#.######
#.#....#.#.#.######.
#.#....#.#.#..######
#.#....#.#..#.######
#..#.#.#.#.######...
#..#.#.#.#..######..
#..#.#.#.#...######.
#..#.#.#.#....######
#..#.#.#...#.######.
#..#.#.#...#..######
#..#.#.#....#.######
#..#...#.#.#.######.
#..#...#.#.#..######
#..#...#.#..#.######
#...#..#.#.#.######.
#...#..#.#.#..######
#...#..#.#..#.######
#....#.#.#.#.######.
#....#.#.#.#..######
#....#.#.#..#.######
.#.#.#.#.#.######...
.#.#.#.#.#..######..
.#.#.#.#.#...######.
.#.#.#.#.#....######
.#.#.#.#...#.######.
.#.#.#.#...#..######
.#.#.#.#....#.######
.#.#...#.#.#.######.
.#.#...#.#.#..######
.#.#...#.#..#.######
.#..#..#.#.#.######.
.#..#..#.#.#..######
.#..#..#.#..#.######
.#...#.#.#.#.######.
.#...#.#.#.#..######
.#...#.#.#..#.######
..#.#..#.#.#.######.
..#.#..#.#.#..######
..#.#..#.#..#.######
..#..#.#.#.#.######.
..#..#.#.#.#..######
..#..#.#.#..#.######
...#.#.#.#.#.######.
...#.#.#.#.#..######
...#.#.#.#..#.######
* well, maybe overnight will calculate.
but i guess i needed to check whether blocks are 'always' taking full width
then i'll only need to calculate once, and then multiply
** for example
2023/12/12 20:40:41 699 : for line ??#?????#???.? ???#?????#???.????#?????#???.????#?????#???.????#?????#???.? blocks [3 1 2 1 3 1 2 1 3 1 2 1 3 1 2 1 3 1 2 1] matches 38294856
??#?? ???#?? ?.?
3,1,2,1 - 10+3 = 13
lowest s ###.#.##.#..... - plenty of space for additional

226
day13/dayThirteen.go Normal file
View File

@@ -0,0 +1,226 @@
package day13
import (
"fmt"
"log"
"os"
"strings"
)
// Run solves day 13: reads the input file, splits it into blank-line-
// separated mirror patterns, and sums each pattern's reflection score
// (columns left of a vertical mirror plus 100x rows above a horizontal
// one — see Calc).
func Run() int {
	filename := "day13/input"
	fmt.Println("hello day 13.", filename)
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprintf("error reading file %s", filename))
	}
	allText := string(bytes)
	// Patterns are separated by a blank line.
	fieldTexts := strings.Split(allText, "\n\n")
	result := 0
	for _, fieldText := range fieldTexts {
		field := ReadField(fieldText)
		result += Calc(field)
	}
	return result
}
// Calc scores one pattern. For every candidate mirror line it compares each
// cell with its reflection across that line and tallies per-line mismatch
// counts, then sums the columns left of accepted vertical mirrors plus 100x
// the rows above accepted horizontal ones. Acceptance (isFullMirror) looks
// for the single-smudge pattern — see that method.
//
// Note: `range horizontals`/`range verticals` yields copies of the Mirror
// structs, but their FailedLineChecks / SuccessfulLineChecks fields are maps
// (reference types), so mutations through the copies persist in the slices.
func Calc(field Field) int {
	verticals, horizontals := field.initMirrors()
	fmt.Println(field.String())
	fmt.Printf("field width %d and height %d\n", len(field.Symbols[0]), len(field.Symbols))
	// Horizontal pass: for each cell, compare against its row reflected
	// across every candidate horizontal mirror.
	for rowNum, row := range field.Symbols {
		for colNum, symb := range row {
			for _, horizontalMirrorUnderCheck := range horizontals {
				// if horizontalMirrorUnderCheck.Smaller != 4 {
				// continue
				// }
				mirroredRow, shouldCheck := horizontalMirrorUnderCheck.reflectCoord(rowNum)
				// log.Println("for mirror", horizontalMirrorUnderCheck.String())
				// log.Printf("> checking row %d and mirrored %d; should %t\n", rowNum, mirroredRow, shouldCheck)
				if shouldCheck {
					// log.Printf("checking horizontal mirror %+v", horizontalMirrorUnderCheck)
					// log.Printf("in should check for row %d, col %d, mirrored row %d\n", rowNum, colNum, mirroredRow)
					mirroredSymb := field.Symbols[mirroredRow][colNum]
					isMirrored := symb == mirroredSymb
					if !isMirrored {
						// log.Printf("found not mirrored : %s != %s\n", string(symb), string(mirroredSymb))
						horizontalMirrorUnderCheck.FailedLineChecks[rowNum] += 1
					}
				}
			}
		}
		// whole row got checked.
		// let's mark successful line check for all that didn't fail this line check
		for _, horizontalMirror := range horizontals {
			_, failedCheckReported := horizontalMirror.FailedLineChecks[rowNum]
			if !failedCheckReported {
				horizontalMirror.SuccessfulLineChecks[rowNum] = struct{}{}
			}
		}
	}
	rowsAboveHorizontals := 0
	for _, mirr := range horizontals {
		fmt.Println("horizontal: ", mirr.String())
		if mirr.isFullMirror() {
			log.Printf(">> found perfect Horizontal %+v\n", mirr)
			// Smaller is 0-based, so rows above the gap = Smaller + 1.
			rowsAboveHorizontals += (mirr.Smaller + 1)
		}
	}
	// Vertical pass: same scan, column-major.
	for colNum, _ := range field.Symbols[0] {
		for rowNum, row := range field.Symbols {
			symb := row[colNum]
			for _, verticalMirrorUnderCheck := range verticals {
				// if verticalMirrorUnderCheck.Smaller != 8 {
				// continue
				// }
				mirroredCol, shouldCheck := verticalMirrorUnderCheck.reflectCoord(colNum)
				if shouldCheck {
					// log.Printf("checking vertical mirror %+v", verticalMirrorUnderCheck)
					// log.Printf("in should check for row %d, col %d, mirrored col %d\n", rowNum, colNum, mirroredCol)
					mirroredSymb := field.Symbols[rowNum][mirroredCol]
					isMirrored := symb == mirroredSymb
					if !isMirrored {
						// log.Printf("found not mirrored : %s != %s\n", string(symb), string(mirroredSymb))
						verticalMirrorUnderCheck.FailedLineChecks[colNum] += 1
					}
				}
			}
		}
		// whole row got checked.
		// let's mark successful line check for all that didn't fail this line check
		for _, verticalMirror := range verticals {
			_, failedCheckReported := verticalMirror.FailedLineChecks[colNum]
			if !failedCheckReported {
				verticalMirror.SuccessfulLineChecks[colNum] = struct{}{}
			}
		}
	}
	colsToLeftOfHorizontals := 0
	for _, mirr := range verticals {
		fmt.Println("vertical: ", mirr.String())
		if mirr.isFullMirror() {
			log.Printf(">> found perfect Vertical %+v\n", mirr)
			colsToLeftOfHorizontals += (mirr.Smaller + 1)
		}
	}
	// Puzzle scoring: columns count 1x, rows count 100x.
	result := colsToLeftOfHorizontals + 100*rowsAboveHorizontals
	return result
}
// Field is one ash/rock pattern: Symbols[row][col] holds '#' or '.'.
type Field struct {
	Symbols [][]rune
}
// ReadField parses one blank-line-separated pattern into a Field,
// one row of runes per input line.
func ReadField(fieldText string) Field {
	trimmed := strings.TrimSpace(fieldText)
	rows := strings.Split(trimmed, "\n")
	grid := make([][]rune, 0, len(rows))
	for _, row := range rows {
		grid = append(grid, []rune(row))
	}
	return Field{Symbols: grid}
}
// String renders the pattern with a leading newline, one row per line.
func (f *Field) String() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, row := range f.Symbols {
		b.WriteString(string(row))
		b.WriteString("\n")
	}
	return b.String()
}
// initMirrors creates one candidate Mirror for every gap between adjacent
// rows (horizontal) and adjacent columns (vertical). MaxDistToCheck is the
// number of line pairs that lie inside the field on both sides of the gap:
// whichever side is shorter limits how far the reflection must be verified.
func (f *Field) initMirrors() (vertical []Mirror, horizontal []Mirror) {
	height := len(f.Symbols)
	width := len(f.Symbols[0])
	amountHorizontal := height - 1
	amountVertical := width - 1
	horizontal = make([]Mirror, amountHorizontal)
	vertical = make([]Mirror, amountVertical)
	for rowNum := 0; rowNum < amountHorizontal; rowNum++ {
		maxDist := min(rowNum, height-1-(rowNum+1))
		// log.Println("maxDist ", maxDist, "for rowNum ", rowNum)
		horizontal[rowNum] = Mirror{
			Smaller:              rowNum,
			Bigger:               rowNum + 1,
			SuccessfulLineChecks: make(map[int]any),
			FailedLineChecks:     make(map[int]int),
			MaxDistToCheck:       maxDist,
		}
	}
	for colNum := 0; colNum < amountVertical; colNum++ {
		maxDist := min(colNum, width-1-(colNum+1))
		vertical[colNum] = Mirror{
			Smaller:              colNum,
			Bigger:               colNum + 1,
			SuccessfulLineChecks: make(map[int]any),
			FailedLineChecks:     make(map[int]int),
			// Was min(colNum, maxDist): redundant, since maxDist <= colNum
			// already. Now matches the horizontal loop above.
			MaxDistToCheck: maxDist,
		}
	}
	return
}
// Mirror is a candidate reflection line sitting in the gap between two
// adjacent rows or columns, plus the per-line match bookkeeping filled in
// by Calc.
type Mirror struct {
	// located between lines
	Smaller, Bigger int
	MaxDistToCheck int // how many steps from mirror have to be checked to confirm
	// i.e if mirror between 0 and 1 - only rows 0 & 1 have to be checked, row 2 is 'mirrored' outside of the field and 'ok'
	// value 0 means one step from 'mirror' so rows 0 and 1
	SuccessfulLineChecks map[int]any // line numbers whose compared cells all matched
	FailedLineChecks map[int]int // from line num, to amount of errors in that line
}
// isFullMirror reports whether this candidate qualifies as the "smudged"
// mirror: the scan must have recorded exactly two failed lines (a line and
// its reflection fail together), and one of them must hold exactly one
// mismatched cell while its double reflection lands back on itself.
// NOTE(review): the doublyReflected == failedLine condition appears to
// confirm the failed pair lies inside the mirrored range — verify intent
// against the notes in day13/notes.org.
func (m *Mirror)isFullMirror() bool {
	correctFailedLinesCount := len(m.FailedLineChecks) == 2
	if correctFailedLinesCount {
		for failedLine, failedSymbols := range m.FailedLineChecks {
			reflectedLine, _ := m.reflectCoord(failedLine)
			doublyReflected, _ := m.reflectCoord(reflectedLine)
			// log.Printf(">>>> checking failed line %d, reflected is %d; doubly %d. amount failed is %d\n", failedLine, reflectedLine, doublyReflected, failedSymbols)
			if failedSymbols == 1 && (doublyReflected == failedLine) {
				return true
			}
		}
	}
	return false
}
// String summarizes the mirror's position and check bookkeeping for logs.
func (m *Mirror)String() string {
	return fmt.Sprintf("Mirror (full %t) between %d and %d. successful lines: %+v ; failed lines: %+v. Max check dist: %d\n",
		m.isFullMirror(), m.Smaller, m.Bigger, m.SuccessfulLineChecks, m.FailedLineChecks, m.MaxDistToCheck)
}
// reflectCoord maps coord to its mirror image across the gap between
// Smaller and Bigger. shouldCheck reports whether that image still lies
// within MaxDistToCheck — pairs reflected past the field's edge need no
// verification.
func (m *Mirror) reflectCoord(coord int) (reflected int, shouldCheck bool) {
	if dist := m.Smaller - coord; dist >= 0 {
		// coord sits on or above/left of the gap.
		return m.Bigger + dist, dist <= m.MaxDistToCheck
	}
	// coord sits below/right of the gap (Bigger == Smaller+1, so dist >= 0 here).
	dist := coord - m.Bigger
	return m.Smaller - dist, dist <= m.MaxDistToCheck
}

15
day13/example Normal file
View File

@@ -0,0 +1,15 @@
#.##..##.
..#.##.#.
##......#
##......#
..#.##.#.
..##..##.
#.#.##.#.
#...##..#
#....#..#
..##..###
#####.##.
#####.##.
..##..###
#....#..#

7
day13/example1 Normal file
View File

@@ -0,0 +1,7 @@
#.##..##.
..#.##.#.
##......#
##......#
..#.##.#.
..##..##.
#.#.##.#.

7
day13/example2 Normal file
View File

@@ -0,0 +1,7 @@
#...##..#
#....#..#
..##..###
#####.##.
#####.##.
..##..###
#....#..#

15
day13/example3 Normal file
View File

@@ -0,0 +1,15 @@
.#..#..
.##.###
..####.
##.##.#
#.####.
#.#.##.
##.##.#
..####.
.##.###
.#..#..
##.....
#.###.#
##.....
##.....
#.###.#

17
day13/example4 Normal file
View File

@@ -0,0 +1,17 @@
...#...####...#..
.....##.##.##....
##....######....#
..#.##.#..#.##...
##.###.####.###.#
..###...##...###.
#####.##..##.####
#######....######
###...#.##.#...##
....###.##.###...
##.####.##.####.#
..###...##...###.
##.#.##....##.#.#
##..#.#....#.#..#
##.###.#..#.###.#
###.#...##...#.##
..####.####.####.

72
day13/notes.org Normal file
View File

@@ -0,0 +1,72 @@
#+title: Notes
* part 2 problems
making example 3 from first field of my input
.#..#..
.##.###
..####.
##.##.#
#.####.
#.#.##.
##.##.#
..####.
.##.###
.#..#..
##.....
#.###.#
##.....
##.....
#.###.#
* the mirror should be between 4 & 5
but my output is
horizontal: Mirror (full false) between 4 and 5. successful lines: map[0:{} 1:{} 2:{} 3:{} 4:{} 6:{} 7:{} 8:{} 9:{} 10:{} 11:{} 12:{} 13:{} 14:{}] ; failed lines: map[5:1]. Max check dist: 4
why is line 4 marked as successful?
** let's turn off verticals, and only look at checks for horizontal 4
** why do i have 'row 4, mirrored 0'?
because of 'should check false' i guess
** now example 3 works, but some other still don't find the mirror
* another example
error should be on line 2
...#...####...#..
.....##.##.##....
##....######....#
..#.##.#..#.##...
##.###.####.###.#
..###...##...###.
#####.##..##.####
#######....######
###...#.##.#...##
....###.##.###...
##.####.##.####.#
..###...##...###.
##.#.##....##.#.#
##..#.#....#.#..#
##.###.#..#.###.#
###.#...##...#.##
..####.####.####.
** deleting around (8,9)
.....
.....
##..#
..#..
##..#
..##.
#####
#####
#####
.....
##..#
..##.
##..#
##..#
##..#
#####
..##.
error should be (2, 3)
** let's only keep vertical, with Smaller 8
oh, there should be 'line 3, 1 error', but there also should be 'line 2, 1 error'
why don't we have this?

216
day14/dayFourteen.go Normal file
View File

@@ -0,0 +1,216 @@
package day14
import (
"fmt"
"log"
"os"
"strings"
)
// Run solves day 14 part 2: spin-cycles the platform until a previously
// seen grid state recurs, uses the detected loop to jump ahead to the state
// equivalent to one billion cycles, and returns the north-support load.
func Run() int {
	fmt.Println("hello day 14")
	field := ReadPlatform("day14/input")
	fmt.Println(field.String())
	// fmt.Printf("> lines for field %+v\n", field.UpIndices())
	// field.Move(field.Height(), field.UpIndices())
	cycles := 1000000000
	// states maps a rendered grid to the iteration it first appeared after.
	states := make(map[string]int)
	// 2023/12/14 11:50:32 >>> found loop. known state after 10 equal to one after 3
	var loopLen, initialStretch int
	for i := 1; i <= cycles; i++ {
		field.DoSpinCycle()
		// fmt.Println(field.String())
		stringRepr := field.String()
		prevIter, known := states[stringRepr]
		if known {
			log.Printf(">>> found loop. known state after %d equal to one after %d", i, prevIter)
			initialStretch = prevIter
			loopLen = i - prevIter
			break
		}
		states[stringRepr] = i
		if i % 100000 == 0 {
			log.Print("done ", i, " cycles")
		}
	}
	// field is already in a 'loop' state.
	// so we've already done 'initial stretch' so to make field in same state as after 'cycles'
	// i only need to check rest of (cycles - initialStretch)
	movesToMake := (cycles - initialStretch)%loopLen
	log.Printf(">>> data: initial steps %d, loop len %d. to do same as %d iterations i need %d", initialStretch, loopLen, cycles, movesToMake)
	for i := 1; i <= movesToMake; i++ {
		field.DoSpinCycle()
		// fmt.Println(field.String())
	}
	// north rock load
	return field.NorthLoad()
}
// Tile symbols: Rock ('O') rolls when tilted, Wall ('#') is fixed, Space ('.') is empty.
const Rock rune = 'O'
const Wall rune = '#'
const Space rune = '.'
// Platform is the tilting rock grid: Rocks[row][col] is Rock, Wall, or Space.
type Platform struct {
	Rocks [][]rune
}
// Height returns the number of rows in the platform.
func (p *Platform) Height() int {
	return len(p.Rocks)
}
// Width returns the number of columns (assumes at least one row).
func (p *Platform) Width() int {
	return len(p.Rocks[0])
}
// ReadPlatform loads the rock grid from filename, one row of runes per
// line; panics if the file cannot be read.
func ReadPlatform(filename string) Platform {
	data, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("cannot read file: ", filename))
	}
	trimmed := strings.TrimSpace(string(data))
	lines := strings.Split(trimmed, "\n")
	grid := make([][]rune, 0, len(lines))
	for _, line := range lines {
		grid = append(grid, []rune(line))
	}
	return Platform{Rocks: grid}
}
// String renders the grid with a leading newline, one row per line.
func (p *Platform) String() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, row := range p.Rocks {
		b.WriteString(string(row))
		b.WriteString("\n")
	}
	return b.String()
}
type Coord struct{ Row, Col int }
// UpIndices returns one coordinate line per column, ordered from the top
// row down — the scan order Move uses to roll rocks north.
func (p *Platform) UpIndices() [][]Coord {
	height, width := p.Height(), p.Width()
	lines := make([][]Coord, 0, width)
	for col := 0; col < width; col++ {
		line := make([]Coord, 0, height)
		for row := 0; row < height; row++ {
			line = append(line, Coord{Row: row, Col: col})
		}
		lines = append(lines, line)
	}
	return lines
}
// DownIndices returns one coordinate line per column, ordered from the
// bottom row up — the scan order Move uses to roll rocks south.
func (p *Platform) DownIndices() [][]Coord {
	height, width := p.Height(), p.Width()
	lines := make([][]Coord, 0, width)
	for col := 0; col < width; col++ {
		line := make([]Coord, 0, height)
		for row := height - 1; row >= 0; row-- {
			line = append(line, Coord{Row: row, Col: col})
		}
		lines = append(lines, line)
	}
	return lines
}
// RightIndices returns one coordinate line per row, ordered from the
// rightmost column leftward — the scan order Move uses to roll rocks east.
func (p *Platform) RightIndices() [][]Coord {
	height, width := p.Height(), p.Width()
	lines := make([][]Coord, 0, height)
	for row := 0; row < height; row++ {
		line := make([]Coord, 0, width)
		for col := width - 1; col >= 0; col-- {
			line = append(line, Coord{Row: row, Col: col})
		}
		lines = append(lines, line)
	}
	return lines
}
// LeftIndices returns one coordinate line per row, ordered left-to-right —
// the scan order Move uses to roll rocks west.
func (p *Platform) LeftIndices() [][]Coord {
	height, width := p.Height(), p.Width()
	lines := make([][]Coord, 0, height)
	for row := 0; row < height; row++ {
		line := make([]Coord, 0, width)
		for col := 0; col < width; col++ {
			line = append(line, Coord{Row: row, Col: col})
		}
		lines = append(lines, line)
	}
	return lines
}
// SymbAt returns the rune stored at coord.
func (p *Platform) SymbAt(coord Coord) rune {
	return p.Rocks[coord.Row][coord.Col]
}
// SetSymbAt overwrites the rune at coord.
func (p *Platform) SetSymbAt(coord Coord, symb rune) {
	p.Rocks[coord.Row][coord.Col] = symb
}
// Move rolls every rock along each line of coordinates. lines come from the
// *Indices helpers and are ordered so index 0 is the edge the rocks roll
// toward; n caps how far one rock may travel (callers pass the full
// dimension, i.e. effectively unbounded). Mutates the platform in place.
func (p *Platform) Move(n int, lines [][]Coord) {
	for _, line := range lines {
		// moveSize counts the free cells between the current position and
		// where the next rock will come to rest.
		moveSize := 0
		for i, coord := range line {
			symb := p.SymbAt(coord)
			switch symb {
			case Space:
				moveSize += 1
				if moveSize > n {
					moveSize = n
				}
			case Wall:
				// A wall blocks rolling: rocks after it rest against it.
				moveSize = 0
			case Rock:
				if moveSize == 0 {
					continue
				}
				// get coord for moveSize back. and set that to 'o'
				// and set current to '.'
				// panic if that place is not '.' i guess
				moveTo := line[i-moveSize]
				symbAtTarget := p.SymbAt(moveTo)
				if symbAtTarget != Space {
					panic(fmt.Sprintf("attempting to move %+v to %+v, target symbol is %s, not '.'",
						coord, moveTo, string(symbAtTarget)))
				}
				p.SetSymbAt(moveTo, Rock)
				p.SetSymbAt(coord, Space)
			}
		}
	}
}
// NorthLoad sums, over every rock, its distance to the south edge
// (the bottom row counts 1) — the puzzle's north-support load.
func (p *Platform) NorthLoad() int {
	height := p.Height()
	total := 0
	for rowNum, row := range p.Rocks {
		rowWeight := height - rowNum
		for _, symb := range row {
			if symb == Rock {
				total += rowWeight
			}
		}
	}
	return total
}
// DoSpinCycle performs one puzzle spin cycle: tilt north, west, south,
// then east, each time letting rocks roll as far as they can.
func (p *Platform) DoSpinCycle() {
	// north, west, south, east - till the end
	p.Move(p.Height(), p.UpIndices())
	p.Move(p.Width(), p.LeftIndices())
	p.Move(p.Height(), p.DownIndices())
	p.Move(p.Width(), p.RightIndices())
}

10
day14/example Normal file
View File

@@ -0,0 +1,10 @@
O....#....
O.OO#....#
.....##...
OO.#O....O
.O.....O#.
O.#..O.#.#
..O..#O..O
.......O..
#....###..
#OO..#....

148
day15/dayFifteen.go Normal file
View File

@@ -0,0 +1,148 @@
package day15
import (
"fmt"
"log"
"os"
"regexp"
"slices"
"strconv"
"strings"
)
// Run solves day 15 part 2: parses the comma-separated instruction list,
// applies each instruction to its HASH-selected box, and returns the total
// focusing power of all lenses.
func Run() int {
	fmt.Println("hello day 15")
	log.Println("hello day 15")
	filename := "day15/input"
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("error reading file ", filename))
	}
	text := string(bytes)
	text = strings.TrimSpace(text)
	instructions := strings.Split(text, ",")
	result := 0
	boxes := make([]Box, 256)
	// Give every box its own Focals map (the zero value is a nil map).
	for i, box := range boxes {
		box.Focals = make(map[string]int)
		boxes[i] = box
	}
	for _, instructionStr := range instructions {
		i := ReadInstruction(instructionStr)
		// Box is copied out and written back because Act mutates the
		// Labels slice header; the Focals map is shared either way.
		box := boxes[i.Box]
		box.Act(i)
		boxes[i.Box] = box
		// result += ASCIIStringHash(instruction)
	}
	for i, box := range boxes {
		if len(box.Labels) != 0 {
			log.Printf("%d box %+v final state\n", i, box)
		}
		result += (i + 1) * box.FocusingPower()
	}
	return result
}
// Box is one of the 256 HASHMAP boxes: Labels preserves lens insertion
// order, Focals maps label -> focal length.
type Box struct {
	Labels []string
	Focals map[string]int
}
// Act applies one instruction to the box: Put ('=') inserts the lens or
// updates its focal length in place (label order preserved); Remove ('-')
// deletes the lens if present, shifting later lenses forward. The redundant
// trailing return of the original is dropped; logging is unchanged.
func (b *Box) Act(i Instruction) {
	log.Printf("for box %+v instruction \n%s\n", b, i.String())
	switch i.Action {
	case Put:
		if _, found := b.Focals[i.Label]; !found {
			b.Labels = append(b.Labels, i.Label)
		}
		b.Focals[i.Label] = i.LensFocal
	case Remove:
		if _, found := b.Focals[i.Label]; !found {
			// Nothing to remove; skips the final state log, as before.
			return
		}
		index := slices.Index(b.Labels, i.Label)
		delete(b.Focals, i.Label)
		b.Labels = slices.Delete(b.Labels, index, index+1)
	}
	log.Printf("result : %+v\n", b)
}
// FocusingPower sums (slot index + 1) * focal length over the lenses in
// insertion order.
func (b *Box) FocusingPower() int {
	total := 0
	for slot, label := range b.Labels {
		total += (slot + 1) * b.Focals[label]
	}
	return total
}
// Action is the instruction operator symbol. Both constants are explicitly
// typed Action for consistency (the original left Remove untyped).
type Action rune

const (
	Put    Action = '=' // insert or replace a lens
	Remove Action = '-' // take a lens out
)
// Instruction is one parsed step: Label's HASH selects Box; Action is Put
// (with LensFocal set) or Remove (LensFocal stays 0).
type Instruction struct {
	Label string
	Box int
	Action Action
	LensFocal int
}
// String renders the instruction for logs, e.g. "put into\t\t0 of focal 1 rn".
func (i *Instruction) String() string {
	operation := ""
	switch i.Action {
	case Put:
		operation = "put into"
	case Remove:
		operation = "remove from"
	}
	return fmt.Sprintf("%s\t\t%d of focal %d %s", operation, i.Box, i.LensFocal, i.Label)
}
// instructionRe splits an instruction like "rn=1" or "cm-" into label,
// operation, and optional focal length. Compiled once at package scope
// instead of on every ReadInstruction call (the original recompiled it
// per instruction).
var instructionRe = regexp.MustCompile(`(?P<label>\D+)(?P<operation>[=\-])(?P<focal>\d*)`)

// ReadInstruction parses one comma-separated instruction into an
// Instruction, hashing the label to pick the target box. Panics on a
// malformed focal length.
func ReadInstruction(str string) Instruction {
	result := Instruction{}
	// log.Println("in str ", str)
	fields := instructionRe.FindStringSubmatch(str)
	// log.Printf("in %s found %+v", str, fields)
	operation := fields[2]
	operationRune := []rune(operation)[0]
	result.Action = Action(operationRune)
	if operationRune == '=' {
		focalStr := fields[3]
		focal, err := strconv.Atoi(focalStr)
		if err != nil {
			panic(fmt.Sprint("error reading focal from ", str))
		}
		result.LensFocal = focal
	}
	result.Label = fields[1]
	result.Box = ASCIIStringHash(result.Label)
	return result
}
// ASCIIStringHash implements the day-15 HASH: starting from 0, for each
// character add its code, multiply by 17, and reduce modulo 256.
func ASCIIStringHash(str string) int {
	hash := 0
	for _, symb := range str {
		hash = ((hash + int(symb)) * 17) % 256
	}
	return hash
}

1
day15/example Normal file
View File

@@ -0,0 +1 @@
rn=1,cm-,qp=3,cm=2,qp-,pc=4,ot=9,ab=5,pc-,pc=6,ot=7

10
day16/example Normal file
View File

@@ -0,0 +1,10 @@
.|...\....
|.-.\.....
.....|-...
........|.
..........
.........\
..../.\\..
.-.-/..|..
.|....-|.\
..//.|....

306
day16/floorWillBeLava.go Normal file
View File

@@ -0,0 +1,306 @@
package day16
import (
"fmt"
"log"
"math"
"os"
"strings"
"sync"
)
// Run solves day 16 part 2: traces the beam from every edge start point (one
// goroutine per start, each on its own freshly parsed grid) and returns the
// maximum number of energized cells over all starts.
func Run() int {
	fmt.Println("hello from day 16")
	log.Println("starting")
	filename := "day16/input"
	field := ReadField(filename)
	startPoints := StartPoints(&field)
	var startPointsWaitGroup sync.WaitGroup
	startPointsWaitGroup.Add(len(startPoints))
	// results is unbuffered; a dedicated goroutine closes it once every
	// worker has sent its count, which ends the range loop below.
	results := make(chan int)
	go func() {
		startPointsWaitGroup.Wait()
		close(results)
	}()
	for _, start := range startPoints {
		go func(start MovementPoint) {
			// Each worker re-reads the file to get an unshared, beam-free grid.
			cleanField := ReadField(filename)
			cleanField.StartTraversal(start)
			thisResult := cleanField.CountEnergized()
			results <- thisResult
			startPointsWaitGroup.Done()
		}(start)
	}
	// NOTE(review): local `max` shadows the Go 1.21 builtin of the same
	// name — legal, but a rename would avoid confusion.
	max := math.MinInt
	for energized := range results {
		if energized > max {
			max = energized
			log.Println("found new max: ", max)
		}
	}
	// fmt.Println(field.String())
	// field.StartTraversal()
	// fmt.Println(field.ShowEnergyzed())
	return max
}
// StartPoints enumerates every edge cell paired with its inward beam
// direction: left/right edges fire Rightward/Leftward, top/bottom edges
// fire Downward/Upward. (Idiom fix: `for colNum, _ := range` trimmed to
// the single-variable form.)
func StartPoints(f *Field) []MovementPoint {
	result := make([]MovementPoint, 0)
	for rowNum, row := range f.cells {
		result = append(result,
			MovementPoint{Row: rowNum, Col: 0, Direction: Rightward},
			MovementPoint{Row: rowNum, Col: len(row) - 1, Direction: Leftward})
	}
	for colNum := range f.cells[0] {
		result = append(result,
			MovementPoint{Row: 0, Col: colNum, Direction: Downward},
			MovementPoint{Row: len(f.cells) - 1, Col: colNum, Direction: Upward})
	}
	return result
}
// have shared field
// running traversal recursive function per ray
// exit if going out of field or visiting cell that already had light in this direction
// (i.e encountering loop)
// CellType is a tile symbol from the input grid. Every constant is
// explicitly typed CellType so all compare directly against Cell.CellType
// (the original left all but Empty untyped).
type CellType rune

const (
	Empty           CellType = '.'
	SplitterNS      CellType = '|'
	SplitterEW      CellType = '-'
	MirrorBackslash CellType = '\\'
	MirrorSlash     CellType = '/'
)
// Direction is a beam heading on the grid.
type Direction int

const (
	Upward Direction = iota
	Downward
	Leftward
	Rightward
)
// Cell is one grid tile plus the set of beam directions already seen
// entering it — used both for cycle detection and the energized count.
type Cell struct {
	CellType CellType
	KnownBeams map[Direction]any
}
// Field is the beam grid; cells hold pointers so visit records mutate the
// shared Cell values.
type Field struct {
	cells [][]*Cell
}
// isValid reports whether mp lies inside the grid bounds.
func (f *Field) isValid(mp MovementPoint) bool {
	if mp.Row < 0 || mp.Row >= len(f.cells) {
		return false
	}
	if len(f.cells) == 0 {
		return false
	}
	return mp.Col >= 0 && mp.Col < len(f.cells[0])
}
// ReadField parses the mirror/splitter grid from filename into a Field of
// *Cell, each cell starting with an empty KnownBeams set. Panics if the
// file cannot be read.
func ReadField(filename string) Field {
	data, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("cannot read file: ", filename))
	}
	var field Field
	for _, line := range strings.Split(strings.TrimSpace(string(data)), "\n") {
		row := make([]*Cell, 0, len(line))
		for _, symb := range line {
			cell := &Cell{
				CellType:   CellType(symb),
				KnownBeams: make(map[Direction]any),
			}
			row = append(row, cell)
		}
		field.cells = append(field.cells, row)
	}
	return field
}
// String renders the bare grid (cell types only) with a leading newline.
func (f *Field) String() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, row := range f.cells {
		for _, cell := range row {
			b.WriteRune(rune(cell.CellType))
		}
		b.WriteString("\n")
	}
	return b.String()
}
// ShowEnergyzed renders the grid with '#' overlaid on every cell at least
// one beam has crossed. (Exported name kept as-is for compatibility.)
func (f *Field) ShowEnergyzed() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, row := range f.cells {
		for _, cell := range row {
			if len(cell.KnownBeams) != 0 {
				b.WriteString("#")
			} else {
				b.WriteRune(rune(cell.CellType))
			}
		}
		b.WriteString("\n")
	}
	return b.String()
}
// MovementPoint is a beam head: a cell position plus travel direction.
type MovementPoint struct {
	Row, Col int
	Direction Direction
}
// StartTraversal traces the beam from startPoint until every branch exits
// the grid or revisits a (cell, direction). Visits are funneled through a
// channel to a single collector goroutine (RecordVisits); the WaitGroup
// counts live beam goroutines so the channel is closed only after all
// branches finish.
func (f *Field) StartTraversal(startPoint MovementPoint) {
	reportedVisits := make(chan MovementPoint)
	var wg sync.WaitGroup
	go f.RecordVisits(reportedVisits)
	wg.Add(1)
	go f.TraverseFrom(startPoint, reportedVisits, &wg)
	wg.Wait()
	close(reportedVisits)
}
// CountEnergized counts cells crossed by at least one beam direction.
func (f *Field) CountEnergized() int {
	total := 0
	for _, row := range f.cells {
		for _, cell := range row {
			if len(cell.KnownBeams) != 0 {
				total++
			}
		}
	}
	return total
}
// RecordVisits is the single writer of beam state: it serializes all visit
// reports onto the cells' KnownBeams maps so writers never race each other.
// Runs until reportedPoints is closed.
func (f *Field) RecordVisits(reportedPoints <-chan MovementPoint) {
	for point := range reportedPoints {
		cell := f.cells[point.Row][point.Col]
		// log.Printf("recording visit %+v to %+v at row %d col %d\n", point, cell, point.Row, point.Col)
		cell.KnownBeams[point.Direction] = struct{}{}
	}
}
// starting at point, mark as visited
// move (concurrently if required) into next points
// ends - when out of the field, or if encountering a cycle
//
// TraverseFrom follows one beam: it reports each visited point to the
// collector channel, stops when out of the grid or when this (cell,
// direction) was already seen, continues a single successor in the same
// goroutine, and forks one extra goroutine when a splitter produces two.
// Every exit path balances exactly one wg.Done for this goroutine's token.
//
// NOTE(review): KnownBeams is read here while RecordVisits writes it from
// another goroutine — that read is not synchronized with the writer.
func (f *Field) TraverseFrom(current MovementPoint, reportVisits chan<- MovementPoint, wg *sync.WaitGroup) {
	// log.Printf("> starting traverse through %+v", current)
	if !f.isValid(current) {
		log.Println("invalid current ", current, " should be impossible")
		wg.Done()
		return
	}
	cell := f.cells[current.Row][current.Col]
	_, knownDirection := cell.KnownBeams[current.Direction]
	if knownDirection {
		// log.Printf("found cycle at %+v in %+v", current, cell)
		wg.Done()
		return
	}
	reportVisits <- current
	nextPoints := NextPoints(f, current)
	// log.Printf("for current %+v next are: %+v\n", current, nextPoints)
	switch len(nextPoints) {
	case 0:
		// beam leaves the grid
		wg.Done()
		return
	case 1:
		// tail-continue in this goroutine; our wg token carries over
		f.TraverseFrom(nextPoints[0], reportVisits, wg)
		return
	case 2:
		wg.Add(1)
		go f.TraverseFrom(nextPoints[0], reportVisits, wg)
		f.TraverseFrom(nextPoints[1], reportVisits, wg)
		return
	}
}
// NextPoints computes the in-bounds successor points of current after the
// cell's type redirects or splits the beam.
func NextPoints(f *Field, current MovementPoint) []MovementPoint {
	cellType := f.cells[current.Row][current.Col].CellType
	result := make([]MovementPoint, 0)
	for _, d := range cellType.NextDirections(current.Direction) {
		if candidate := current.ApplyDirection(d); f.isValid(candidate) {
			result = append(result, candidate)
		}
	}
	return result
}
// ApplyDirection returns the point one step away from mp in direction d,
// facing d. Value receiver: mp is a copy, so mutating it is safe. Bounds
// are the caller's concern — this type knows nothing about Field dimensions.
func (mp MovementPoint) ApplyDirection(d Direction) MovementPoint {
	next := mp
	switch d {
	case Upward:
		next.Row--
	case Downward:
		next.Row++
	case Leftward:
		next.Col--
	case Rightward:
		next.Col++
	}
	next.Direction = d
	return next
}
// NextDirections returns the outgoing beam direction(s) produced when a
// beam traveling currentDirection enters a cell of this type: '.' passes
// it through, '|' and '-' split perpendicular beams, '\' and '/' reflect.
// An unrecognized cell type yields nil, as before.
func (ct CellType) NextDirections(currentDirection Direction) []Direction {
	switch ct {
	case Empty:
		return []Direction{currentDirection}
	case SplitterNS:
		// '|': horizontal beams split up+down, vertical ones pass through.
		if currentDirection == Leftward || currentDirection == Rightward {
			return []Direction{Upward, Downward}
		}
		return []Direction{currentDirection}
	case SplitterEW:
		// '-': vertical beams split left+right, horizontal ones pass through.
		if currentDirection == Upward || currentDirection == Downward {
			return []Direction{Leftward, Rightward}
		}
		return []Direction{currentDirection}
	case MirrorBackslash:
		// '\' mirror.
		switch currentDirection {
		case Leftward:
			return []Direction{Upward}
		case Rightward:
			return []Direction{Downward}
		case Upward:
			return []Direction{Leftward}
		case Downward:
			return []Direction{Rightward}
		}
	case MirrorSlash:
		// '/' mirror.
		switch currentDirection {
		case Leftward:
			return []Direction{Downward}
		case Rightward:
			return []Direction{Upward}
		case Upward:
			return []Direction{Rightward}
		case Downward:
			return []Direction{Leftward}
		}
	}
	return nil
}

321
day17/clumsyCrucible.go Normal file
View File

@@ -0,0 +1,321 @@
package day17
import (
"fmt"
"log"
"math"
"os"
"slices"
"strings"
)
// Run solves day 17: loads the heat-loss grid, runs the modified Dijkstra
// search, prints the winning path's trail for eyeballing, and returns the
// minimal total heat loss to the bottom-right corner.
func Run() int {
	fmt.Println("hello from day 17")
	filename := "day17/input"
	field := NewField(filename)
	log.Printf("%+v\n", field)
	field.RunDijkstra()
	lenToEnd := field.Paths[field.Finish].totalLength
	fmt.Println("check visually:")
	// fmt.Println(field.Paths[end].stringPathSoFar)
	fmt.Println(field.Paths[field.Finish].stringPathSoFar)
	return lenToEnd
}
// let's do dijkstra. it also needs a priority queue
// priority queue would be over vertice. and would have to have enough information to
// calc the distance from neighbors.
// how to check condition of max 3 in one row?
// with each vertice store [horizontal:n|vertical:n] and if it's 3 just dont consider?
// so in iteration, i have some vertice, with horizontal:2 for example,
// i check all neighbors, if path through 'this' is shorter, set that as path,
// but also mark the path with len of straight.
//
// so priority queue is with 'path to next'
// or rather 'path to i,j'
// then check for neighbors (non finished), calc distance to them through this
// checking neighbors via 'path get directions' 'path get geighbors from directions'
// if shorter - update
// mark current as 'finished'
// so, i'll be checking cost to enter directly from this table,
// but check path len
// ReadEnterCosts parses the digit grid from filename: result[row][col] is
// the heat-loss cost of entering that cell. Panics if the file cannot be
// read.
func ReadEnterCosts(filename string) [][]int {
	data, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("error reading file ", filename))
	}
	lines := strings.Split(strings.TrimSpace(string(data)), "\n")
	grid := make([][]int, 0, len(lines))
	for _, line := range lines {
		row := make([]int, 0, len(line))
		for _, digit := range line {
			row = append(row, int(digit-'0'))
		}
		grid = append(grid, row)
	}
	return grid
}
// Coord is a grid position (row, column).
type Coord struct {
	Row, Col int
}
// applyDirection returns the coordinate one step from c in direction d;
// bounds are not checked here.
func (c Coord) applyDirection(d Direction) Coord {
	next := c
	switch d {
	case Upward:
		next.Row--
	case Downward:
		next.Row++
	case Leftward:
		next.Col--
	case Rightward:
		next.Col++
	}
	return next
}
// String renders the coordinate as "(row,col)".
func (c Coord)String() string {
	return fmt.Sprintf("(%d,%d)", c.Row, c.Col)
}
// Direction is a movement heading on the cost grid.
type Direction int

const (
	Upward Direction = iota
	Downward
	Leftward
	Rightward
)
// String implements fmt.Stringer ("Up"/"Down"/"Left"/"Right").
// The local slice is renamed: the original called it `strings`, shadowing
// the imported strings package inside this function.
func (d Direction) String() string {
	names := []string{"Up", "Down", "Left", "Right"}
	return names[d]
}
// AsSymbol renders the direction as a single trail character ("^v<>").
// Local slice renamed from `strings`, which shadowed the strings package.
func (d Direction) AsSymbol() string {
	symbols := []string{"^", "v", "<", ">"}
	return symbols[d]
}
// GetPerpendicular returns the two directions at right angles to d
// (nil for an out-of-range value, as before).
func (d Direction) GetPerpendicular() []Direction {
	switch d {
	case Upward, Downward:
		return []Direction{Leftward, Rightward}
	case Leftward, Rightward:
		return []Direction{Upward, Downward}
	}
	return nil
}
// PathSegmentEnd is one Dijkstra search state: where the path ends, its
// accumulated cost, how it arrived, and a rendered trail for debugging.
type PathSegmentEnd struct {
	endsAt Coord
	totalLength int
	lastSteps map[Direction]int // consecutive-step counters; continuePathInDirection only carries the current direction's entry
	lastDirection Direction
	stringPathSoFar string
	done bool // NOTE(review): set by NewField/RunDijkstra but never read in the visible code — confirm it is still needed
}
// NextDirections2 yields the successor directions under the part-2 "ultra
// crucible" rule: keep going straight until 4 consecutive steps are done,
// then turning becomes allowed, and going straight stays allowed only up
// to 10 consecutive steps. (The original comment cited the part-1 "3 in a
// row" limit, which does not match this code.)
func (p *PathSegmentEnd) NextDirections2() (next []Direction) {
	lastSteps := p.lastSteps[p.lastDirection]
	if lastSteps < 4 {
		// must continue straight until the minimum run is reached
		return []Direction{p.lastDirection}
	}
	next = append(next, p.lastDirection.GetPerpendicular()...)
	if lastSteps < 10 {
		next = append(next, p.lastDirection)
	}
	// log.Printf("getting directions from %+v they are %+v", p, next)
	return
}
// NextDirections yields the part-1 successor directions: both
// perpendiculars are always allowed, plus straight ahead while fewer than
// 3 consecutive steps have been taken in the current direction.
func (p *PathSegmentEnd) NextDirections() (next []Direction) {
	next = append(next, p.lastDirection.GetPerpendicular()...)
	// last steps of 2 is max allowed 3 tiles in row
	lastSteps := p.lastSteps[p.lastDirection]
	if lastSteps < 3 {
		next = append(next, p.lastDirection)
	}
	// log.Printf("getting directions from %+v they are %+v", p, next)
	return
}
// Field is the day-17 search context: per-cell enter Costs, the best-known
// path end per coordinate, the grid dimensions, and the fixed Start /
// Finish corners.
type Field struct {
	Paths map[Coord]*PathSegmentEnd
	Costs [][]int
	Height, Width int
	Start Coord
	Finish Coord
}
// NewField loads the cost grid and seeds the search with a zero-cost start
// segment at (0,0). Start is top-left, Finish bottom-right.
func NewField(filename string) Field {
	enterCosts := ReadEnterCosts(filename)
	startSegment := PathSegmentEnd{
		endsAt: Coord{0, 0},
		totalLength: 0,
		lastSteps: make(map[Direction]int),
		done: true,
		// The start has no real arrival direction; a second seed with
		// Rightward is added in RunDijkstra to cover both initial headings.
		lastDirection: Downward, // fake, need to init direct neighbors also
	}
	initialPaths := make(map[Coord]*PathSegmentEnd)
	initialPaths[Coord{0, 0}] = &startSegment
	height := len(enterCosts)
	width := len(enterCosts[0])
	return Field{
		Paths: initialPaths,
		Costs: enterCosts,
		Height: height,
		Width: width,
		Start: Coord{0, 0},
		Finish: Coord{height - 1, width - 1},
	}
}
// isValid reports whether c lies inside the grid.
func (f *Field) isValid(c Coord) bool {
	rowInBounds := c.Row >= 0 && c.Row < f.Height
	colInBounds := c.Col >= 0 && c.Col < f.Width
	return rowInBounds && colInBounds
}
// presupposes that direction is valid
//
// continuePathInDirection extends curPath one step in direction d, adding
// the destination cell's enter cost. A fresh lastSteps map is built carrying
// only direction d's counter, so turning resets all other run lengths.
// Assumes the destination coordinate is inside the grid.
func (f *Field) continuePathInDirection(curPath PathSegmentEnd, d Direction) (result PathSegmentEnd) {
	// curPath := f.Paths[from]
	from := curPath.endsAt
	nextCoord := from.applyDirection(d)
	moveCost := f.Costs[nextCoord.Row][nextCoord.Col]
	newCost := curPath.totalLength + moveCost
	lastSteps := make(map[Direction]int)
	curPathStepsIntoThisDirection, found := curPath.lastSteps[d]
	if !found {
		lastSteps[d] = 1
	} else {
		lastSteps[d] = curPathStepsIntoThisDirection + 1
	}
	return PathSegmentEnd{
		endsAt: nextCoord,
		totalLength: newCost,
		lastDirection: d,
		lastSteps: lastSteps,
		stringPathSoFar: curPath.stringPathSoFar + d.AsSymbol(),
	}
}
// StringKey identifies a search state for the distances map: position plus
// arrival direction plus straight-run counters (states that differ in any
// of these are distinct Dijkstra nodes).
func (p *PathSegmentEnd)StringKey() string {
	return fmt.Sprintf("%s from %s with len %+v", p.endsAt.String(), p.lastDirection, p.lastSteps)
}
// RunDijkstra runs a Dijkstra-like exploration over the cost grid where a
// "node" is not a cell but a movement state (PathSegmentEnd): position plus
// arrival direction plus consecutive-step counts. Distances are keyed by
// PathSegmentEnd.StringKey(). The frontier is a plain slice scanned
// linearly for the minimum each iteration (no priority queue).
func (f *Field) RunDijkstra() {
	checking := make([]PathSegmentEnd, 0)
	distancesMap := make(map[string]int, 0)
	startingPath := f.Paths[f.Start]
	// A second seed with a different fake arrival direction, so neighbors
	// along both axes of the origin get initialized.
	anotherStartingPath := PathSegmentEnd{
		endsAt:          Coord{0, 0},
		totalLength:     0,
		lastSteps:       make(map[Direction]int),
		done:            true,
		lastDirection:   Rightward, // fake, need to init direct neighbors also
		stringPathSoFar: ".",
	}
	checking = append(checking, *startingPath, anotherStartingPath)
	distancesMap[startingPath.StringKey()] = 0
	distancesMap[anotherStartingPath.StringKey()] = 0
	for len(checking) > 0 {
		// Select the frontier entry with the smallest accumulated cost.
		var currentPath PathSegmentEnd
		selectingMinDistanceOfVisited := math.MaxInt
		for _, path := range checking {
			if path.totalLength < selectingMinDistanceOfVisited {
				currentPath = path
				selectingMinDistanceOfVisited = path.totalLength
			}
		}
		currentCoord := currentPath.endsAt
		directions := currentPath.NextDirections2()
		for _, direction := range directions {
			neighborCoord := currentCoord.applyDirection(direction)
			if !f.isValid(neighborCoord) {
				continue // prevent going off the grid
			}
			neighborPathSoFar, found := f.Paths[neighborCoord]
			if !found {
				// Unvisited cell: seed with an infinitely expensive placeholder.
				neighborPathSoFar = &PathSegmentEnd{
					totalLength: math.MaxInt,
				}
				f.Paths[neighborCoord] = neighborPathSoFar
			}
			pathIfWeGoFromCurrent := f.continuePathInDirection(currentPath, direction)
			if pathIfWeGoFromCurrent.endsAt == f.Finish {
				// Stopping at the finish requires at least 4 straight steps
				// (ultra-crucible rule, part 2).
				if pathIfWeGoFromCurrent.lastSteps[pathIfWeGoFromCurrent.lastDirection] < 4 {
					continue
				}
			}
			distFromThatSide, isKnown := distancesMap[pathIfWeGoFromCurrent.StringKey()]
			if !isKnown {
				distancesMap[pathIfWeGoFromCurrent.StringKey()] = pathIfWeGoFromCurrent.totalLength
				checking = append(checking, pathIfWeGoFromCurrent)
			}
			// NOTE(review): when !isKnown, distFromThatSide is 0, so this
			// comparison is false and the else-branch is taken — the state
			// was already appended above.
			if pathIfWeGoFromCurrent.totalLength < distFromThatSide {
				f.Paths[neighborCoord] = &pathIfWeGoFromCurrent
				distancesMap[pathIfWeGoFromCurrent.StringKey()] = pathIfWeGoFromCurrent.totalLength
				checking = append(checking, pathIfWeGoFromCurrent)
			} else {
				continue // this path is better than existing
			}
		}
		// Expanded state leaves the frontier, matched by its path string.
		checking = slices.DeleteFunc(checking, func(other PathSegmentEnd) bool { return other.stringPathSoFar == currentPath.stringPathSoFar })
		storedPath, found := f.Paths[currentCoord]
		if !found || storedPath.totalLength > currentPath.totalLength {
			f.Paths[currentCoord] = &currentPath
		}
	}
}
// printLastDirection renders the grid for debugging: each cell shows the
// arrival direction of its best-known path, or '.' when no path reached it.
func (f *Field) printLastDirection() (result string) {
	result = "\n"
	for row := 0; row < f.Height; row++ {
		line := ""
		for col := 0; col < f.Width; col++ {
			path, ok := f.Paths[Coord{row, col}]
			if !ok {
				line += "."
				continue
			}
			line += path.lastDirection.AsSymbol()
		}
		result += line + "\n"
	}
	return
}

13
day17/example Normal file
View File

@@ -0,0 +1,13 @@
2413432311323
3215453535623
3255245654254
3446585845452
4546657867536
1438598798454
4457876987766
3637877979653
4654967986887
4564679986453
1224686865563
2546548887735
4322674655533

5
day17/example2 Normal file
View File

@@ -0,0 +1,5 @@
111111111111
999999999991
999999999991
999999999991
999999999991

10
day17/notes.org Normal file
View File

@@ -0,0 +1,10 @@
#+title: Notes
* so, just traversal doesn't work,
and it's easy to imagine why.
my guess is that i really should put 'paths to explore' into priority queue
and select new ones not only by their length, but also by how far they go from the goal
* lots of time for no result
* so, for 'dijkstra' don't store a set of vertices,
but of the ways we've entered them

14
day18/example Normal file
View File

@@ -0,0 +1,14 @@
R 6 (#70c710)
D 5 (#0dc571)
L 2 (#5713f0)
D 2 (#d2c081)
R 2 (#59c680)
D 2 (#411b91)
L 5 (#8ceee2)
U 2 (#caa173)
L 1 (#1b58a2)
U 2 (#caa171)
R 2 (#7807d2)
U 3 (#a77fa3)
L 2 (#015232)
U 2 (#7a21e3)

8
day18/example2 Normal file
View File

@@ -0,0 +1,8 @@
R 6 (#70c710)
D 5 (#0dc571)
L 2 (#5713f0)
U 2 (#d2c081)
L 2 (#59c680)
D 2 (#411b91)
L 2 (#8ceee2)
U 5 (#d2c081)

538
day18/lagoon.go Normal file
View File

@@ -0,0 +1,538 @@
package day18
import (
"fmt"
"log"
"os"
"slices"
"strconv"
"strings"
"sync"
)
// Run solves day 18 (lavaduct lagoon) using the part-2 hex-encoded
// instructions: dig the trench while recording row border symbols, then
// count interior cells, returning border + interior as the answer.
func Run() int {
	log.Println("hello day 18")
	log.Println("problem of lagoon bgins")
	filename := "day18/input"
	instructions := ReadInstructionas2(filename)
	h, w := calcHeightWidth(instructions)
	log.Printf("read %+v instructions", instructions)
	field := CreateField(h, w)
	borderAmount := field.digByInstructions(instructions)
	// Debug-rendering helpers, kept for reference:
	// fmt.Println(field.String())
	// WriteToFile("borders.txt", field.String())
	// convert -size 3000x6000 xc:white -font "FreeMono" -pointsize 13 -fill black -draw @borders.txt borders.png
	log.Printf("starting dig inside for cols %d-%d and rows %d-%d ", field.MinCol, field.MaxCol, field.MinRow, field.MaxRow)
	insideAmount := field.digInsides()
	log.Printf("border is %d; inside is %d", borderAmount, insideAmount)
	// fmt.Println(field.String())
	// WriteToFile("fulldug.txt", field.String())
	// convert -size 3000x6000 xc:white -font "FreeMono" -pointsize 13 -fill black -draw @fulldug.txt fulldug.png
	return borderAmount + insideAmount
}
// determine size of field. max(sum(up), sum(down)) for height,
// same for left and right,
// translate (0,0) into center of the field
//
// have cells, with coord. and i guess four sides, with color.
// i guess have directions, map[direction]color
// and have 'opposite' on direction.
// for each direction apply it to cell coord, get cell, get opposite direction and color it
//
// then have method on field and cell that excavates cell and colors all neighbors
//
// last part is filling in insides, should be ok with horizontal scans from left by even crossings
// Direction is one of the four cardinal dig directions.
type Direction int

// The declaration order matters: ordinals index DirectionNames ("U","D","L","R").
const (
	Upward Direction = iota
	Downward
	Leftward
	Rightward
)
// opposite returns the direction pointing the other way; it panics on a
// value outside the four declared directions.
func (d Direction) opposite() Direction {
	switch d {
	case Upward:
		return Downward
	case Leftward:
		return Rightward
	case Downward:
		return Upward
	case Rightward:
		return Leftward
	default:
		panic("unaccounted direction")
	}
}
// DirectionNames maps each Direction ordinal to its one-letter input code.
var DirectionNames []string = []string{"U", "D", "L", "R"}

// String returns the direction's one-letter code.
func (d Direction) String() string {
	return DirectionNames[d]
}
// DirectionFromString parses a one-letter code ("U", "D", "L", "R") into a
// Direction; it panics on anything else.
func DirectionFromString(s string) Direction {
	for i, name := range DirectionNames {
		if name == s {
			return Direction(i)
		}
	}
	panic(fmt.Sprint("bad direction", s))
}
// Instruction is one dig-plan step: a direction, a step count, and (used
// by the part-1 parser only) the raw hex color string.
type Instruction struct {
	Direction Direction
	Steps     int
	Color     string
}
// ReadInstructionas parses every line of the file as a part-1 instruction
// (direction letter, step count, color). Panics if the file is unreadable.
func ReadInstructionas(filename string) (result []Instruction) {
	content, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("error reading file: ", filename))
	}
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	for _, line := range lines {
		result = append(result, ReadInstruction(line))
	}
	return
}
// ReadInstruction parses one part-1 line such as "R 6 (#70c710)"; the
// parentheses are stripped from the color field.
func ReadInstruction(line string) Instruction {
	parts := strings.Fields(line)
	dir := DirectionFromString(parts[0])
	steps, err := strconv.Atoi(parts[1])
	if err != nil {
		panic(fmt.Sprint("bad steps in line: ", line))
	}
	colorField := parts[2]
	return Instruction{
		Direction: dir,
		Steps:     steps,
		Color:     colorField[1 : len(colorField)-1],
	}
}
// ReadInstructionas2 parses every line of the file as a part-2 instruction
// (the real step/direction are hidden in the hex color field). Panics if
// the file is unreadable.
func ReadInstructionas2(filename string) (result []Instruction) {
	content, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("error reading file: ", filename))
	}
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	for _, line := range lines {
		result = append(result, ReadInstruction2(line))
	}
	return
}
// ReadInstruction2 parses one part-2 line: the "(#70c710)" field encodes
// the actual instruction — the first five hex digits are the distance and
// the last digit the direction (0=R, 1=D, 2=L, 3=U). Color stays empty.
func ReadInstruction2(line string) Instruction {
	fields := strings.Fields(line)
	hexField := fields[2]
	hexDist := hexField[2 : len(hexField)-2]
	hexDirection := hexField[len(hexField)-2 : len(hexField)-1]
	var direction Direction
	switch hexDirection {
	case "0":
		direction = Rightward
	case "1":
		direction = Downward
	case "2":
		direction = Leftward
	case "3":
		direction = Upward
	default:
		// BUG FIX: an unknown digit previously fell through silently,
		// leaving the zero value (Upward); fail fast instead.
		panic(fmt.Sprint("bad hex direction in line: ", line))
	}
	dist, err := strconv.ParseUint(hexDist, 16, 64)
	if err != nil {
		panic(err)
	}
	return Instruction{
		Steps:     int(dist),
		Direction: direction,
	}
}
// calcHeightWidth derives generous field dimensions from the instructions:
// twice the larger total vertical travel (plus a margin of 10) for height,
// and likewise horizontally for width.
func calcHeightWidth(instructions []Instruction) (height, width int) {
	totals := make(map[Direction]int)
	for _, ins := range instructions {
		totals[ins.Direction] += ins.Steps
	}
	vertical := totals[Upward]
	if totals[Downward] > totals[Upward] {
		vertical = totals[Downward]
	}
	horizontal := totals[Rightward]
	if totals[Leftward] > totals[Rightward] {
		horizontal = totals[Leftward]
	}
	height = 2*vertical + 10
	width = 2*horizontal + 10
	return
}
// Coord addresses a cell on the field. NOTE: Col is declared first, so
// positional literals read Coord{col, row}.
type Coord struct {
	Col, Row int
}
// applyDirection returns the coordinate one step from c in direction d.
// The value receiver means the caller's coordinate is never mutated.
func (c Coord) applyDirection(d Direction) Coord {
	next := c
	switch d {
	case Upward:
		next.Row--
	case Downward:
		next.Row++
	case Leftward:
		next.Col--
	case Rightward:
		next.Col++
	}
	return next
}
// Cell is one position of the lagoon floor; ToBeDug marks interior cells
// queued for excavation (used only for debug rendering).
type Cell struct {
	IsDug   bool
	ToBeDug bool
	Coord   Coord
}

// BorderSymbol classifies a trench cell with pipe-maze-style glyphs,
// always as seen when scanning a row left to right. The exact pairing of
// direction changes to symbols is defined in getTurnAsIfGoingFromLeft.
type BorderSymbol rune

const (
	Vertical BorderSymbol = '|'
	ToDown   BorderSymbol = '7'
	ToUp     BorderSymbol = 'J'
	FromUp   BorderSymbol = 'F'
	FromDown BorderSymbol = 'L'
)
// Field is the dig site. Cells is sparse, keyed by coordinate;
// Min/Max Row/Col track the bounding box actually touched; BordersFromLeft
// stores, per row then column, the border symbol used for interior scanning.
type Field struct {
	Height, Width int
	// Cells [][]*Cell
	Cells map[Coord]*Cell
	MinRow, MaxRow, MinCol, MaxCol int
	BordersFromLeft map[int]map[int]BorderSymbol
}
// confirmCoord widens the tracked bounding box so that c, with a 3-cell
// margin on every side, stays inside it.
func (f *Field) confirmCoord(c Coord) {
	lowRow, highRow := c.Row-3, c.Row+3
	lowCol, highCol := c.Col-3, c.Col+3
	if lowRow < f.MinRow {
		f.MinRow = lowRow
	}
	if highRow > f.MaxRow {
		f.MaxRow = highRow
	}
	if lowCol < f.MinCol {
		f.MinCol = lowCol
	}
	if highCol > f.MaxCol {
		f.MaxCol = highCol
	}
}
// CreateField builds an empty Field of the given nominal dimensions with
// its maps initialized.
func CreateField(height, width int) Field {
	f := Field{Height: height, Width: width}
	f.Cells = make(map[Coord]*Cell)
	f.BordersFromLeft = make(map[int]map[int]BorderSymbol)
	return f
}
// PutSymbIntoMMMMap stores symb at [row][col] in the two-level map,
// creating the row's inner map on demand.
func PutSymbIntoMMMMap(mmmap map[int]map[int]BorderSymbol, row, col int, symb BorderSymbol) {
	if _, ok := mmmap[row]; !ok {
		mmmap[row] = make(map[int]BorderSymbol)
	}
	mmmap[row][col] = symb
}
// digByInstructions walks the dig plan, recording for every trench cell
// the border symbol ('|' or a corner) as seen scanning its row left to
// right — the data digInsides later needs for even-odd crossing counts.
// Returns the number of border cells dug.
func (f *Field) digByInstructions(instructions []Instruction) (borderAmount int) {
	// for the last turn
	instructions = append(instructions, instructions[0])
	// but also don't overcount the border
	borderAmount -= instructions[0].Steps
	runnerCoord := Coord{Col: 0, Row: 0}
	var prevInstruction Instruction
	firstInstruction := true
	for _, instruction := range instructions {
		log.Printf("starting new instruction %+v", instruction)
		if !firstInstruction {
			// At a turn, record the corner symbol at the current cell.
			turn := getTurnAsIfGoingFromLeft(prevInstruction.Direction, instruction.Direction)
			for _, theTurn := range turn {
				PutSymbIntoMMMMap(f.BordersFromLeft, runnerCoord.Row, runnerCoord.Col, theTurn)
			}
		}
		firstInstruction = false
		for i := 0; i < instruction.Steps; i++ {
			runnerCoord = runnerCoord.applyDirection(instruction.Direction)
			f.confirmCoord(runnerCoord)
			borderAmount += 1
			if instruction.Direction == Upward || instruction.Direction == Downward {
				// Vertical runs are '|' except where a corner was already
				// recorded for this cell.
				_, alreadyCountedTurn := f.BordersFromLeft[runnerCoord.Row][runnerCoord.Col]
				if !alreadyCountedTurn {
					PutSymbIntoMMMMap(f.BordersFromLeft, runnerCoord.Row, runnerCoord.Col, Vertical)
				}
			}
		}
		prevInstruction = instruction
	}
	return
}
// getTurnAsIfGoingFromLeft maps a pair of consecutive dig directions to the
// corner symbol the border shows when its row is scanned left to right.
// Unrecognized pairs yield the zero-valued symbol (the original panic is
// kept commented out below).
func getTurnAsIfGoingFromLeft(directionFrom, directionTo Direction) []BorderSymbol {
	var symbol BorderSymbol
	switch {
	case directionFrom == Upward && directionTo == Rightward:
		symbol = FromUp
	case directionFrom == Downward && directionTo == Rightward:
		symbol = FromDown
	case directionFrom == Upward && directionTo == Leftward:
		symbol = ToDown
	case directionFrom == Downward && directionTo == Leftward:
		symbol = ToUp
	case directionFrom == Rightward && directionTo == Upward:
		symbol = ToUp
	case directionFrom == Rightward && directionTo == Downward:
		symbol = ToDown
	case directionFrom == Leftward && directionTo == Upward:
		symbol = FromDown
	case directionFrom == Leftward && directionTo == Downward:
		symbol = FromUp
	}
	// panic(fmt.Sprint("got strange from %s to %s", directionFrom.String(), directionTo.String()))
	return []BorderSymbol{symbol}
}
// String renders the field as an ImageMagick `-draw` text directive:
// border symbols win, then '@' for cells queued to dig, '#' for dug cells,
// '.' for everything else.
func (f *Field) String() string {
	s := "text 15,15 \""
	for row := f.MinRow; row <= f.MaxRow; row++ {
		rowChars := make([]rune, f.MaxCol-f.MinCol+1)
		for col := f.MinCol; col <= f.MaxCol; col++ {
			rowBords := f.BordersFromLeft[row]
			if rowBords != nil {
				bord, exists := rowBords[col]
				if exists {
					rowChars[col-f.MinCol] = rune(bord)
					continue
				}
			}
			cell := f.Cells[Coord{col, row}]
			if cell != nil && cell.ToBeDug {
				rowChars[col-f.MinCol] = '@'
				continue
			}
			if f.isCellDug(row, col) {
				rowChars[col-f.MinCol] = '#'
			} else {
				rowChars[col-f.MinCol] = '.'
			}
		}
		s += string(rowChars)
		s += "\n"
	}
	s += "\""
	return s
}
// digInsides counts interior cells row by row using the even-odd rule over
// the recorded border symbols, one goroutine per row. Per-row counts are
// funneled through the lineSum channel into a single accumulator goroutine;
// `done` signals the accumulator has drained everything.
func (f *Field) digInsides() (result int) {
	lineSum := make(chan int)
	var wg sync.WaitGroup
	rowsCount := f.MaxRow - f.MinRow
	wg.Add(rowsCount)
	done := make(chan bool)
	// Close the sum channel once every row worker has finished.
	go func() {
		wg.Wait()
		close(lineSum)
	}()
	// Single consumer: sums per-row counts, then signals completion.
	go func() {
		for rowInternalCount := range lineSum {
			result += rowInternalCount
		}
		close(done)
	}()
	for row := f.MinRow; row < f.MaxRow; row++ {
		go func(row int) {
			if row%10000 == 0 {
				log.Printf("processed rows %d out of %d", row, f.MaxRow)
			}
			specialBorders := f.BordersFromLeft[row]
			if len(specialBorders) == 0 {
				wg.Done()
				return
			}
			type BorderItem struct {
				border BorderSymbol
				col    int
			}
			// Collect this row's border symbols and sort by column so the
			// scan proceeds left to right.
			rowBorders := make([]BorderItem, 0)
			for col, borderSymbol := range specialBorders {
				rowBorders = append(rowBorders, BorderItem{borderSymbol, col})
			}
			slices.SortFunc(rowBorders, func(a BorderItem, b BorderItem) int {
				return a.col - b.col
			})
			prevBorder := rowBorders[0]
			bordersCrossed := 0
			if prevBorder.border == Vertical {
				bordersCrossed += 1
			}
			for _, specialBorder := range rowBorders[1:] {
				diff := specialBorder.col - prevBorder.col - 1
				// Corner pairs: F..J and L..7 count as a single crossing;
				// F..7 and L..J cancel (the trench returns the same way).
				if specialBorder.border == ToUp && prevBorder.border == FromUp {
					bordersCrossed += 1
					prevBorder = specialBorder
					continue
				}
				if specialBorder.border == ToDown && prevBorder.border == FromDown {
					bordersCrossed += 1
					prevBorder = specialBorder
					continue
				}
				if specialBorder.border == ToUp && prevBorder.border == FromDown {
					prevBorder = specialBorder
					continue
				}
				if specialBorder.border == ToDown && prevBorder.border == FromUp {
					prevBorder = specialBorder
					continue
				}
				if bordersCrossed%2 == 1 { // is in
					// NOTE(review): leftover debug loop — its body is fully
					// commented out, so it is a no-op.
					for col := prevBorder.col + 1; col < specialBorder.col; col++ {
					}
					lineSum <- diff
				}
				if specialBorder.border == Vertical {
					bordersCrossed += 1
				}
				prevBorder = specialBorder
			}
			wg.Done()
		}(row)
	}
	<-done
	return result
}
// func (f *Field) digInsides() (countInside int) {
// for row := f.MinRow; row < f.MaxRow; row++ {
// if row % 10000 == 0 {
// log.Printf("processed rows %d out of %d", row, f.MaxRow)
// }
// isInside := false
// seenUp, seenDown := false, false // for detecting L---7 walls
// for col := f.MinCol; col < f.MaxCol; col++ {
// // TODO next optimization - for each row, store indices of cols with border cells
// // so that count of inside would be done by many at a time
// rightCellIsDug := f.isCellDug(row, col+1)
// if f.isCellDug(row, col) {
// upCellIsDug := f.isCellDug(row-1, col)
// downCellIsDug := f.isCellDug(row+1, col)
// if !rightCellIsDug {
// if (upCellIsDug && seenDown) || (downCellIsDug && seenUp) {
// isInside = !isInside
// }
// seenUp, seenDown = false, false
// }
// } else {
// // not a dug out cell, maybe inside and needs to be dug out
// if isInside {
// // f.Cells[Coord{col, row}] = &Cell{
// // ToBeDug: true,
// // }
// countInside += 1
// // log.Printf("tick count inside for %d %d", row, col)
// // cellPtr.ToBeDug = true
// }
// if rightCellIsDug {
// seenUp = f.isCellDug(row-1, col+1)
// seenDown = f.isCellDug(row+1, col+1)
// }
// }
// }
// }
// return
// }
// isCellDug reports whether the cell at (row, col) exists and is dug out.
func (f *Field) isCellDug(row, col int) bool {
	cell, found := f.Cells[Coord{col, row}]
	if !found || cell == nil {
		return false
	}
	return cell.IsDug
}
// WriteToFile writes content to filename, creating or truncating it.
// Any failure — create, write, or close — panics, matching the fail-fast
// error style used throughout this package.
func WriteToFile(filename string, content string) {
	fileBorder, err := os.Create(filename)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := fileBorder.Close(); err != nil {
			panic(err)
		}
	}()
	// BUG FIX: the WriteString error was previously ignored, so a short or
	// failed write went unnoticed.
	if _, err := fileBorder.WriteString(content); err != nil {
		panic(err)
	}
}

51
day18/notes.org Normal file
View File

@@ -0,0 +1,51 @@
#+title: Notes
* part 2 and i'm struggling.
maybe i need to mark 'inside' cells while i dig?
i don't know which is 'outside' from the getgo?
if i mark 'all the rightside', will that help to calculate inside?
* well, if we don't have an instruction with steps:1 i can just count points above and below the line
without more complicated things
just count 'seenUp' and 'seenDown' if equal - then we changed side
and - we shouldn't have 'step1' because all numbers are soooo big.
ok. let's do that? with maps of cols.
** CANCELLED add map[row]map[col]any
** CANCELLED separate method to set it up after we have all of the BorderCellCols
** CANCELLED during digInsides on each consecutive - check above and below and count
when there's a jump - compare counts, to make decision on whether to switch 'isInside'
** no. just because they are long doesn't mean they won't ever get one near another
* another idea is to save | and corners, as if we're going from left to right
this seems reasonable.
** DONE i guess []SpecialSymbol which has Col and Symbol
** DONE no, let's make it map. yes will have to init, but yuck anyway
** TODO then different logic on border building.
if U \ D - on all but last add '|'
on last - calc with the next turn, what should be saved 'as if traversing from the left'
for L \ R - on last - calc what the turn was
** TODO !! between last and first movement the corner is unknown.
so, copy the first instruction to the end?
** moment of hope.
my calculation for example input for part 2
day18 result: 952408144115
952408144115
*** YES.
*** about 1M for 4 minutes
** so, my input is ~16M rows
3.5 seconds per 10k
** well, maybe i can parallel.
*** parallel example
day18 result: 952407566854
*** and with separate done channel from the summing goroutine
952408144115
**** YES
** and
2023/12/18 23:35:31 border is 195341588; inside is 148441957805559
2023/12/18 23:35:31
day18 result: 148442153147147
* i should have used a formula. maybe then it would taken less than 4 hours

17
day19/example Normal file
View File

@@ -0,0 +1,17 @@
px{a<2006:qkq,m>2090:A,rfg}
pv{a>1716:R,A}
lnx{m>1548:A,A}
rfg{s<537:gd,x>2440:R,A}
qs{s>3448:A,lnx}
qkq{x<1416:A,crn}
crn{x>2662:A,R}
in{s<1351:px,qqz}
qqz{s>2770:qs,m<1801:hdj,R}
gd{a>3333:R,R}
hdj{m>838:A,pv}
{x=787,m=2655,a=1222,s=2876}
{x=1679,m=44,a=2067,s=496}
{x=2036,m=264,a=79,s=2244}
{x=2461,m=1339,a=466,s=291}
{x=2127,m=1623,a=2188,s=1013}

5
day19/example1 Normal file
View File

@@ -0,0 +1,5 @@
in{x<4000:R,m<4000:R,A}
px{a<4000:R,A}
qqz{s>2770:R,m<1801:A,R}
{x=787,m=2655,a=1222,s=2876}

57
day19/intervals.go Normal file
View File

@@ -0,0 +1,57 @@
package day19
import (
"sort"
)
// merge collapses overlapping [start,end] intervals into a minimal sorted
// set. The input slice may be reordered in place by the sort; returned
// intervals are fresh slices with capacity clipped to length.
func merge(intervals [][]int) [][]int {
	const start, end = 0, 1
	if len(intervals) > 1 {
		sort.Slice(intervals, func(i, j int) bool {
			return intervals[i][start] < intervals[j][start]
		})
	}
	var merged [][]int
	for _, iv := range intervals {
		n := len(merged)
		if n == 0 || iv[start] > merged[n-1][end] {
			// No overlap with the previous interval: start a new one.
			merged = append(merged, []int{iv[start], iv[end]})
			continue
		}
		if iv[end] > merged[n-1][end] {
			// Overlap: extend the previous interval's right edge.
			merged[n-1][end] = iv[end]
		}
	}
	return merged[:len(merged):len(merged)]
}
// applyLessThan intersects each [from,to] interval with the constraint
// "value < n", dropping intervals that lie entirely at or above n.
//
// BUG FIX: the upper bound is now clamped to the interval's own end. The
// old code always returned [from, n-1], which could EXTEND an interval
// past its original right edge (e.g. [1,50] with n=100 became [1,99]).
// The bug was masked when intervals always started at the full [1,4000]
// range, but corrupts results once intervals have been narrowed.
func applyLessThan(intervals [][]int, n int) [][]int {
	var lessers [][]int
	for _, interval := range intervals {
		from, to := interval[0], interval[1]
		if from >= n {
			continue // no value in this interval satisfies "< n"
		}
		upper := n - 1
		if to < upper {
			upper = to
		}
		lessers = append(lessers, []int{from, upper})
	}
	return lessers
}
// applyMoreThan intersects each [from,to] interval with the constraint
// "value > n", dropping intervals that lie entirely at or below n.
//
// BUG FIX: the lower bound is now clamped to the interval's own start.
// The old code always returned [n+1, to], which could EXTEND an interval
// below its original left edge (e.g. [2000,4000] with n=100 became
// [101,4000]). Same masking/corruption behavior as applyLessThan.
func applyMoreThan(intervals [][]int, n int) [][]int {
	var greaters [][]int
	for _, interval := range intervals {
		from, to := interval[0], interval[1]
		if to <= n {
			continue // no value in this interval satisfies "> n"
		}
		lower := n + 1
		if from > lower {
			lower = from
		}
		greaters = append(greaters, []int{lower, to})
	}
	return greaters
}

47
day19/notes.org Normal file
View File

@@ -0,0 +1,47 @@
#+title: Notes
* testing things
testSorter := day19.ReadSorterLine("qqz{s>2770:qs,m<1801:hdj,R}")
log.Printf("my test sorter is %+v", testSorter)
testOperation := day19.ReadOperationLine("s>2770:qs")
log.Println(testOperation)
** testing simplification
lnx{m>1548:A,A}
qqz{s>2770:qs,m<1801:hdj,R}
kt{m>2215:R,x>3386:A,x<3107:R,R}
testSorter := day19.ReadSorterLine("kt{m>2215:R,x>3386:A,x<3107:R,R}")
log.Printf("my test sorter is %+v", testSorter)
simplified := day19.SimplifyOperation(testSorter)
log.Printf("> simplivied %+v", simplified)
* i probably don't need 'actual actors'
just a generic function that takes 'detail' and 'sorterData'
then applies sorterData to the detail,
and calls itself with new sorter
with special cases for "R" and "A"
so. have a function from OperationData & Detail -> true/false
if true take the destination, if false, check next
* well. only way to do this is with intervals
so, sorter check takes in interval.
then for each of the rule,
call first rule with full interval,
deduct first rule (for those that don't match) and pass to second.
deduct second and pass to next
A will return full
R will return empty
and results from each rule application should be joined
so. i need interval deduction
and i need interval join
* found a bug in always using initial intervals to calculate 'failing' after each step
2023/12/19 11:45:14 got and checked 167409079868000
In the above example, there are 167409079868000 distinct combinations of ratings that will be accepted.

341
day19/sortingParts.go Normal file
View File

@@ -0,0 +1,341 @@
package day19
import (
"fmt"
"log"
"os"
"regexp"
"strconv"
"strings"
"sync"
)
// Run solves day 19 part 2: parses the workflows, then counts how many of
// the 4000^4 attribute combinations are accepted by recursively threading
// value intervals through the workflow graph starting at "in".
func Run() int {
	fmt.Println("hello day 19. sorting parts")
	filename := "day19/input"
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("cannot read file ", filename))
	}
	text := string(bytes)
	// The input has two blank-line-separated sections: workflows, details.
	split := strings.Split(text, "\n\n")
	sorters := ReadSorters(split[0])
	details := ReadDetailsPart(split[1])
	log.Printf("yay, got sorters\n%+v\nand details\n%+v", sorters, details)
	// countApproved := CountApprovedDetails(details, sorters)  // part-1 path
	result := 0
	// Each attribute starts with the full 1..4000 range.
	fullIntervals := AttrIntervals{
		"x": [][]int{[]int{1, 4000}},
		"m": [][]int{[]int{1, 4000}},
		"a": [][]int{[]int{1, 4000}},
		"s": [][]int{[]int{1, 4000}},
	}
	andChecked := processInterval(fullIntervals, "in", sorters)
	log.Print("got and checked ", andChecked)
	result = andChecked
	return result
}
// CountApprovedDetails (part 1) runs every detail through the workflows
// concurrently and returns the sum of all attribute values of the accepted
// details. NOTE(review): despite the name, the return value is the score,
// not the count — `count` is only accumulated locally.
func CountApprovedDetails(details []DetailData, sorters map[string]SorterData) int {
	var wg sync.WaitGroup
	wg.Add(len(details))
	approvedDetails := make(chan DetailData)
	// Close the channel once every per-detail goroutine has reported.
	go func() {
		wg.Wait()
		close(approvedDetails)
	}()
	count := 0
	acceptedScore := 0
	done := make(chan any)
	// Single consumer: tallies accepted details, then signals completion.
	go func() {
		for detail := range approvedDetails {
			log.Println("got approved ", detail)
			count += 1
			for _, attrValue := range detail.Attrs {
				acceptedScore += attrValue
			}
		}
		close(done)
	}()
	for _, d := range details {
		go func(d DetailData) {
			log.Print("> starting for ", d)
			isAccepted := ProcessDetail(d, sorters)
			if isAccepted {
				log.Println("> accepting ", d)
				approvedDetails <- d
			} else {
				log.Println("> rejecting ", d)
			}
			wg.Done()
		}(d)
	}
	<-done
	return acceptedScore
}
// Operation is a workflow rule's comparison operator: '<' or '>'.
type Operation rune

const (
	LessThan Operation = '<'
	MoreThan Operation = '>'
)

// String renders the operator as its single character.
func (o Operation) String() string {
	return string(o)
}

// OperationData is one parsed rule of a workflow: compare attribute
// AttrName against Num using Operation; on success route to SentToName.
// InitialString preserves the raw rule text for display.
type OperationData struct {
	AttrName string
	Operation Operation
	Num int
	SentToName string
	InitialString string
}

// String returns the rule exactly as it appeared in the input.
func (od OperationData) String() string {
	return od.InitialString
}

// SorterData is a parsed workflow: named rules tried in order, with
// DefaultState as the fall-through destination ("A", "R", or another name).
type SorterData struct {
	Name string
	DefaultState string
	Operations []OperationData
}
// ReadSorters parses the workflow section into a map keyed by workflow
// name; each sorter is simplified before being stored.
func ReadSorters(sortersText string) map[string]SorterData {
	parsed := make(map[string]SorterData)
	for _, line := range strings.Split(strings.TrimSpace(sortersText), "\n") {
		s := SimplifyOperation(ReadSorterLine(line))
		parsed[s.Name] = s
	}
	return parsed
}
// ReadSorterLine parses one workflow line such as
// "qqz{s>2770:qs,m<1801:hdj,R}" into its name, ordered comparison rules,
// and trailing default destination.
func ReadSorterLine(line string) (result SorterData) {
	re1 := regexp.MustCompile(`(?P<NAME>\D+){(?P<OPERATIONS>.+)}`)
	firstSplit := re1.FindStringSubmatch(line)
	result.Name = firstSplit[1]
	operationLines := strings.Split(firstSplit[2], ",")
	// The last comma-separated entry is the default; the rest are rules.
	operations := make([]OperationData, len(operationLines)-1)
	result.Operations = operations
	result.DefaultState = operationLines[len(operationLines)-1]
	for i, line := range operationLines[:len(operationLines)-1] {
		operations[i] = ReadOperationLine(line)
	}
	log.Printf("mathed %s got %+v; operations : %+v\n", line, firstSplit, operations)
	return
}
// ReadOperationLine parses a single rule like "s>2770:qs" into attribute
// name, operator, threshold, and destination. The raw text is retained in
// InitialString. Panics when the number fails to parse.
func ReadOperationLine(line string) (result OperationData) {
	result.InitialString = line
	re := regexp.MustCompile(`(?P<ATTRNAME>\D)(?P<OPERATION>[\>\<])(?P<NUMBER>\d+):(?P<TARGET>\D+)`)
	split := re.FindStringSubmatch(line)
	log.Printf("matching operation %s into %+v\n", line, split)
	result.AttrName = split[1]
	result.Operation = Operation([]rune(split[2])[0])
	result.SentToName = split[4]
	num, err := strconv.Atoi(split[3])
	if err != nil {
		panic(fmt.Sprintf("error getting number %s in line %s. %s", split[3], line, err))
	}
	result.Num = num
	return
}
// SimplifyOperation drops a trailing run of rules whose destination equals
// the sorter's default — those checks can never change the outcome.
func SimplifyOperation(sorter SorterData) SorterData {
	keep := len(sorter.Operations)
	for keep > 0 && sorter.Operations[keep-1].SentToName == sorter.DefaultState {
		keep--
	}
	sorter.Operations = sorter.Operations[:keep]
	return sorter
}
// DetailData is one machine part: its x/m/a/s attribute ratings.
type DetailData struct {
	Attrs map[string]int
}

// ReadDetailsPart parses the details section, one detail per line.
func ReadDetailsPart(text string) (result []DetailData) {
	text = strings.TrimSpace(text)
	for _, line := range strings.Split(text, "\n") {
		result = append(result, ReadDetailLine(line))
	}
	return
}
// ReadDetailLine parses a detail line like "{x=787,m=2655,a=1222,s=2876}"
// into its attribute map. Panics when a value is not an integer.
func ReadDetailLine(line string) (result DetailData) {
	attrs := make(map[string]int)
	result.Attrs = attrs
	// Strip the surrounding braces, then split the comma-separated pairs.
	line = line[1 : len(line)-1]
	attrsLine := strings.Split(line, ",")
	re := regexp.MustCompile(`(?P<ATTR>\D)=(?P<NUM>\d+)`)
	for _, attrLine := range attrsLine {
		split := re.FindStringSubmatch(attrLine)
		attrName := split[1]
		num, err := strconv.Atoi(split[2])
		if err != nil {
			panic(fmt.Sprint("error parsing detail ", line))
		}
		attrs[attrName] = num
	}
	return
}
// ProcessDetail threads detail d through the workflow graph from "in"
// until it reaches accept ("A") or reject ("R"); reports acceptance.
// Panics when a referenced workflow does not exist.
func ProcessDetail(d DetailData, sorters map[string]SorterData) (isAccepted bool) {
	name := "in"
	for {
		if name == "A" {
			return true
		}
		if name == "R" {
			return false
		}
		sorter, ok := sorters[name]
		if !ok {
			panic(fmt.Sprint("error finding soter ", name))
		}
		name = sorter.NextSorterNameFor(d)
	}
}
// NextSorterNameFor returns the destination of the first rule detail d
// passes, or the sorter's default when none match.
func (s SorterData) NextSorterNameFor(d DetailData) string {
	for _, op := range s.Operations {
		if !op.IsDetailPassing(d) {
			continue
		}
		return op.SentToName
	}
	return s.DefaultState
}
// IsDetailPassing evaluates the rule's comparison against the detail's
// attribute value; panics on an unknown operator.
func (o OperationData) IsDetailPassing(d DetailData) bool {
	value := d.Attrs[o.AttrName]
	if o.Operation == LessThan {
		return value < o.Num
	}
	if o.Operation == MoreThan {
		return value > o.Num
	}
	panic(fmt.Sprint("unknown operation. ", o, d))
}
// AttrIntervals maps an attribute name ("x","m","a","s") to a set of
// [from,to] value intervals still considered possible.
type AttrIntervals map[string][][]int

// getPassingIntervals returns a copy of i restricted to the values that
// PASS this rule's comparison. Only the rule's own attribute is narrowed;
// the other attributes share their interval slices with the input (shallow
// copy — safe here because applyLessThan/applyMoreThan build fresh slices
// rather than mutating).
func (o OperationData) getPassingIntervals(i AttrIntervals) AttrIntervals {
	result := make(AttrIntervals, 0)
	for key, value := range i {
		result[key] = value
	}
	operationKey := o.AttrName
	operatedIntervals := result[operationKey]
	switch o.Operation {
	case LessThan:
		result[operationKey] = applyLessThan(operatedIntervals, o.Num)
	case MoreThan:
		result[operationKey] = applyMoreThan(operatedIntervals, o.Num)
	}
	return result
}
// getFailingIntervals restricts i to the values that FAIL this rule's
// comparison — the complement of getPassingIntervals within i. Only the
// rule's own attribute is narrowed; other attributes are shared as-is.
func (o OperationData) getFailingIntervals(i AttrIntervals) AttrIntervals {
	clone := make(AttrIntervals, len(i))
	for attr, ivs := range i {
		clone[attr] = ivs
	}
	switch o.Operation {
	case LessThan:
		// failing "< n" means ">= n", i.e. "> n-1"
		clone[o.AttrName] = applyMoreThan(clone[o.AttrName], o.Num-1)
	case MoreThan:
		// failing "> n" means "<= n", i.e. "< n+1"
		clone[o.AttrName] = applyLessThan(clone[o.AttrName], o.Num+1)
	}
	return clone
}
// processInterval recursively counts the accepted attribute combinations
// represented by i when fed to the named sorter: "A" accepts the whole
// interval product, "R" rejects it; otherwise each rule's passing slice
// recurses into its destination while the failing slice flows to the next
// rule and finally to the default.
func processInterval(i AttrIntervals, sorterName string, sorters map[string]SorterData) (combinationsAccepted int) {
	if sorterName == "A" {
		// Accepted: combinations = product over the four attributes of
		// their total allowed value counts.
		mul := 1
		for key, attrIntervals := range i {
			allowedValuesOfAttr := 0
			for _, interval := range attrIntervals {
				from := interval[0]
				to := interval[1]
				// NOTE(review): `len` shadows the builtin inside this loop.
				len := to - from + 1
				allowedValuesOfAttr += len
				log.Printf("for %s allowed attrs are %d", key, allowedValuesOfAttr)
			}
			mul *= allowedValuesOfAttr
		}
		// NOTE(review): the "max" factor in this log has an extra zero (40000).
		log.Printf("exit recursion for %s. Accept interval %+v . result %d. max is %d", sorterName, i, mul, 40000 * 4000 * 4000 * 4000)
		return mul
	}
	if sorterName == "R" {
		return 0
	}
	s := sorters[sorterName]
	log.Printf("> starting interval check for %s (%+v) on %+v", sorterName, s, i)
	intervalsPassingOnThisStep := i
	for _, operation := range s.Operations {
		intervalsPassing := operation.getPassingIntervals(intervalsPassingOnThisStep)
		log.Printf(">> %s; in operation %+v. passing are %+v", sorterName, operation, intervalsPassing)
		ofThoseAreAccepted := processInterval(intervalsPassing, operation.SentToName, sorters)
		combinationsAccepted += ofThoseAreAccepted
		log.Printf(">> %s; results so far are %d", sorterName, combinationsAccepted)
		// Values failing this rule are what the next rule (or default) sees.
		intervalsFailingAndPassedToNextCheck := operation.getFailingIntervals(intervalsPassingOnThisStep)
		log.Printf(">> %s; failing for the next step %+v", sorterName, intervalsFailingAndPassedToNextCheck)
		intervalsPassingOnThisStep = intervalsFailingAndPassedToNextCheck
	}
	log.Printf(">> %s; about to go into DEFAULT", sorterName)
	intervalsAfterDefault := processInterval(intervalsPassingOnThisStep, s.DefaultState, sorters)
	log.Printf(">> %s; after defaul. passing are %+v", sorterName, intervalsAfterDefault)
	combinationsAccepted += intervalsAfterDefault
	log.Printf(">> %s; results after default %d", sorterName, combinationsAccepted)
	return
}

View File

@@ -1,100 +0,0 @@
Game 1: 4 green, 3 blue, 11 red; 7 red, 5 green, 10 blue; 3 green, 8 blue, 8 red; 4 red, 12 blue; 15 red, 3 green, 10 blue
Game 2: 3 red, 1 blue, 2 green; 1 blue, 9 green; 1 red, 10 green
Game 3: 5 green, 9 red, 4 blue; 3 green, 7 blue; 12 blue, 3 green, 3 red; 3 blue, 7 red, 2 green; 7 blue, 3 green, 10 red
Game 4: 2 green, 2 blue; 12 red, 9 green, 2 blue; 13 green, 15 red, 4 blue; 14 red, 3 green, 5 blue; 6 red, 1 green; 1 blue, 2 red, 2 green
Game 5: 2 green, 6 blue; 1 red, 3 green, 5 blue; 3 green, 4 blue; 3 blue, 5 green, 1 red; 5 blue
Game 6: 5 green, 1 blue, 3 red; 8 green, 15 red; 16 green, 5 red, 1 blue
Game 7: 1 blue, 3 red, 11 green; 18 red, 16 blue, 5 green; 13 blue, 5 green; 1 red, 8 green, 15 blue
Game 8: 1 green, 14 blue, 1 red; 10 blue; 1 green
Game 9: 4 green, 12 blue, 1 red; 14 blue; 2 blue, 4 green; 4 green, 1 red, 10 blue
Game 10: 11 green, 9 red; 12 red, 9 green; 5 red, 7 blue, 5 green; 6 green, 1 blue, 12 red; 3 red, 3 blue; 16 red, 9 blue, 7 green
Game 11: 11 green, 1 red, 9 blue; 2 red, 13 green, 5 blue; 5 green, 2 red, 5 blue; 5 green, 7 blue; 1 red, 5 blue, 1 green
Game 12: 5 green, 1 red; 1 red, 4 green; 1 blue, 12 green; 15 green, 4 blue; 4 blue, 19 green; 16 green, 4 blue
Game 13: 1 red, 9 green, 5 blue; 10 blue, 7 green, 1 red; 3 green, 2 red, 14 blue; 16 blue, 3 red
Game 14: 9 red, 1 blue, 2 green; 16 blue, 7 red; 2 green, 3 red, 14 blue; 1 green, 9 blue
Game 15: 6 blue; 4 blue; 1 red, 16 blue, 3 green
Game 16: 14 green, 5 red, 1 blue; 1 red, 1 blue; 5 blue
Game 17: 1 blue, 1 green, 3 red; 2 red, 2 blue, 2 green; 1 blue, 1 red; 1 red, 2 green, 2 blue; 2 blue; 1 green, 2 red, 1 blue
Game 18: 4 blue, 2 green, 1 red; 1 green, 1 red, 10 blue; 1 green, 1 red, 2 blue; 1 red, 5 blue; 3 green, 6 blue; 1 red, 1 green, 7 blue
Game 19: 1 blue, 13 green, 12 red; 7 blue, 2 green, 1 red; 1 blue, 3 red, 3 green; 3 blue, 8 green, 10 red; 7 blue, 2 green
Game 20: 1 red, 17 blue; 10 blue, 5 green; 9 green, 1 red, 3 blue; 1 red, 5 green, 1 blue
Game 21: 3 red, 6 blue, 5 green; 4 blue, 1 red, 7 green; 6 blue, 4 red, 9 green
Game 22: 11 blue, 2 red, 6 green; 16 blue, 5 red, 6 green; 12 red, 2 green, 10 blue; 14 blue, 2 green, 11 red
Game 23: 3 red, 5 green; 10 blue, 1 green, 9 red; 2 red, 10 green, 9 blue; 9 blue, 7 green
Game 24: 8 blue, 1 red; 3 red, 9 blue; 9 green, 2 red, 8 blue
Game 25: 2 red, 1 green, 1 blue; 1 green, 12 blue, 2 red; 2 red, 1 blue; 2 blue; 1 green, 10 blue; 6 blue
Game 26: 2 red; 4 green, 1 red, 7 blue; 11 blue, 2 red, 4 green; 1 red, 1 blue; 1 red, 5 green, 12 blue
Game 27: 1 red, 7 green, 8 blue; 13 green, 12 blue, 1 red; 6 red, 1 green, 10 blue; 8 red, 2 blue, 2 green; 11 blue, 4 green, 4 red
Game 28: 1 red, 8 blue, 3 green; 12 green, 4 blue; 1 red, 4 blue, 11 green; 7 blue, 10 green, 10 red; 11 blue, 7 red, 8 green; 10 red, 2 green, 2 blue
Game 29: 4 green, 2 red; 1 blue, 11 red; 2 blue, 3 green, 1 red; 16 red; 3 green, 8 red, 1 blue; 2 blue, 7 green, 12 red
Game 30: 1 blue, 3 green; 4 green, 2 blue; 3 red, 5 blue; 4 green, 1 red
Game 31: 2 red, 2 blue, 3 green; 2 green, 3 blue, 8 red; 7 red, 16 blue, 2 green; 5 red, 20 blue, 2 green
Game 32: 2 red, 1 green, 4 blue; 4 green, 4 red, 1 blue; 4 red, 4 blue; 1 blue, 4 red, 2 green; 4 blue, 3 green, 4 red
Game 33: 11 green, 4 blue, 10 red; 2 green, 13 red, 7 blue; 13 red, 2 blue, 8 green; 15 red, 9 blue, 12 green; 14 red, 10 green, 2 blue; 13 red, 7 green
Game 34: 11 red, 6 blue, 4 green; 16 red, 7 blue, 4 green; 6 red, 18 green, 6 blue; 3 blue, 16 red, 3 green; 2 red, 3 blue, 17 green; 3 green, 9 red, 6 blue
Game 35: 6 green, 10 red, 12 blue; 4 red, 1 blue, 2 green; 3 green, 8 blue, 7 red; 6 red, 12 blue, 2 green
Game 36: 4 green, 2 blue, 2 red; 3 green, 10 red, 1 blue; 1 blue, 3 green, 2 red; 2 green, 1 red; 1 blue, 5 red
Game 37: 3 blue, 1 red, 2 green; 8 red, 4 green, 10 blue; 4 red, 4 green
Game 38: 13 green, 3 red, 2 blue; 1 red, 13 green, 2 blue; 20 green, 3 red, 2 blue; 1 red, 2 blue, 12 green
Game 39: 13 blue, 1 red, 8 green; 5 red, 3 green, 8 blue; 6 blue, 4 green; 18 blue, 7 green, 1 red; 4 green, 3 blue, 5 red; 6 blue, 4 red, 1 green
Game 40: 2 red, 2 blue, 9 green; 1 blue, 2 red, 12 green; 16 green, 11 blue, 1 red; 1 green, 2 red; 3 blue, 2 red
Game 41: 7 blue, 1 red; 4 blue, 1 red; 3 blue, 1 red, 2 green; 13 blue
Game 42: 18 red, 1 green, 13 blue; 2 blue, 2 green, 7 red; 16 red, 12 blue; 1 green, 10 blue, 14 red
Game 43: 15 red, 6 green, 2 blue; 3 blue, 9 red, 3 green; 13 red
Game 44: 2 blue, 5 green, 3 red; 4 red, 4 blue, 19 green; 5 red, 3 blue, 9 green; 19 green, 6 red, 5 blue
Game 45: 5 red, 4 green, 13 blue; 12 red, 10 blue; 3 green, 9 blue, 5 red; 10 blue, 18 red, 5 green; 16 red, 6 green, 17 blue
Game 46: 3 green; 3 green, 2 blue; 4 blue, 2 red, 3 green; 5 blue, 3 green, 4 red; 1 green, 1 blue
Game 47: 2 blue, 1 red, 10 green; 2 red; 6 red, 1 blue; 16 red, 2 blue, 8 green; 5 blue, 8 red, 7 green
Game 48: 11 green, 4 red, 2 blue; 2 blue, 5 green, 8 red; 9 green, 6 red; 3 red, 3 green, 1 blue; 2 blue, 12 green, 17 red
Game 49: 10 blue, 4 green, 1 red; 10 red, 10 blue; 12 blue, 7 red; 13 blue, 6 green
Game 50: 1 red, 19 green, 7 blue; 4 red, 1 green, 5 blue; 16 green, 8 red, 8 blue
Game 51: 12 green, 18 blue; 13 green, 14 blue, 4 red; 7 green, 4 red, 14 blue; 8 green, 2 blue, 3 red; 16 blue, 8 green
Game 52: 9 blue, 9 green, 3 red; 8 blue, 1 green, 13 red; 2 red, 8 blue, 9 green; 13 red, 4 green; 6 green, 15 red; 11 blue, 11 red, 9 green
Game 53: 2 red, 4 green, 3 blue; 5 blue, 16 green; 4 blue, 8 red, 12 green
Game 54: 6 red, 16 green; 6 red, 15 green; 8 green, 8 red, 2 blue
Game 55: 9 red, 2 green; 4 blue; 2 green, 2 red, 7 blue; 1 red, 16 blue, 1 green; 17 blue, 5 red
Game 56: 14 green, 3 red, 9 blue; 14 blue, 15 green, 2 red; 8 red, 13 blue, 15 green; 15 blue, 2 red, 12 green; 3 red, 7 blue, 10 green; 10 blue, 13 green
Game 57: 1 blue, 10 green, 2 red; 4 blue, 9 green, 11 red; 2 blue
Game 58: 4 red, 2 blue, 5 green; 1 blue, 5 green, 4 red; 3 green, 4 red, 8 blue; 4 blue, 7 green; 5 green, 4 blue; 1 blue, 6 red
Game 59: 5 blue, 4 red, 3 green; 8 blue, 12 green, 5 red; 5 red, 8 blue, 15 green
Game 60: 6 red, 12 blue, 1 green; 10 blue, 20 green, 4 red; 6 blue, 1 green, 5 red; 9 red, 12 blue, 14 green; 15 green, 1 red, 14 blue; 10 green, 13 blue
Game 61: 1 blue, 12 green, 3 red; 4 green, 1 red, 4 blue; 8 red, 4 green, 6 blue
Game 62: 6 blue, 7 green, 3 red; 6 blue, 3 red, 3 green; 11 green, 6 red, 2 blue; 2 red, 6 blue, 3 green; 2 green, 3 blue, 3 red; 3 blue, 11 green, 11 red
Game 63: 5 green, 6 blue, 4 red; 6 green, 12 blue; 3 green, 9 blue, 10 red; 1 blue, 4 red, 5 green
Game 64: 10 green, 14 red; 1 blue, 9 red; 3 green, 10 blue, 14 red; 5 green, 3 blue, 12 red; 5 blue, 12 red, 13 green
Game 65: 1 red, 5 green, 10 blue; 14 red, 5 green, 10 blue; 10 blue, 10 red
Game 66: 9 green, 8 blue, 1 red; 8 red, 14 blue; 8 red, 7 blue, 2 green; 4 blue, 3 green, 5 red; 2 red, 8 green, 8 blue
Game 67: 4 red, 3 green, 3 blue; 4 green, 1 blue, 4 red; 1 blue, 3 red; 10 blue; 16 blue, 6 red, 4 green
Game 68: 6 blue, 6 green, 9 red; 4 blue, 9 red, 3 green; 3 blue, 8 red
Game 69: 4 green, 12 red, 3 blue; 2 red, 3 blue; 2 blue, 4 red, 2 green; 1 blue, 3 red
Game 70: 4 red, 3 green, 15 blue; 1 green, 4 red; 1 red, 1 green, 5 blue
Game 71: 4 blue, 2 red, 10 green; 7 red, 6 blue, 11 green; 4 blue, 7 red, 8 green
Game 72: 9 red, 9 blue, 1 green; 4 red, 6 green, 5 blue; 3 green, 7 red, 2 blue
Game 73: 3 green, 9 red; 4 green, 15 red; 12 red, 2 blue; 14 red, 3 green
Game 74: 2 red, 6 blue, 1 green; 3 red, 6 blue; 1 green, 12 blue, 14 red
Game 75: 3 green, 18 red; 1 green, 7 red, 1 blue; 2 red, 2 green, 3 blue; 11 red; 2 red, 3 green, 2 blue
Game 76: 6 green, 2 red, 5 blue; 13 green, 5 blue; 5 blue, 1 red, 1 green
Game 77: 4 blue, 6 green, 3 red; 15 red, 1 green; 4 green, 11 red, 13 blue; 8 blue, 6 green, 9 red; 3 blue, 1 green, 11 red; 3 green, 3 red
Game 78: 11 green, 1 blue, 2 red; 7 red, 16 blue, 11 green; 9 blue, 10 red, 6 green; 1 green, 8 blue, 10 red; 8 blue, 6 red, 1 green
Game 79: 2 blue, 5 green, 4 red; 1 blue, 1 red, 1 green; 1 blue, 5 red, 10 green; 6 red, 3 green, 3 blue; 8 red, 9 green, 6 blue; 7 blue, 6 green, 13 red
Game 80: 10 green, 7 blue, 5 red; 5 red, 1 green, 6 blue; 8 blue, 2 red, 8 green
Game 81: 3 green, 10 red; 6 blue, 8 green, 14 red; 4 green, 4 blue, 13 red; 5 blue, 11 green, 6 red; 16 red, 8 green, 5 blue; 6 green, 18 red, 6 blue
Game 82: 13 red, 1 green, 7 blue; 8 green, 4 blue, 12 red; 18 red, 5 green, 3 blue; 13 red, 4 green, 9 blue
Game 83: 1 red, 3 green, 4 blue; 5 blue, 4 green, 1 red; 3 green, 1 red, 12 blue; 4 green, 11 blue
Game 84: 3 blue, 10 green, 2 red; 3 red, 8 blue; 11 blue, 12 red, 14 green; 2 red, 11 green, 2 blue
Game 85: 8 blue, 2 green, 1 red; 13 blue, 6 red; 3 blue, 5 green
Game 86: 16 red, 8 blue; 7 blue; 16 red, 16 blue, 1 green; 15 blue, 11 red; 2 green, 7 red, 5 blue
Game 87: 6 green, 9 blue, 4 red; 1 red, 1 green, 4 blue; 5 blue, 13 green, 3 red; 2 green, 4 red; 16 blue, 10 green, 3 red
Game 88: 1 blue, 14 red; 14 red, 3 blue, 8 green; 1 blue, 5 green
Game 89: 12 green, 14 blue, 3 red; 2 red, 3 blue, 3 green; 2 blue, 8 green; 1 red, 3 green, 15 blue; 3 red, 5 blue
Game 90: 3 blue, 17 red, 11 green; 2 red, 2 blue, 7 green; 7 blue; 8 blue, 4 green, 10 red; 1 blue, 4 red
Game 91: 10 red, 9 blue, 8 green; 5 blue, 10 red, 2 green; 11 red, 17 green, 7 blue; 12 blue, 16 red, 18 green; 20 green, 5 blue, 15 red
Game 92: 1 green, 14 red, 1 blue; 2 blue, 6 green; 9 red, 6 green; 5 blue, 5 red, 2 green; 3 blue, 3 green, 10 red; 5 blue, 1 red
Game 93: 10 green, 1 red, 6 blue; 16 red, 5 blue, 2 green; 3 red, 7 green, 11 blue; 12 green, 5 blue, 4 red; 8 green, 7 blue, 10 red; 1 red, 5 blue
Game 94: 3 blue, 1 red, 3 green; 1 blue, 4 green, 4 red; 9 green
Game 95: 3 green, 5 blue, 9 red; 2 green, 9 red, 2 blue; 12 red, 9 green; 11 green, 9 red, 9 blue; 9 blue, 6 green, 10 red; 13 red, 2 blue, 5 green
Game 96: 2 red, 19 blue, 2 green; 10 blue, 1 red, 2 green; 9 blue, 1 red; 2 green, 3 blue; 1 green, 1 red, 11 blue
Game 97: 6 green, 7 blue, 5 red; 7 green, 1 red, 11 blue; 6 green, 6 red, 5 blue; 2 red, 9 blue, 1 green
Game 98: 5 green, 8 red, 15 blue; 16 green, 9 blue, 8 red; 5 blue, 3 red, 2 green; 13 blue, 12 green, 4 red; 2 red, 15 green, 3 blue; 1 green, 11 blue, 2 red
Game 99: 1 green, 7 blue, 6 red; 16 blue, 9 red; 1 green, 17 red, 12 blue; 15 red, 7 blue; 8 blue, 14 red
Game 100: 5 blue, 11 red, 6 green; 11 red, 2 blue, 5 green; 6 blue, 6 green; 2 blue, 6 red, 15 green; 7 red, 4 blue, 7 green

5
day20/example1 Normal file
View File

@@ -0,0 +1,5 @@
broadcaster -> a, b, c
%a -> b
%b -> c
%c -> inv
&inv -> a

5
day20/example2 Normal file
View File

@@ -0,0 +1,5 @@
broadcaster -> a
%a -> inv, con
&inv -> b
%b -> con
&con -> output

142
day20/looping.go Normal file
View File

@@ -0,0 +1,142 @@
package day20
import (
"cmp"
"log"
"slices"
)
// TransitiveOutputs collects into visited the names of every module reachable
// from `from` by following module output edges, and returns visited.
// Names without a module entry (pure sinks like "rx") are not recorded.
// NOTE(review): the conjunction "th" is hardcoded-excluded at every level so
// that each returned subgraph contains only its own loop's modules — this is
// specific to the author's puzzle input; confirm before reuse.
func TransitiveOutputs(from string, allModules map[string]Module, visited map[string]any) map[string]any {
	if _, alreadyProcessed := visited[from]; alreadyProcessed {
		return visited
	}
	module, found := allModules[from]
	if !found {
		// Unknown name: a terminal sink with no outgoing edges to follow.
		return visited
	}
	visited[from] = struct{}{}
	for _, output := range module.Outputs() {
		TransitiveOutputs(output, allModules, visited)
	}
	// Drop the shared sink "th" (feeds rx) from every subgraph.
	delete(visited, "th")
	return visited
}
// FindSubGraphLoopLength presses the button repeatedly until the combined
// state of the subgraph's modules repeats, returning the step the repeated
// state was first seen (fromStep), the step at which the repeat was detected
// (toStep), and, per step, the pulses observed on the outputs of
// monitorOutputsOf.
// NOTE(review): the log reports the loop as [prevSteps, step-1] while toStep
// is returned as step; callers in this file ignore both values, so the
// off-by-one is left as-is.
func FindSubGraphLoopLength(subgraph map[string]any, allModules map[string]Module, monitorOutputsOf string) (fromStep, toStep int, monitoredPulses map[int][]PulseType) {
	step := 1
	// state snapshot -> step at which it was first seen
	seenSubgraphStates := make(map[string]int)
	monitoredPulses = make(map[int][]PulseType)
	for {
		monitoredPulsesOfTheStep := PropagateButtonPressWithMonitor(allModules, step, monitorOutputsOf)
		// Snapshot only the modules belonging to this subgraph.
		subgraphModules := make(map[string]Module, len(subgraph))
		for key := range subgraph { // was `for key, _ :=` — blank value dropped
			subgraphModules[key] = allModules[key]
		}
		subgraphState := ModulesState(subgraphModules)
		prevSteps, known := seenSubgraphStates[subgraphState]
		if known {
			log.Printf(">>> found loop from %d to %d. of size %d\n", prevSteps, step-1, step-prevSteps)
			return prevSteps, step, monitoredPulses
		}
		seenSubgraphStates[subgraphState] = step
		if len(monitoredPulsesOfTheStep) > 0 {
			monitoredPulses[step] = monitoredPulsesOfTheStep
		}
		step++
	}
	// (unreachable panic("") removed — the loop only exits via return)
}
// FilterMonitoredPulses reduces the recorded pulses to the interesting ones:
// every HighPulse, plus the first LowPulse that follows a HighPulse (the
// moment a conjunction's remembered-high window closes). Steps left with no
// interesting pulses are removed from the map, in place.
//
// Fix: the afterHigh flag carries state from one step to the next, but the
// original ranged directly over the map — Go map iteration order is
// randomized, which made the output differ between runs (the "why is my
// filtering unstable?" mystery in the notes). Steps are now visited in
// ascending order.
func FilterMonitoredPulses(requestedPulses map[int][]PulseType) {
	steps := make([]int, 0, len(requestedPulses))
	for step := range requestedPulses {
		steps = append(steps, step)
	}
	slices.Sort(steps)
	afterHigh := false
	for _, step := range steps {
		pulses := requestedPulses[step]
		processedPulses := make([]PulseType, 0)
		for _, pulse := range pulses {
			if pulse == HighPulse {
				processedPulses = append(processedPulses, pulse)
				afterHigh = true
				continue
			}
			if afterHigh {
				// First low after a high run.
				processedPulses = append(processedPulses, pulse)
				afterHigh = false
			}
		}
		if len(processedPulses) > 0 {
			requestedPulses[step] = processedPulses
		} else {
			delete(requestedPulses, step)
		}
	}
}
// loop math
// 2023/12/20 12:35:08 >>> searching for loop of sr
// 2023/12/20 12:35:08 >>> found loop from 1 to 4028. of size 4028
// 2023/12/20 12:35:08 the pulses: +map[4026:[high low]]
// 2023/12/20 12:35:08 >>> searching for loop of ch
// 2023/12/20 12:35:08 >>> found loop from 0 to 3923. of size 3924
// 2023/12/20 12:35:08 the pulses: +map[3817:[high low]]
// 2023/12/20 12:35:08 >>> searching for loop of hd
// 2023/12/20 12:35:09 >>> found loop from 0 to 3793. of size 3794
// 2023/12/20 12:35:09 the pulses: +map[3427:[high low]]
// 2023/12/20 12:35:09 >>> searching for loop of bx
// 2023/12/20 12:35:09 >>> found loop from 0 to 3739. of size 3740
// 2023/12/20 12:35:09 the pulses: +map[3211:[high low]]
// CalcCommonStep returns the first button-press step on which all four input
// loops simultaneously emit their [high low] pulse pair.
//
// Each loop fires on curStep and then every loopLength presses after; for
// every loop curStep == loopLength-1, i.e. it fires on steps ≡ -1
// (mod loopLength). The first common step is therefore lcm(lengths) - 1.
//
// Fix: the original brute force computed allSameStep once BEFORE the loop and
// never refreshed it inside, so `for !allSameStep` never terminated; and even
// when fixed, advancing the minimum by one loop length per iteration would
// need ~5e10 iterations. The closed-form LCM computation below yields the
// identical value instantly.
func CalcCommonStep() int {
	type LoopInfo struct {
		loopLength, initialDesiredStep int
		curStep                        int
	}
	loopA := &LoopInfo{4027, 4026, 4026}
	loopB := &LoopInfo{3923, 3922, 3922}
	loopC := &LoopInfo{3793, 3792, 3792}
	loopD := &LoopInfo{3739, 3211, 3738}
	loops := []*LoopInfo{loopA, loopB, loopC, loopD}
	gcd := func(a, b int) int {
		for b != 0 {
			a, b = b, a%b
		}
		return a
	}
	// lcm of all loop lengths (divide before multiply to limit overflow).
	common := 1
	for _, l := range loops {
		common = common / gcd(common, l.loopLength) * l.loopLength
	}
	// Jump every loop straight to the common step (≡ -1 mod every length,
	// and ≥ every start since common-1 is astronomically larger).
	for _, l := range loops {
		l.curStep = common - 1
	}
	log.Printf(">> common step for all loops is %d", common-1)
	minLoop := slices.MinFunc(loops, func(a *LoopInfo, b *LoopInfo) int {
		return cmp.Compare(a.curStep, b.curStep)
	})
	return minLoop.curStep
}

277
day20/modules.go Normal file
View File

@@ -0,0 +1,277 @@
package day20
import (
"fmt"
"regexp"
"slices"
"strings"
)
// PulseType distinguishes the two pulse flavours sent between modules.
type PulseType int

const (
	HighPulse PulseType = iota // 0
	LowPulse                   // 1
)

// String renders the pulse for logs: "high" for HighPulse, "low" for LowPulse.
// Indexing preserves the original panic on any other value.
func (pt PulseType) String() string {
	names := [...]string{HighPulse: "high", LowPulse: "low"}
	return names[pt]
}
// Signal is one pulse in flight: a PulseType travelling From one module To
// another, identified by module name.
type Signal struct {
	To, From string
	PulseType PulseType
}
// Module is the common behaviour of every node in the pulse network.
type Module interface {
	// Receive processes one incoming signal and returns the signals to send
	// next, in order.
	Receive(s Signal) []Signal
	// Outputs lists the names of downstream modules.
	Outputs() []string
	// StateSnapshot returns a string encoding the module's mutable state,
	// used to detect repeated network states.
	StateSnapshot() string
	// MermaidFlow renders this module's edges as mermaid flowchart lines.
	MermaidFlow() string
}
// Modules

// FlipFlop ("%name" in the config) toggles IsOn on every low pulse and
// forwards its new state to OutputNames; high pulses are ignored (see Receive).
type FlipFlop struct {
	Name string
	OutputNames []string
	// IsOn is the flip-flop's current state; the zero value (off) is the
	// initial state.
	IsOn bool
}
// Receive implements flip-flop pulse handling: a high pulse is ignored; a low
// pulse toggles the state and broadcasts the new state to every output —
// high when the flip-flop just turned on, low when it just turned off.
func (ff *FlipFlop) Receive(s Signal) []Signal {
	if s.PulseType == HighPulse {
		return []Signal{}
	}
	ff.IsOn = !ff.IsOn
	pulse := LowPulse
	if ff.IsOn {
		pulse = HighPulse
	}
	signals := make([]Signal, 0, len(ff.OutputNames))
	for _, name := range ff.OutputNames {
		signals = append(signals, Signal{From: ff.Name, To: name, PulseType: pulse})
	}
	return signals
}
// Outputs lists the downstream module names.
func (ff *FlipFlop) Outputs() []string {
	return ff.OutputNames
}

// String renders name, current state and outputs for logs.
func (ff *FlipFlop) String() string {
	return fmt.Sprintf("[flip-flop '%s' (on: %t) -> %s]", ff.Name, ff.IsOn, ff.OutputNames)
}

// StateSnapshot reuses String, which already encodes the mutable IsOn bit.
func (ff *FlipFlop) StateSnapshot() string {
	return ff.String()
}

// MermaidFlow emits one "from --> to" mermaid edge per output.
func (ff *FlipFlop) MermaidFlow() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, toName := range ff.OutputNames {
		fmt.Fprintf(&b, "%s --> %s\n", ff.Name, toName)
	}
	return b.String()
}
// IsLineFlipFlop reports whether a config line declares a flip-flop
// module ("%name -> ...").
func IsLineFlipFlop(line string) bool {
	return len(line) > 0 && line[0] == '%'
}
// flipFlopRe captures the module name and the comma-separated output list of
// a flip-flop line such as "%a -> inv, con". Compiled once at package init
// rather than on every call (the original recompiled per line).
var flipFlopRe = regexp.MustCompile(`%(?P<NAME>\D+) -> (?P<OUTPUTS>.+)`)

// ParseFlipFlop builds a FlipFlop from its config line. A non-matching line
// panics (index out of range), same as before.
func ParseFlipFlop(line string) (result FlipFlop) {
	matches := flipFlopRe.FindStringSubmatch(line)
	result.Name = matches[1]
	result.OutputNames = strings.Split(matches[2], ", ")
	return
}
// Broadcast ("broadcaster" in the config) relays every incoming pulse,
// unchanged, to all OutputNames. It holds no mutable state.
type Broadcast struct {
	OutputNames []string
}
// Receive forwards the incoming pulse type unchanged to every output,
// stamping "broadcast" as the sender.
func (b *Broadcast) Receive(s Signal) (result []Signal) {
	for _, name := range b.OutputNames {
		out := Signal{From: "broadcast", To: name, PulseType: s.PulseType}
		result = append(result, out)
	}
	return
}
// Outputs lists the downstream module names.
func (b *Broadcast) Outputs() []string {
	return b.OutputNames
}

// String renders the broadcaster and its outputs for logs.
func (b *Broadcast) String() string {
	return fmt.Sprintf("[broadcast -> %+v]", b.OutputNames)
}

// StateSnapshot reuses String; a broadcaster has no mutable state, so the
// snapshot is constant.
func (b *Broadcast) StateSnapshot() string {
	return b.String()
}

// MermaidFlow emits one "broadcast --> to" mermaid edge per output.
func (b *Broadcast) MermaidFlow() string {
	var sb strings.Builder
	sb.WriteString("\n")
	for _, toName := range b.OutputNames {
		fmt.Fprintf(&sb, "%s --> %s\n", "broadcast", toName)
	}
	return sb.String()
}
// IsLineBroadcast reports whether a config line declares the broadcaster
// module ("broadcaster -> ...").
func IsLineBroadcast(line string) bool {
	const prefix = "broadcaster"
	return len(line) >= len(prefix) && line[:len(prefix)] == prefix
}
// broadcastRe captures the comma-separated output list of the broadcaster
// line. Compiled once at package init rather than on every call.
var broadcastRe = regexp.MustCompile(`broadcaster -> (?P<OUTPUTS>.+)`)

// ParseBroadcast builds a Broadcast from its config line
// ("broadcaster -> a, b, c"). A non-matching line panics, same as before.
func ParseBroadcast(line string) (result Broadcast) {
	matches := broadcastRe.FindStringSubmatch(line)
	result.OutputNames = strings.Split(matches[1], ", ")
	return
}
// Conjunction ("&name" in the config) remembers the most recent pulse type
// from each of its inputs and emits low when all remembered inputs are high,
// high otherwise (see Receive).
type Conjunction struct {
	Name string
	OutputNames []string
	// MostRecentPulseFromInputIsHigh maps input module name -> whether its
	// last pulse was high. Seeded to false (low) by RegisterInputs.
	MostRecentPulseFromInputIsHigh map[string]bool
}
// Receive implements conjunction behaviour: first record the incoming pulse
// type for its sender, then send low to every output if all remembered inputs
// are high, otherwise send high.
func (c *Conjunction) Receive(s Signal) (result []Signal) {
	c.MostRecentPulseFromInputIsHigh[s.From] = s.PulseType == HighPulse
	allHigh := true
	for _, rememberedHigh := range c.MostRecentPulseFromInputIsHigh {
		if !rememberedHigh {
			allHigh = false
			break
		}
	}
	pulse := HighPulse
	if allHigh {
		pulse = LowPulse
	}
	for _, name := range c.OutputNames {
		out := Signal{From: c.Name, To: name, PulseType: pulse}
		result = append(result, out)
	}
	return
}
// Outputs lists the downstream module names.
func (c *Conjunction) Outputs() []string {
	return c.OutputNames
}

// RegisterInputs seeds the input memory with a low (false) entry for every
// module whose outputs include this conjunction. Inputs are not part of the
// config line, so this must run after all modules are parsed.
func (c *Conjunction) RegisterInputs(allModules map[string]Module) {
	for name, module := range allModules {
		if slices.Contains(module.Outputs(), c.Name) {
			c.MostRecentPulseFromInputIsHigh[name] = false
		}
	}
}

// String renders the conjunction's name and outputs for logs.
func (c *Conjunction) String() string {
	return fmt.Sprintf("[conjunction '%s' -> %+v]", c.Name, c.OutputNames)
}

// StateSnapshot includes the per-input memory — the conjunction's only
// mutable state.
func (c *Conjunction) StateSnapshot() string {
	return fmt.Sprintf("[conjunction '%s' -> %+v]", c.Name, c.MostRecentPulseFromInputIsHigh)
}

// MermaidFlow declares the node with {curly} shape plus one edge per output.
func (c *Conjunction) MermaidFlow() string {
	var b strings.Builder
	b.WriteString("\n")
	fmt.Fprintf(&b, "%s{%s}\n", c.Name, c.Name)
	for _, toName := range c.OutputNames {
		fmt.Fprintf(&b, "%s --> %s\n", c.Name, toName)
	}
	return b.String()
}
// IsLineConjunction reports whether a config line declares a conjunction
// module ("&name -> ...").
func IsLineConjunction(line string) bool {
	return strings.IndexByte(line, '&') == 0
}
// conjunctionRe captures the module name and the comma-separated output list
// of a conjunction line such as "&inv -> b". Compiled once at package init
// rather than on every call.
var conjunctionRe = regexp.MustCompile(`&(?P<NAME>\D+) -> (?P<OUTPUTS>.+)`)

// ParseConjunction builds a Conjunction from its config line with an empty
// input memory (fill it with RegisterInputs once all modules exist).
// A non-matching line panics, same as before.
func ParseConjunction(line string) (result Conjunction) {
	matches := conjunctionRe.FindStringSubmatch(line)
	result.Name = matches[1]
	result.OutputNames = strings.Split(matches[2], ", ")
	result.MostRecentPulseFromInputIsHigh = make(map[string]bool)
	return
}
// Button is the entry point of the network: pressing it sends a single low
// pulse to the broadcaster. It holds no state.
type Button struct{}

// Receive always answers with one low pulse addressed to "broadcast",
// regardless of the incoming signal.
func (b *Button) Receive(s Signal) []Signal {
	return []Signal{{To: "broadcast", From: "button", PulseType: LowPulse}}
}

// Outputs: the button feeds only the broadcaster.
func (b *Button) Outputs() []string {
	return []string{"broadcast"}
}

// String renders a fixed label for logs.
func (b *Button) String() string {
	return "[button]"
}

// StateSnapshot is constant — the button is stateless.
func (b *Button) StateSnapshot() string {
	return b.String()
}

// MermaidFlow draws the single button edge.
func (b *Button) MermaidFlow() string {
	return "button --> broadcast\n"
}
// Output is a terminal sink: it absorbs pulses and never emits any.
type Output struct{}

// Receive swallows the incoming signal and produces nothing.
func (o *Output) Receive(s Signal) []Signal {
	return []Signal{}
}

// Outputs: a sink has no downstream modules.
func (o *Output) Outputs() []string {
	return []string{}
}

// String renders a fixed label for logs.
func (o *Output) String() string {
	return "[output]"
}

// StateSnapshot is constant — the sink is stateless.
func (o *Output) StateSnapshot() string {
	return o.String()
}

// MermaidFlow: no edges to draw.
func (o *Output) MermaidFlow() string {
	return ""
}

61
day20/modules_test.go Normal file
View File

@@ -0,0 +1,61 @@
package day20
import (
"slices"
"testing"
)
// TestParseFlipFlop checks detection and parsing of a flip-flop config line.
func TestParseFlipFlop(t *testing.T) {
	flipFlopLine := "%a -> inv, con"
	if !IsLineFlipFlop(flipFlopLine) {
		t.Errorf("line '%s' should be flip flop\n", flipFlopLine)
	}
	module := ParseFlipFlop(flipFlopLine)
	t.Logf("got module %+v\n", module)
	// Previously the parse result was only logged; assert it as well,
	// mirroring TestParseBroadcast.
	if module.Name != "a" {
		t.Errorf("got unexpected name: %q\n", module.Name)
	}
	if !slices.Equal(module.OutputNames, []string{"inv", "con"}) {
		t.Errorf("got unexpected outputs: %+v\n", module.OutputNames)
	}
}
// TestParseBroadcast checks detection of the broadcaster line and that the
// comma-separated output names are split correctly.
func TestParseBroadcast(t *testing.T) {
	broadcastLine := "broadcaster -> a, b, c"
	if !IsLineBroadcast(broadcastLine) {
		t.Error("expected line to pass broadcast check")
	}
	module := ParseBroadcast(broadcastLine)
	t.Logf("got module %+v\n", module)
	if !slices.Equal(module.OutputNames, []string{"a", "b", "c"}) {
		t.Errorf("got unexpected outputs: %+v\n", module.OutputNames)
	}
}
// TestParseConjunction checks detection and parsing of a conjunction line.
func TestParseConjunction(t *testing.T) {
	conjunctionLine := "&inv -> b"
	if !IsLineConjunction(conjunctionLine) {
		// was "should be flip flop" — copy-paste from the flip-flop test
		t.Errorf("line '%s' should be conjunction\n", conjunctionLine)
	}
	module := ParseConjunction(conjunctionLine)
	t.Logf("got module %+v\n", module)
	// was `module.Name != "inv" || slices.Equal(...)` — an inverted, OR-ed
	// condition that held even when parsing was wrong, so the test could
	// never fail on a bad Name.
	moduleAsExpected := module.Name == "inv" && slices.Equal(module.OutputNames, []string{"b"})
	if !moduleAsExpected {
		t.Errorf("unexpected parse result: %+v", module)
	}
}
// TestReadManyModules smoke-tests ReadModules on both example files.
// The parsed module maps are only logged; nothing is asserted.
func TestReadManyModules(t *testing.T) {
	filename := "example1"
	modules := ReadModules(filename)
	t.Logf("> read example1:\n%+v", modules)
	filename2 := "example2"
	modules2 := ReadModules(filename2)
	t.Logf("> read example2:\n%+v", modules2)
}
// TestConjunctionRegisterInputs reads example2 and checks that RegisterInputs
// discovers the inputs of the "inv" conjunction. The memory map is only
// logged, not asserted.
// NOTE(review): the log message says "$inv" although conjunction lines use
// the "&" prefix — appears to be a typo in the message only.
func TestConjunctionRegisterInputs(t *testing.T) {
	filename := "example2"
	modules := ReadModules(filename)
	// Type assertion panics (failing the test) if "inv" is not a Conjunction.
	conjunctionInv := modules["inv"].(*Conjunction)
	conjunctionInv.RegisterInputs(modules)
	t.Logf("after registering inputs on $inv : %+v", conjunctionInv.MostRecentPulseFromInputIsHigh)
}

178
day20/my-mermaid.mmd Normal file
View File

@@ -0,0 +1,178 @@
flowchart LR
gm --> tj
gm --> gf
nn --> ff
nn --> db
broadcast --> sr
broadcast --> ch
broadcast --> hd
broadcast --> bx
qm --> gm
xm --> db
pf --> qx
ln --> gf
ln --> qq
pm --> vc
pm --> qk
rz --> qx
rz --> cv
gf{gf}
gf --> fj
gf --> qm
gf --> xn
gf --> sr
fn --> pr
fn --> gf
lc --> gf
lc --> fn
sr --> gf
sr --> vl
jz --> qj
jz --> db
th{th}
th --> rx
cb --> kt
bf --> qx
bf --> pf
qj --> xm
qj --> db
ch --> db
ch --> mc
ff --> pl
pr --> gf
zd --> ln
zd --> gf
qn{qn}
qn --> th
kt --> qx
kt --> rz
fj --> zd
tj --> lc
tj --> gf
bx --> qx
bx --> qp
cr --> gx
cr --> vc
vm --> cl
nh --> hv
qk --> vc
jd --> qx
jd --> vm
hd --> vc
hd --> nh
sf --> bp
cl --> qx
cl --> bf
vc{vc}
vc --> lr
vc --> hd
vc --> ks
vc --> qn
vc --> gx
vc --> nh
vc --> hv
bp --> db
bp --> jz
cc --> nn
lr --> sb
qq --> qm
qq --> gf
db{db}
db --> ff
db --> ds
db --> sf
db --> ch
db --> cc
db --> xf
vl --> gf
vl --> fj
ks --> vz
xn{xn}
xn --> th
xf{xf}
xf --> th
pl --> sf
pl --> db
zl{zl}
zl --> th
vz --> cr
vz --> vc
gx --> cd
mc --> ds
mc --> db
qp --> cb
qp --> qx
button --> broadcast
cv --> xz
xz --> jd
qx{qx}
qx --> cb
qx --> cv
qx --> bx
qx --> xz
qx --> vm
qx --> zl
hv --> lr
cd --> pm
cd --> vc
sb --> ks
sb --> vc
ds --> cc

1
day20/my-mermaid.mmd.svg Normal file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 141 KiB

180
day20/notes.org Normal file
View File

@@ -0,0 +1,180 @@
#+title: Notes
* ok. only thought i had was to simulate the thing
have single executor, that takes head of the queue,
signals would be (to, from, type)
take 'to' out of the map, call it's 'process(from, type)'
and different types of executors would implement this differently.
and return a slice of new signals in order, to be appended.
if queue is empty - the single button press is propagated and all is well.
we will take snapshot of state, String() repr of all executors should be enough,
and save amount of signals sent so far
* also, i suppose i'd want to have entry points for fiddling with single executors to be test cases.
* modules to implement
** DONE Broadcast
** DONE Flip-Flop
** DONE Conjunction
** DONE Button
* i guess each module could test if a string is a representation of this type
and would be able to parse it? into it's own struct?
well, those are just functions, since only methods are associated, so ok
* how do i run single tests?
** running tests from the module
#+begin_src bash
go test sunshine.industries/aoc2023/day20 -v
#+end_src
have file with `_test.go` and `func Test...(t *testing.T) {}` name
** running single test
#+begin_src bash
go test sunshine.industries/aoc2023/day20 -v -run TestParseFlipFlop
#+end_src
* yikes. if i don't know the 'inputs' to the conjunction, don't know how to check for 'all high'
let's add registering after the map is read.
* well. for part 2 brute force doesn't work.
how could i examine inputs to the 'rx' to see when it will receive 'low'?
i suppose inputs could be on prime cycles, which would align to all required values only on a very big step?
let's do some kind of visualization?
how would i do graphql or mermaidjs?
flowchart in mermaid should be it
go run . > day20/my-mermaid.mmd
* so, looking at the thingy.
rx is produced by &th
which has inputs of
11:&xn -> th
14:&qn -> th
16:&xf -> th
32:&zl -> th
for rx to receive a low pulse.
&th should receive High Pulse, while all other inputs alse remembered as high.
this is not too easy.
but first let's check if loops over
- xn
- qn
- xh
- zl
are manageable.
well.
i'll need to what?
not only track the inputs of the th.
but state of the 'subloop'
and they are separate
is there an easy way to collect the names from each subloop?
i guess i could write a collect.
from each of outputs of 'broadcast'
then have a function that checks the loop size of each subgraph
but i will also need to figure out on which steps output of the loop is remembered as High \ Low
let's start with loop size? and modify things if need be
** starting points of loops:
children of the broadcast:
broadcaster -> sr, ch, hd, bx
sr, ch, hd, bx
** ok. some data here
2023/12/20 12:05:06 >>> searching for loop of sr
2023/12/20 12:05:06 >>> found loop from 1 to 4028. of size 4028
2023/12/20 12:05:06 >>> searching for loop of ch
2023/12/20 12:05:06 >>> found loop from 0 to 3923. of size 3924
2023/12/20 12:05:06 >>> searching for loop of hd
2023/12/20 12:05:06 >>> found loop from 0 to 3793. of size 3794
2023/12/20 12:05:06 >>> searching for loop of bx
2023/12/20 12:05:07 >>> found loop from 0 to 3739. of size 3740
one of these guys starts from 1, not from 0.
this is unusual, but OK
now, i want to figure out what are steps where output for the each cycle is 'considered as saved as 1'
i guess i could just directly probe the
`th`
on each step up to 4028
but also, if the signals from those are rare - it would be easier to collect the steps of each signal.
** ok. i collected 'monitored pulses' and i see lots of 'Low'
what i want is all "high" and first low after those.
** oh wow, this crap
2023/12/20 12:30:05 >>> searching for loop of ch
2023/12/20 12:30:05 >>> found loop from 1 to 3924. of size 3924
2023/12/20 12:30:05 the pulses
+map[3922:[high low]]
2023/12/20 12:30:05 >>> searching for loop of hd
2023/12/20 12:30:05 >>> found loop from 0 to 3793. of size 3794
2023/12/20 12:30:05 the pulses
+map[3661:[high low]]
2023/12/20 12:30:05 >>> searching for loop of bx
2023/12/20 12:30:05 >>> found loop from 0 to 3739. of size 3740
2023/12/20 12:30:05 the pulses
+map[3499:[high low]]
2023/12/20 12:30:05 >>> searching for loop of sr
2023/12/20 12:30:05 >>> found loop from 0 to 4027. of size 4028
2023/12/20 12:30:05 the pulses
+map[624:[high low]]
*** but at least these 'high low' are all on same step.
now with info on loop start, place of pulse in the loop and length of loops,
what is the step so that those [high low] occur on same step num?
*** math should be:
3922 + LOOP_N * (LOOP_LEN)
** wait i now get different output?
2023/12/20 12:57:50 >>> searching for loop of bx
2023/12/20 12:57:50 >>> found loop from 1 to 3739. of size 3739
2023/12/20 12:57:50 the pulses: +map[3738:[high low]]
2023/12/20 12:57:50 >>> searching for loop of sr
2023/12/20 12:57:50 >>> found loop from 0 to 4026. of size 4027
2023/12/20 12:57:50 the pulses: +map[286:[high low]]
2023/12/20 12:57:50 >>> searching for loop of ch
2023/12/20 12:57:50 >>> found loop from 0 to 3922. of size 3923
2023/12/20 12:57:50 the pulses: +map[78:[high low]]
2023/12/20 12:57:50 >>> searching for loop of hd
2023/12/20 12:57:51 >>> found loop from 0 to 3792. of size 3793
2023/12/20 12:57:51 the pulses: +map[3481:[high low]]
** why is my filtering unstable?
** let's check for single loop?
** yikes. but maybe
2023/12/20 13:08:52 >>> searching for loop of sr
2023/12/20 13:08:52 >>> found loop from 2 to 4028. of size 4027
2023/12/20 13:08:52 the pulses: +map[4027:[high low]]
2023/12/20 13:09:23 >>> searching for loop of ch
2023/12/20 13:09:23 >>> found loop from 2 to 3924. of size 3923
2023/12/20 13:09:23 the pulses: +map[3923:[high low]]
2023/12/20 13:09:37 >>> searching for loop of hd
2023/12/20 13:09:37 >>> found loop from 2 to 3794. of size 3793
2023/12/20 13:09:37 the pulses: +map[3793:[high low]]
2023/12/20 13:09:49 >>> searching for loop of bx
2023/12/20 13:09:49 >>> found loop from 2 to 3740. of size 3739
2023/12/20 13:09:49 the pulses: +map[3739:[high low]]
all loops start from the same place.
i could just do 1 press. then the loop starts. and all of them have [high low] on last place.
so it's going to be 1 + least common ...
** aaand, i just did the least common multiple of the cycle lengths.
and i didn't even add 1. which is strange. i guess i did have an 'off-by-one'
crap
*** yeah. i can start from step 1. but i need to first update State then check for previous
all loops are from step 1.
it's just for some reason code was unstable when i was searching for all
*** answer is 224046542165867

105
day20/propagation_test.go Normal file
View File

@@ -0,0 +1,105 @@
package day20
import (
"log"
"testing"
)
// TestPropagateButtonPressExample1 presses the button once on example1 and
// checks the expected totals of 8 low and 4 high pulses.
// NOTE(review): InitStuffs is not called here, unlike the example2 tests —
// confirm whether example1 works without registering conjunction inputs.
func TestPropagateButtonPressExample1(t *testing.T) {
	filename := "example1"
	modules := ReadModules(filename)
	t.Log("got modules:\n", modules)
	low, high := PropagateButtonPress(modules, 0)
	t.Logf("got low %d and high %d\n", low, high)
	t.Log("modules after single button press:\n", modules)
	success := low == 8 && high == 4
	if !success {
		t.Errorf("expected low 8 got %d, high 4 got %d", low, high)
	}
}
// TestPropagateButtonPressExample2 presses the button once on example2
// (after registering conjunction inputs via InitStuffs) and checks the
// expected totals of 4 low and 4 high pulses.
func TestPropagateButtonPressExample2(t *testing.T) {
	filename := "example2"
	modules := ReadModules(filename)
	t.Log("got modules:\n", modules)
	InitStuffs(modules)
	low, high := PropagateButtonPress(modules, 0)
	t.Logf("got low %d and high %d\n", low, high)
	t.Log("modules after single button press:\n", modules)
	success := low == 4 && high == 4
	if !success {
		t.Errorf("expected low 4 got %d, high 4 got %d", low, high)
	}
}
// TestPropagateButtonPressExample2FourSteps presses the button four times on
// example2, checking the pulse counts of each press and that the network
// returns to its initial state after the fourth press (example2 is periodic
// with period 4). The per-press boilerplate is folded into a closure.
func TestPropagateButtonPressExample2FourSteps(t *testing.T) {
	filename := "example2"
	modules := ReadModules(filename)
	t.Log("got modules:\n", modules)
	InitStuffs(modules)
	initialModulesState := ModulesState(modules)

	// pressAndCheck performs one button press and verifies its pulse counts.
	pressAndCheck := func(press, wantLow, wantHigh int) {
		t.Helper()
		low, high := PropagateButtonPress(modules, 0)
		t.Logf("got low %d and high %d\n", low, high)
		t.Logf("#%d button press:\n%+v", press, modules)
		if low != wantLow || high != wantHigh {
			t.Errorf("expected low %d got %d, high %d got %d", wantLow, low, wantHigh, high)
		}
	}

	pressAndCheck(1, 4, 4)

	pressAndCheck(2, 4, 2)
	if initialModulesState == ModulesState(modules) {
		t.Error("initial state should be different from second")
	}

	pressAndCheck(3, 5, 3)
	if initialModulesState == ModulesState(modules) {
		t.Error("initial state should be different from third")
	}

	pressAndCheck(4, 4, 2)
	lastState := ModulesState(modules)
	log.Print("initial modules state:\n", initialModulesState)
	log.Print("after 4 steps modules state:\n", lastState)
	if initialModulesState != lastState {
		t.Error("expected state to be same after 4 steps for example 2")
	}
}
// TestExample1TheQuestion runs the full part-1 count on example1 and logs
// the product of the low/high pulse totals (the puzzle answer).
func TestExample1TheQuestion(t *testing.T) {
	modules := ReadModules("example1")
	InitStuffs(modules)
	lowCount, highCount := Count10000ButtonPresses(modules)
	t.Log("got low and high: ", lowCount, highCount)
	t.Log("response is: ", lowCount*highCount)
}

241
day20/pulsePropagation.go Normal file
View File

@@ -0,0 +1,241 @@
package day20
import (
"fmt"
"log"
"os"
"strings"
)
// Run is the day-20 entry point: it loads the module graph and prints it as
// a mermaid flowchart. Earlier counting/LCM experiments are kept commented
// out for reference.
func Run() int {
	filename := "day20/input"
	modules := ReadModules(filename)
	InitStuffs(modules)
	// log.Print("got modules:\n", modules)
	// low, high := Count10000ButtonPresses(modules)
	// log.Printf("got low %d and high %d\n", low, high)
	// CheckSubgraphsStuff(filename)
	fmt.Print(AllMermaidFlowChard(modules))
	var result int
	// result = CalcCommonStep()
	return result
}
// CheckSubgraphsStuff inspects the four independent counter loops feeding the
// final conjunction: for each (loop start -> loop sink) pair it re-reads the
// input, isolates the subgraph reachable from the start, and logs the pulses
// the sink emits so the loop lengths can be read off.
func CheckSubgraphsStuff(filename string) {
	// loop start and loop sink, read off the flowchart of the input
	sinkByStart := map[string]string{
		"sr": "xn",
		"ch": "xf",
		"hd": "qn",
		"bx": "zl",
	}
	for start, sink := range sinkByStart {
		// re-read the modules so every loop search begins from pristine state
		allModules := ReadModules(filename)
		InitStuffs(allModules)
		log.Printf(">>> searching for loop of %s", start)
		visited := make(map[string]any)
		loopModules := TransitiveOutputs(start, allModules, visited)
		_, _, requestedPulses := FindSubGraphLoopLength(loopModules, allModules, sink)
		FilterMonitoredPulses(requestedPulses)
		log.Printf("the pulses: +%v", requestedPulses)
	}
	// all cycles start from button press 1 and emit [high low] on the last
	// step of their cycle, so the overall answer is the LCM of their lengths
}
// Count10000ButtonPresses presses the button `count` times, summing low and
// high pulse counts across all presses. If the module state ever repeats, it
// extrapolates the totals from the detected cycle instead of simulating on.
// NOTE(review): despite the name, count is 1000 (the part-1 requirement) —
// confirm whether the name or the constant is the stale one.
func Count10000ButtonPresses(modules map[string]Module) (lowSignalsCount, highSignalsCount int) {
	count := 1000
	// totals accumulated up to (and including) the press that produced a state
	type counts struct {
		low, high int
		step      int
	}
	countsAfterState := make(map[string]counts)
	// after each button press check if reached already known state - cycle is present.
	// then calculate amount of signals before the loop - how much was on that previous state.
	// then diff - how much added after the loop
	// for now let's just print the info on loop
	for i := 0; i < count; i++ {
		if i%10000 == 0 {
			log.Println("done button presses: ", i)
		}
		stepLow, stepHigh := PropagateButtonPress(modules, i)
		lowSignalsCount += stepLow
		highSignalsCount += stepHigh
		// log.Printf("after step %d low is %d and high is %d", i, lowSignalsCount, highSignalsCount)
		state := ModulesState(modules)
		prevCounts, found := countsAfterState[state]
		if found {
			loopLen := i - prevCounts.step
			log.Printf(">>> found loop. from step %d to step %d. of len %d",
				prevCounts.step, i, loopLen)
			// NOTE(review): this extrapolation drops the presses before the
			// cycle and any count%loopLen remainder — verify if a loop fires.
			multiplication := count / loopLen
			lowCountInCycle := lowSignalsCount - prevCounts.low
			highCountInCycle := highSignalsCount - prevCounts.high
			lowSignalsCount = lowCountInCycle * multiplication
			highSignalsCount = highCountInCycle * multiplication
			return
		}
		countsAfterState[state] = counts{stepLow, stepHigh, i}
	}
	return
}
// PropagateButtonPress simulates one button press: a single low pulse into
// "broadcast" followed by breadth-first propagation until the queue drains.
// It returns how many low and high pulses were sent in total (the button's
// own pulse included). i is only used in the rx panic message.
func PropagateButtonPress(modules map[string]Module, i int) (lowSignalsCount, highSignalsCount int) {
	queue := []Signal{{From: "button", To: "broadcast", PulseType: LowPulse}}
	lowSignalsCount = 1
	for len(queue) > 0 {
		signal := queue[0]
		queue = queue[1:]
		// log.Printf("%s -%s-> %s", signal.From, signal.PulseType, signal.To)
		receiver, ok := modules[signal.To]
		if !ok {
			// untracked sink; a low pulse into "rx" is the part-2 stop signal
			if signal.To == "rx" && signal.PulseType == LowPulse {
				panic(fmt.Sprintf("getting low signal to rx, on step %d", i))
			}
			continue
		}
		emitted := receiver.Receive(signal)
		if len(emitted) == 0 {
			continue
		}
		queue = append(queue, emitted...)
		// a module emits pulses of a single type per activation
		if emitted[0].PulseType == HighPulse {
			highSignalsCount += len(emitted)
		} else {
			lowSignalsCount += len(emitted)
		}
	}
	return
}
// PropagateButtonPressWithMonitor performs one button press and records, in
// emission order, the pulse type of every signal sent by the module named
// monitorAllOutputsOf. i is only used in the rx panic message.
func PropagateButtonPressWithMonitor(modules map[string]Module, i int, monitorAllOutputsOf string) []PulseType {
	observed := make([]PulseType, 0)
	queue := []Signal{{From: "button", To: "broadcast", PulseType: LowPulse}}
	for len(queue) > 0 {
		signal := queue[0]
		queue = queue[1:]
		if signal.From == monitorAllOutputsOf {
			observed = append(observed, signal.PulseType)
		}
		receiver, ok := modules[signal.To]
		if !ok {
			// untracked sink; a low pulse into "rx" is the part-2 stop signal
			if signal.To == "rx" && signal.PulseType == LowPulse {
				panic(fmt.Sprintf("getting low signal to rx, on step %d", i))
			}
			continue
		}
		queue = append(queue, receiver.Receive(signal)...)
	}
	return observed
}
// ReadModules parses the module list from filename into a name -> Module map,
// adding the synthetic "button" and "output" modules. A button press sends a
// single low pulse directly to "broadcast".
func ReadModules(filename string) map[string]Module {
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(fmt.Sprint("error reading file: ", filename))
	}
	modules := make(map[string]Module)
	for _, line := range strings.Split(strings.TrimSpace(string(bytes)), "\n") {
		switch {
		case IsLineBroadcast(line):
			m := ParseBroadcast(line)
			modules["broadcast"] = &m
		case IsLineFlipFlop(line):
			m := ParseFlipFlop(line)
			modules[m.Name] = &m
		case IsLineConjunction(line):
			m := ParseConjunction(line)
			modules[m.Name] = &m
		}
	}
	button := Button{}
	modules["button"] = &button
	output := Output{}
	modules["output"] = &output
	return modules
}
// InitStuffs wires every Conjunction module with its input list, which can
// only be done once the whole module map has been read.
func InitStuffs(allModules map[string]Module) {
	for _, m := range allModules {
		conj, isConjunction := m.(*Conjunction)
		if isConjunction {
			conj.RegisterInputs(allModules)
		}
	}
}
// ModulesState returns a printable fingerprint of every module's state.
// fmt prints map entries sorted by key, so the result is deterministic:
// https://stackoverflow.com/a/54524991/2788805
func ModulesState(modules map[string]Module) string {
	snapshots := make(map[string]string, len(modules))
	for name, m := range modules {
		snapshots[name] = m.StateSnapshot()
	}
	return fmt.Sprint(snapshots)
}
// AllMermaidFlowChard renders the whole module graph as a mermaid
// "flowchart TD" document, one fragment per module. (Name kept as-is for
// existing callers, despite the typo.)
// Uses strings.Builder instead of string += to avoid quadratic copying.
func AllMermaidFlowChard(allModules map[string]Module) (result string) {
	var b strings.Builder
	b.WriteString("flowchart TD\n")
	for _, module := range allModules {
		b.WriteString(module.MermaidFlow())
	}
	return b.String()
}

11
day21/example Normal file
View File

@@ -0,0 +1,11 @@
...........
.....###.#.
.###.##..#.
..#.#...#..
....#.#....
.##..S####.
.##..#...#.
.......##..
.##.#.####.
.##..##.##.
...........

11
day21/example1 Normal file
View File

@@ -0,0 +1,11 @@
...........
...........
...........
...........
...........
.....S.....
...........
...........
...........
...........
...........

105
day21/notes.org Normal file
View File

@@ -0,0 +1,105 @@
#+title: Notes
* part 1
so we aren't looking for minimal distance.
but all plots which are end to any path of length 'steps left'
so, i have to follow all possible paths to the end?
or. length of 6 and all even - because i could be doing <- ->
but i could be doing loop around that would increase path len by odd number
let's just make direct recursive thing.
create set of all reachable by n,
* oh, the part 2.
i suppose this 'infinite' garden could be managed with my 'neighbors' work with 'out of field'
fairly easy
but what about sizes of the maps? are we releasing maps of previous iterations?
maybe if i directly pass references to prev and current,
and manually set 'prev' to target new it will be collected?
and then elements after these steps <em>26501365</em> would fit into memory?
** i guess maybe it would help if i had 'fully saturated' field
as my minimal 'skipping' thing
** so. store FieldCoord(fieldRow, fieldCol) for fields which were fully saturated at current step.
filter out neighbors, no need to enter fully saturated fields
when counting
on odd - around the S, on even - with S
but the neighboring fields would potentially (likely?) be in different phases
but i guess they are necessarily in different phases?
or. if width odd - necessarily
if width even - then what?
then S is not in the center
my input is 131 chars of width.
so neighboring are necessarily of different phase.
could compute phase of (0,0)
and adjust from that
** TODO remake 'ReachableBySteps' into 'CountReachableBySteps' returning int
** TODO make it take 'isInitialCountOdd' - to know phase of {0,0} field
current phase can be determined by initial phase and current N
if initial count is odd, and now it's odd number, we made even iterations, so (0,0) is in even state
if initial count is even, and now it's even number, we made even iterations, so (0,0) is in even state
** DONE make neighbors take set of saturated fields
and not produce points on those fields
** DONE for field calculate what would be amount of points in each phase
...........
.....###.#.
.###.##..#.
..#.#...#..
....#.#....
.##..S####.
.##..#...#.
.......##..
.##.#.####.
.##..##.##.
...........
*** getting 39 and 42
let's check
42 is even?
*** hmmm
EOEOEOEOEOE
OEOEO###O#O
E###E##OE#E
OE#E#EOE#EO
EOEO#O#OEOE
O##EOE####O
E##OE#EOE#E
OEOEOEO##EO
E##O#O####E
O##EO##E##O
EOEOEOEOEOE
*** yes, sounds good
** CANCELLED after getting all new points. get coords of all fields we're working on.
( there already should be no points in saturated fields )
for each such field, check if it is saturated.
- can be done by comparing the phase with amount of points on saturated
if field saturated - add the coord into set
and remove all the points
** CANCELLED on the last step, when n is 0
return len(startingAt) + (all saturated fields) * (amount of elems in their phase)
** calculating points in even 7356 and odd 7321 phases
* so need to scrap things and do a more analytics approach.
no blocks on horizontal & vertical from (S)
meaning diamond expands to left & right well
* 26501365 = 202300 * 131 + 65 where 131 is the dimension of the grid
* if there is a formula A*i^2 + B*i + C = D
where i is full iteration
* for initial steps :
2023/12/21 13:25:23 after steps 65. full iter 0. got count 3701
2023/12/21 13:25:24 after steps 196. full iter 1. got count 33108
2023/12/21 13:25:27 after steps 327. full iter 2. got count 91853
2023/12/21 13:25:42 after steps 458. full iter 3. got count 179936
* https://www.dcode.fr/newton-interpolating-polynomial
14669x^2 + 14738*x+3701

211
day21/stepCounter.go Normal file
View File

@@ -0,0 +1,211 @@
package day21
import (
"fmt"
"log"
"os"
"strings"
)
// Run is the day-21 entry point. The step simulation experiments that
// produced the interpolation inputs are kept commented out; the answer is
// the fitted quadratic evaluated at x = 202300 full field-widths.
func Run() int {
	fmt.Print("hello day21")
	filename := "day21/input"
	field := ReadField(filename)
	log.Print(field)
	// for i := 6; i <= 10; i++ {
	// 	reachableBySteps := field.ReachableBySteps(i, map[Coord]any{
	// 		Coord{Row: field.RowStart, Col: field.ColStart}: struct{}{},
	// 	})
	// 	log.Print("reachable after steps : ", i, len(reachableBySteps))
	// 	field.PrintCoord(reachableBySteps, 1)
	// }
	// initialSolutions := make(map[int]int)
	// for fullIter := 0; fullIter < 4; fullIter++ {
	// 	steps := 65 + fullIter*131
	// 	reachableBySteps := field.ReachableBySteps(steps, map[FieldPoint]any{
	// 		FieldPoint{
	// 			InField: Coord{Row: field.RowStart, Col: field.ColStart},
	// 		}: struct{}{},
	// 	})
	// 	log.Printf("after steps %d. full iter %d. got count %d", steps, fullIter, len(reachableBySteps))
	// 	initialSolutions[fullIter] = len(reachableBySteps)
	// }
	log.Println("will try to use the values to get coeff of Ax^2 + Bx + C = 0")
	log.Println("then solve for x == 202300")
	// quadratic fitted via https://www.dcode.fr/newton-interpolating-polynomial
	// 14669x^2 + 14738*x+3701
	x := 202300
	return 14669*x*x + 14738*x + 3701
}
// let's do dijkstra?
// i would need lots of space for edges?
// let's use a map with minimal distances?
// OR. just breath first traversal

// Field is the parsed garden map; RowStart/ColStart locate the 'S' tile.
type Field struct {
	RowStart, ColStart int
	// symbols holds the grid rows; '.'/'S' are walkable, '#' is rock
	symbols [][]rune
}

// Coord is a row/column position inside a single copy of the field.
type Coord struct {
	Row, Col int
}

// FieldPoint is a position on the infinite tiled garden: InField is the
// position inside one field copy, MetaField identifies which copy.
type FieldPoint struct {
	InField   Coord
	MetaField Coord
}
// ReachableBySteps returns the set of points reachable in exactly n more
// steps when the walker currently occupies every point in startingAt.
// Each round replaces the frontier with the union of all neighbors.
func (f Field) ReachableBySteps(n int, startingAt map[FieldPoint]any) map[FieldPoint]any {
	current := startingAt
	for step := n; ; step-- {
		if step%100 == 0 {
			log.Println("going step: ", step)
		}
		if step == 0 {
			return current
		}
		next := make(map[FieldPoint]any)
		for point := range current {
			for _, neighbor := range f.Neighbors(point) {
				next[neighbor] = struct{}{}
			}
		}
		current = next
	}
}
// Neighbors returns the walkable plots orthogonally adjacent to c on the
// infinite tiled garden: stepping off one edge of the field moves into the
// neighboring meta-field and re-enters from the opposite edge.
// Fixes: renamed the loop variable that shadowed the builtin `close`, fixed
// the `resut` typo, and hoisted the loop-invariant height/width lookups.
func (f Field) Neighbors(c FieldPoint) (result []FieldPoint) {
	height := len(f.symbols)
	width := len(f.symbols[0])
	candidates := []FieldPoint{
		{InField: Coord{Row: c.InField.Row + 1, Col: c.InField.Col}, MetaField: c.MetaField},
		{InField: Coord{Row: c.InField.Row - 1, Col: c.InField.Col}, MetaField: c.MetaField},
		{InField: Coord{Row: c.InField.Row, Col: c.InField.Col + 1}, MetaField: c.MetaField},
		{InField: Coord{Row: c.InField.Row, Col: c.InField.Col - 1}, MetaField: c.MetaField},
	}
	for i, cand := range candidates {
		// wrap in-field coordinates, tracking which meta-field we land in so
		// that points in different copies of the field count separately
		if cand.InField.Row == height {
			cand.InField.Row = 0
			cand.MetaField.Row++
		}
		if cand.InField.Row == -1 {
			cand.InField.Row = height - 1
			cand.MetaField.Row--
		}
		if cand.InField.Col == width {
			cand.InField.Col = 0
			cand.MetaField.Col++
		}
		if cand.InField.Col == -1 {
			cand.InField.Col = width - 1
			cand.MetaField.Col--
		}
		candidates[i] = cand
	}
	for _, cand := range candidates {
		if f.ValidCoord(cand.InField.Row, cand.InField.Col) {
			symb := f.symbols[cand.InField.Row][cand.InField.Col]
			if symb == '.' || symb == 'S' {
				result = append(result, cand)
			}
		}
	}
	return
}
// ValidCoord reports whether (row, col) lies inside the field grid. Because
// Neighbors wraps coordinates before calling it, an out-of-range value here
// indicates a programming error, so it panics rather than returning false.
func (f Field) ValidCoord(row, col int) bool {
	// log.Print("check valid ", row, col, row >= 0 && row < len(f.symbols) && col >= 0 && col < len(f.symbols[0]))
	valid := row >= 0 && row < len(f.symbols) && col >= 0 && col < len(f.symbols[0])
	if !valid {
		panic(fmt.Sprint("getting invalid coord: ", row, col))
	}
	return valid
}
// String renders the field grid, preceded by a newline and with a trailing
// newline after each row. Uses strings.Builder instead of string += to
// avoid quadratic copying on large grids.
func (f Field) String() string {
	var b strings.Builder
	b.WriteString("\n")
	for _, line := range f.symbols {
		b.WriteString(string(line))
		b.WriteString("\n")
	}
	return b.String()
}
// ReadField parses the garden map from filename, recording the position of
// the 'S' start tile in RowStart/ColStart.
func ReadField(filename string) (result Field) {
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	lines := strings.Split(strings.TrimSpace(string(bytes)), "\n")
	symbols := make([][]rune, len(lines))
	for rowNum, line := range lines {
		symbols[rowNum] = []rune(line)
		for colNum, symb := range line {
			if symb == 'S' {
				result.RowStart = rowNum
				result.ColStart = colNum
			}
		}
	}
	result.symbols = symbols
	return
}
// PrintCoord draws the marked coords as 'O' over the field symbols, tiling
// (2*expandByField+1)^2 copies of the field: meta-rows are printed one under
// another, and within a meta-row the copies are concatenated horizontally.
func (f Field) PrintCoord(coords map[FieldPoint]any, expandByField int) {
	for fieldRow := -expandByField; fieldRow <= expandByField; fieldRow++ {
		// one text line per in-field row; each meta-column appends its slice
		lines := make([]string, len(f.symbols))
		for fieldCol := -expandByField; fieldCol <= expandByField; fieldCol++ {
			for rowNum, row := range f.symbols {
				for colNum, col := range row {
					_, marked := coords[FieldPoint{InField: Coord{Row: rowNum, Col: colNum},
						MetaField: Coord{Row: fieldRow, Col: fieldCol}}]
					if marked {
						lines[rowNum] += "O"
					} else {
						lines[rowNum] += string(col)
					}
				}
			}
		}
		for _, line := range lines {
			fmt.Println(line)
		}
	}
	return
}

104
day22/block.go Normal file
View File

@@ -0,0 +1,104 @@
package day22
import (
"fmt"
"log"
"os"
"regexp"
"strconv"
"strings"
"github.com/deckarep/golang-set/v2"
)
// XY identifies a vertical column of the grid, ignoring height.
type XY struct {
	X, Y uint
}

// Block is one sand slab. The X/Y ranges are inclusive; Z is the (current)
// height of the block's lowest cell and ZHeight the number of extra cells
// above it. Supports/SupportedBy are populated during settling.
type Block struct {
	NameNum    int
	XMin, XMax uint
	YMin, YMax uint
	Z          uint
	IsSettled  bool
	ZHeight    uint
	// Supports: blocks resting directly on top of this one
	Supports mapset.Set[*Block]
	// SupportedBy: blocks this one rests directly on
	SupportedBy mapset.Set[*Block]
}
// String implements fmt.Stringer: a compact one-line debug form of the block.
func (b *Block) String() string {
	return fmt.Sprintf("[Block %d - x:%d-%d, y:%d-%d, z:%d, h:%d, isSettled %t]",
		b.NameNum, b.XMin, b.XMax, b.YMin, b.YMax, b.Z, b.ZHeight, b.IsSettled)
}
// AtoIOrPanic converts a to an int, panicking on malformed input
// (puzzle input is trusted, so no error plumbing is needed).
func AtoIOrPanic(a string) int {
	parsed, err := strconv.Atoi(a)
	if err != nil {
		panic(err)
	}
	return parsed
}
// blockRe matches one input line "x1,y1,z1~x2,y2,z2". Compiled once at
// package scope instead of on every ReadBlock call (hot loop over the input).
var blockRe = regexp.MustCompile(`(\d+),(\d+),(\d+)~(\d+),(\d+),(\d+)`)

// ReadBlock parses one input line into a Block named num. Endpoint order in
// the input is not guaranteed, so min/max normalize each axis; Z stores the
// lower end and ZHeight the vertical extent above it.
func ReadBlock(line string, num int) (b Block) {
	b.NameNum = num
	matches := blockRe.FindStringSubmatch(line)
	x1, x2 := AtoIOrPanic(matches[1]), AtoIOrPanic(matches[4])
	y1, y2 := AtoIOrPanic(matches[2]), AtoIOrPanic(matches[5])
	z1, z2 := AtoIOrPanic(matches[3]), AtoIOrPanic(matches[6])
	b.XMax = uint(max(x1, x2))
	b.XMin = uint(min(x1, x2))
	b.YMax = uint(max(y1, y2))
	b.YMin = uint(min(y1, y2))
	b.Z = uint(min(z1, z2))
	b.ZHeight = uint(max(z1, z2)) - b.Z
	b.Supports = mapset.NewSet[*Block]()
	b.SupportedBy = mapset.NewSet[*Block]()
	return
}
// getXY lists every (X, Y) column the block occupies.
func (b *Block) getXY() []XY {
	coords := make([]XY, 0, (b.XMax-b.XMin+1)*(b.YMax-b.YMin+1))
	for x := b.XMin; x <= b.XMax; x++ {
		for y := b.YMin; y <= b.YMax; y++ {
			coords = append(coords, XY{X: x, Y: y})
		}
	}
	return coords
}
// ReadBlockFile parses every line of filename into a Block, numbering the
// blocks by their line index.
func ReadBlockFile(filename string) (blocks []*Block) {
	bytes, err := os.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	lines := strings.Split(strings.TrimSpace(string(bytes)), "\n")
	for i, line := range lines {
		block := ReadBlock(line, i)
		blocks = append(blocks, &block)
	}
	return
}
// BlocksByZ buckets blocks by their (not yet settled) Z coordinate; the
// returned slice is indexed by Z and sized to the highest starting Z.
func BlocksByZ(blocks []*Block) [][]*Block {
	var maxZ uint
	for _, b := range blocks {
		if b.Z > maxZ {
			maxZ = b.Z
		}
	}
	log.Print("found max z: ", maxZ)
	byZ := make([][]*Block, maxZ+1)
	for _, b := range blocks {
		byZ[b.Z] = append(byZ[b.Z], b)
	}
	return byZ
}

37
day22/block_test.go Normal file
View File

@@ -0,0 +1,37 @@
package day22
import (
"strings"
"testing"
)
// TestReadBlock parses a handful of literal input lines and logs the
// resulting blocks and their XY footprints for eyeballing; no assertions.
func TestReadBlock(t *testing.T) {
	lines := `1,0,1~1,2,1
0,0,2~2,0,2
0,2,3~2,2,3
0,0,4~0,2,4
2,0,5~2,2,5
0,1,6~2,1,6
1,1,8~1,1,9`
	for _, line := range strings.Split(lines, "\n") {
		block := ReadBlock(line, 0)
		t.Logf("read %s into block %+v", line, block)
		t.Logf("XY coords for %+v are : %+v", block, block.getXY())
	}
}
func TestReadFile(t *testing.T) {
filename := "example"
// filename := "input"
blocks := ReadBlockFile(filename)
byZ := BlocksByZ(blocks)
for z, zBlocks := range byZ {
zBlocksLine := ""
for _, block := range zBlocks {
zBlocksLine += block.String()
}
t.Logf("for level %d blocks %+v", z, zBlocksLine)
}
}

7
day22/example Normal file
View File

@@ -0,0 +1,7 @@
1,0,1~1,2,1
0,0,2~2,0,2
0,2,3~2,2,3
0,0,4~0,2,4
2,0,5~2,2,5
0,1,6~2,1,6
1,1,8~1,1,9

76
day22/notes.org Normal file
View File

@@ -0,0 +1,76 @@
#+title: Notes
* ok. let's try this.
i'd want to have block type
with function to get it's XY coords
i'd want to settle blocks first.
but if i store enough data, for example a block.supports slice, i'll be able to answer the first task.
(settledOnZ) i would want [][]*Block per level from 0 to up. with references to blocks that settled on that level
(maxSettledXY) and for going from 0 up i'll want XY of the top block settled with it's level. i guess i could store settled level in the block as well
then for settling blocks, i will need (sorted map if data is sparse?) go from 0 up,
order of processing for blocks on same z level is not important.
for each block get it's XY, check maxSettledXY if there's a block check it's Z,
for all block XY coords, find maximal settled Z, and refs to all blocks that are directly under with that same Z.
for the block set settledZ to Z+1, and for all those blocks add the block to 'supports'
add block to settledOnZ[Z+1]
for the second part, i can scan all the blocks, don't even need the settledOnZ, just check if it's 'supports' is empty
** DONE block type
store z, and have 'settledZ', maybe with default -1?
** DONE coords type, func to get XY coords of the block
** DONE now i guess what? do i want a sorted map? or just a map from height to blocks on that height?
let's read file, and calc max height present?
i suppose the function to read the file could also be initially entered via a test, right?
** DONE now go through the z levels, block by block, doing setting.
i suppose i could organize setting methods around Space?
it will store (settledOnZ) and (maxSettledOnXY)
** DONE [#A] when i settle single block. the maxSettledOnXY - should use (z + height)
** i can already imagine the second part? what is the most volume that can be disintegrated? or what? most volume is just all
* part 1, wrong answer.
i guess try to go, setting the input? block after block and try to check the calculations?
what i want to check:
how maxSettledOnXY works, how linking works. maybe i'll find a problem in few steps =C
** can't see anything just glancing around.
maybe then trying to pick a block and track what's under it?
* ok. let's try to brute force?
for each block, remove it?
create new space and try to settle it
** this is shit. why blocks move up?
2023/12/22 12:12:24 >>> starting for block [Block 1 - x:0-2, y:0-0, z:1, h:0, isSettled true] (supports [[Block 3 - x:0-0, y:0-2, z:3, h:0, isSettled true] [Block 4 - x:2-2, y:0-2, z:3, h:0, isSettled true] [Block 3 - x:0-0, y:0-2, z:2, h:0, isSettled true] [Block 4 - x:2-2, y:0-2, z:2, h:0, isSettled true]])
2023/12/22 12:12:24 block [Block 2 - x:0-2, y:2-2, z:2, h:0, isSettled true] moved from 1 to 2
2023/12/22 12:12:24 block [Block 3 - x:0-0, y:0-2, z:3, h:0, isSettled true] moved from 2 to 3
2023/12/22 12:12:24 block [Block 4 - x:2-2, y:0-2, z:3, h:0, isSettled true] moved from 2 to 3
2023/12/22 12:12:24 block [Block 5 - x:0-2, y:1-1, z:4, h:0, isSettled true] moved from 3 to 4
2023/12/22 12:12:24 block [Block 6 - x:1-1, y:1-1, z:5, h:1, isSettled true] moved from 4 to 5
2023/12/22 12:12:24 for block [Block 1 - x:0-2, y:0-0, z:1, h:0, isSettled true] new space has 5 moved
* ok. brute force with copying slices worked.
now i want to debug.
for each brick, when there is 0 falling, i want to check what are it's surroundings
** my initial was : 567
** checking example of badly determined:
>> for block [Block 291 - x:6-8, y:7-7, z:75, h:0, isSettled false]
checking under coord {X:6 Y:7}. found under [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]. ( 'overriding' ) with 35 ; maxZ 35
directly supporting blocks are [[Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]]
checking under coord {X:7 Y:7}. found under [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]. ( 'adding' ) with 35 ; maxZ 35
directly supporting blocks are [[Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true] [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]]
checking under coord {X:8 Y:7}. found under [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]. ( 'adding' ) with 35 ; maxZ 35
directly supporting blocks are [[Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true] [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true] [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]]
>> after settring block [Block 291 - x:6-8, y:7-7, z:36, h:0, isSettled true]. supported by [[Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true] [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true] [Block 698 - x:6-8, y:7-7, z:35, h:0, isSettled true]]
** ouch. duplicates in slices. because there's no easy set thingy
not doing this was my bug.
#+begin_src go
slices.SortFunc(block.SupportedBy, func(a *Block, b *Block) int {
return cmp.Compare(a.NameNum, b.NameNum)
})
block.SupportedBy = slices.Compact(block.SupportedBy)
#+end_src
* maybe rewrite with Set?
* should have done that from the start

38
day22/printingSpace.go Normal file
View File

@@ -0,0 +1,38 @@
package day22
import (
"math"
"github.com/tidwall/pinhole"
)
// TestPinhole renders a demo cube to cube.png to sanity-check the pinhole
// library. NOTE(review): the SavePNG error is silently ignored.
func TestPinhole() {
	p := pinhole.New()
	p.DrawCube(-0.3, -0.3, -0.3, 0.3, 0.3, 0.3)
	p.Rotate(math.Pi/3, math.Pi/2, 0)
	p.SavePNG("cube.png", 500, 500, nil)
}
// PrintSpace renders every settled block of s as a cube into a PNG named
// filename. Coordinates are scaled by MaxZ to fit pinhole's [-1, 1] space
// (assumes the blocks are not much wider than they are tall — TODO confirm).
// NOTE(review): the SavePNG error is silently ignored.
func PrintSpace(s Space, filename string) {
	// pinhole is from -1 to 1. let's use from 0 to 1.
	// so coord should be divided by max height, and let's hope that they are not too wide
	rotation := []float64{math.Pi / 3, math.Pi / 6, 0}
	p := pinhole.New()
	// ground plane at z=0
	p.DrawRect(-1, -1, 1, 1, 0)
	for _, zLevel := range s.SettledOnZ {
		for _, block := range zLevel {
			// +1 on the max edges so single-cell blocks still have volume
			p.DrawCube(float64(block.XMin)/float64(s.MaxZ),
				float64(block.YMin)/float64(s.MaxZ),
				float64(block.Z)/float64(s.MaxZ),
				float64(block.XMax+1)/float64(s.MaxZ),
				float64(block.YMax+1)/float64(s.MaxZ),
				float64(block.Z+block.ZHeight+1)/float64(s.MaxZ))
		}
	}
	p.Rotate(rotation[0], rotation[1], rotation[2])
	p.SavePNG(filename, 1920, 1080, nil)
}

18
day22/sandSlabs.go Normal file
View File

@@ -0,0 +1,18 @@
package day22
import (
"fmt"
)
// Run is the day-22 entry point: settle all blocks from the input, then count
// how many can be safely disintegrated (part 1).
func Run() int {
	fmt.Print("oi, hello day 22")
	filename := "day22/input"
	blocks := ReadBlockFile(filename)
	space := NewSpace(BlocksByZ(blocks))
	space.SettleAll()
	return space.CountFreeBlocks()
}

206
day22/space.go Normal file
View File

@@ -0,0 +1,206 @@
package day22
import (
"log"
"slices"
)
// Space tracks the block-settling simulation.
type Space struct {
	// MaxZ is the highest starting Z among the unsettled blocks.
	MaxZ uint
	// SettledOnZ holds settled blocks indexed by their final Z.
	SettledOnZ [][]*Block
	// MaxSettledOnXY maps each column to the topmost settled block in it.
	MaxSettledOnXY map[XY]*Block
	// UnsettledByZ holds not-yet-settled blocks indexed by starting Z.
	UnsettledByZ [][]*Block
}
// NewSpace wraps per-Z unsettled blocks into a Space ready for settling.
func NewSpace(blocksByZ [][]*Block) Space {
	s := Space{
		UnsettledByZ:   blocksByZ,
		MaxZ:           uint(len(blocksByZ) - 1),
		MaxSettledOnXY: make(map[XY]*Block),
	}
	s.SettledOnZ = make([][]*Block, len(blocksByZ))
	return s
}
// AgainCountFreeBlocks counts blocks that are safe to disintegrate: every
// block they support must have at least one other supporter.
func (s *Space) AgainCountFreeBlocks() (result int) {
	for _, level := range s.SettledOnZ {
		for _, block := range level {
			canDisintegrate := true
			// drain the iterator fully (no early break) so the channel closes
			for supported := range block.Supports.Iter() {
				if supported.SupportedBy.Cardinality() == 1 {
					// this block is the sole support: removing it drops others
					canDisintegrate = false
				}
			}
			if canDisintegrate {
				result++
			}
		}
	}
	return
}
// InitialCollectGoodToDisintegrate returns the blocks that are safe to
// remove: start from the set of all settled blocks and drop every block that
// is the sole support of some other block.
// BUG FIX: the previous code used SupportedBy.Pop(), which REMOVES the
// element from the set, silently corrupting the support graph for any later
// pass over the same Space; we now peek without mutating.
func (s *Space) InitialCollectGoodToDisintegrate() (result []Block) {
	allBlocks := make(map[*Block]any)
	for _, row := range s.SettledOnZ {
		for _, block := range row {
			allBlocks[block] = struct{}{}
			if block.SupportedBy.Cardinality() == 1 {
				// read the single element non-destructively
				onlySupport := block.SupportedBy.ToSlice()[0]
				log.Printf("in block %+v. only support is %+v", block, onlySupport)
				log.Printf("should be NOT OK to remove %+v", onlySupport)
				delete(allBlocks, onlySupport)
			}
		}
	}
	for block := range allBlocks {
		result = append(result, *block)
	}
	return
}
// CountFreeBlocks reports how many blocks the fast analysis marks as safe
// to disintegrate.
func (s *Space) CountFreeBlocks() int {
	return len(s.InitialCollectGoodToDisintegrate())
}
// ThirdTimeCollectGoodToDisintegrate brute-forces part 1: for every settled
// block it builds a fresh Space without that block, re-settles everything,
// and keeps the block if nothing moved (i.e. removing it was safe).
func (s *Space) ThirdTimeCollectGoodToDisintegrate() (blocks []Block) {
	// for each block create a new space without it. try to settle and check if 0 moved
	log.Println(">>>>> starting hardcode count <<<<<")
	for rowNum, row := range s.SettledOnZ {
		for blockNum, block := range row {
			// log.Printf(">>> starting for block %+v (supports %+v)\n", block, block.Supports)
			// clone the outer slice and every row so deletions stay local
			newUnsettled := slices.Clone(s.SettledOnZ)
			for rowNum, row := range newUnsettled {
				newUnsettled[rowNum] = slices.Clone(row)
			}
			newUnsettled[rowNum] = slices.Delete(newUnsettled[rowNum], blockNum, blockNum+1)
			// and now copy the blocks
			// (each Block value is copied so settling cannot mutate the originals)
			for rowNum, row := range newUnsettled {
				for blockNum, block := range row {
					newBlock := *block
					newUnsettled[rowNum][blockNum] = &newBlock
				}
			}
			newSpace := NewSpace(newUnsettled)
			moved := newSpace.SettleAll()
			if moved > 0 {
				// log.Printf("for block %+v new space has %d moved\n\n", block, moved)
			} else {
				// log.Printf("for block %+v new space has %d moved\n\n", block, moved)
				blocks = append(blocks, *block)
			}
		}
	}
	return
}
// ThirdTimeCountFreeBlocks reports how many blocks the brute-force check
// marks as safe to disintegrate.
func (s *Space) ThirdTimeCountFreeBlocks() int {
	return len(s.ThirdTimeCollectGoodToDisintegrate())
}
// CountChainReactoins solves part 2: for every settled block, rebuild the
// space without it, re-settle, and sum how many blocks moved (the size of
// that block's chain reaction). Exported name typo kept for callers.
func (s *Space) CountChainReactoins() (result int) {
	for rowNum, row := range s.SettledOnZ {
		for blockNum, _ := range row {
			// clone outer slice and every row so deletions stay local
			newUnsettled := slices.Clone(s.SettledOnZ)
			for rowNum, row := range newUnsettled {
				newUnsettled[rowNum] = slices.Clone(row)
			}
			newUnsettled[rowNum] = slices.Delete(newUnsettled[rowNum], blockNum, blockNum+1)
			// and now copy the blocks
			// (each Block value is copied so settling cannot mutate the originals)
			for rowNum, row := range newUnsettled {
				for blockNum, block := range row {
					newBlock := *block
					newUnsettled[rowNum][blockNum] = &newBlock
				}
			}
			newSpace := NewSpace(newUnsettled)
			moved := newSpace.SettleAll()
			result += moved
		}
	}
	return
}
// SettleAll drops every layer from Z=1 upward and returns how many blocks
// ended up at a different height than they started.
func (s *Space) SettleAll() (totalMoved int) {
	for z := uint(1); z <= s.MaxZ; z++ {
		totalMoved += s.SettleZ(z)
	}
	return
}
// SettleZ settles every block whose starting height is z, clears that bucket
// of UnsettledByZ, and returns how many of those blocks moved.
func (s *Space) SettleZ(z uint) (totalMoved int) {
	for _, block := range s.UnsettledByZ[z] {
		if s.SettleBlock(block) {
			totalMoved++
		}
	}
	s.UnsettledByZ[z] = nil
	return
}
// SettleBlock drops block onto the highest settled surface under its XY
// footprint, records the support relationships, and marks the block as the
// new topmost block of every column it covers.
// for the block:
// check all XY in MaxSettledOnXY
// if there are any settled blocks on these XY, find max of their Z
// for all blocks with that Z - add block to their 'supports'
// set Z for block to Z+1, settled to true
// add block as highest settled for all the XY
// add block to MaxSettledOnXY
// Returns whether the block's Z changed from its starting value.
func (s *Space) SettleBlock(block *Block) (hasMoved bool) {
	initialZ := block.Z
	// highest settled top-surface found under any of the block's columns,
	// and all blocks whose top is exactly at that height (the supporters)
	underZMax := uint(0)
	underZBlocks := make([]*Block, 0)
	// fmt.Printf("\n>> for block %s\n", block)
	for _, xy := range block.getXY() {
		underBlock, found := s.MaxSettledOnXY[xy]
		// if block.NameNum
		if found {
			underBlockMaxZ := underBlock.Z + underBlock.ZHeight
			// action := " 'skipping' "
			if underBlockMaxZ > underZMax {
				// strictly higher support: it replaces all previous candidates
				underZBlocks = []*Block{underBlock}
				underZMax = underBlockMaxZ
				// action = " 'overriding' "
			} else if underBlockMaxZ == underZMax {
				// equal height: also a supporter (Set membership deduplicates)
				underZBlocks = append(underZBlocks, underBlock)
				// action = " 'adding' "
			}
			// fmt.Printf("checking under coord %+v. found under %+v. (%s) with %d ; maxZ %d\n directly supporting blocks are %+v\n",
			// xy, underBlock, action, underBlockMaxZ, underZMax, underZBlocks)
		} else {
			// fmt.Printf("checking under coord %+v. nothing under\n", xy)
		}
		// this block becomes the topmost settled block of the column
		s.MaxSettledOnXY[xy] = block
	}
	for _, settledUnderblock := range underZBlocks {
		settledUnderblock.Supports.Add(block)
		block.SupportedBy.Add(settledUnderblock)
	}
	block.Z = underZMax + 1
	block.IsSettled = true
	s.SettledOnZ[block.Z] = append(s.SettledOnZ[block.Z], block)
	// fmt.Printf(">> after settring block %s. supported by %+v\n\n", block, block.SupportedBy)
	// time.Sleep(500 * time.Millisecond)
	hasMoved = initialZ != block.Z
	// if hasMoved {
	// log.Printf("block %+v moved from %d to %d", block, initialZ, block.Z)
	// }
	return
}

135
day22/space_test.go Normal file
View File

@@ -0,0 +1,135 @@
package day22
import (
"slices"
"testing"
)
// TestSpaceSettleSingle settles one block into an otherwise empty space and
// logs the result for manual inspection; no assertions.
func TestSpaceSettleSingle(t *testing.T) {
	filename := "example"
	blocks := ReadBlockFile(filename)
	space := NewSpace(BlocksByZ(blocks))
	t.Logf("read space %+v", space)
	target := blocks[2]
	t.Logf("block before setting %+v", target)
	space.SettleBlock(target)
	t.Logf("space after settings %+v:\n%+v", target, space)
}
// TestSpaceSettleSecondNearby settles two blocks whose XY footprints do not
// overlap and logs the space after each step; inspection only, no assertions.
func TestSpaceSettleSecondNearby(t *testing.T) {
	filename := "example"
	blocks := ReadBlockFile(filename)
	byZ := BlocksByZ(blocks)
	space := NewSpace(byZ)
	t.Logf("read space %+v", space)
	block1 := blocks[0]
	block2 := blocks[3]
	t.Logf("block 1 before setting %+v", block1)
	space.SettleBlock(block1)
	t.Logf("space after settling block 1 %+v", space)
	t.Logf("block 2 before setting %+v", block2)
	space.SettleBlock(block2)
	t.Logf("space after settling block 2 %+v", space)
	t.Logf("space after settling %+v", space)
}
// TestSpaceSettleThirdOnTopFirst settles three blocks where the third one
// overlaps the first two in XY, then logs the resulting support sets;
// inspection only, no assertions.
func TestSpaceSettleThirdOnTopFirst(t *testing.T) {
	filename := "example"
	blocks := ReadBlockFile(filename)
	byZ := BlocksByZ(blocks)
	space := NewSpace(byZ)
	t.Logf("read space %+v", space)
	block1 := blocks[0]
	block2 := blocks[3]
	block3 := blocks[2] // should overlap X & Y coords of block 1
	t.Logf("block 1 before setting %+v", block1)
	space.SettleBlock(block1)
	t.Logf("space after settling block 1 %+v", space)
	t.Logf("block 2 before setting %+v", block2)
	space.SettleBlock(block2)
	t.Logf("space after settling block 2 %+v", space)
	t.Logf("block 3 before setting %+v", block3)
	space.SettleBlock(block3)
	t.Logf("space after settling block 3 %+v", space)
	t.Logf("space after settling %+v", space)
	t.Logf("blocks 1 & 3 should support it: %+v , %+v", block1.Supports, block2.Supports)
	// because block 3 is 0-2, 2-2
	// and that overlaps 1-1, 0-2 AND 0-0, 0-2
	t.Logf("other blocks should not supt %+v", block3.Supports)
}
// TestSpaceExampleSettleAll settles the whole example and logs the resulting
// space plus the free-block count; inspection only, no assertions.
func TestSpaceExampleSettleAll(t *testing.T) {
	filename := "example"
	blocks := ReadBlockFile(filename)
	byZ := BlocksByZ(blocks)
	space := NewSpace(byZ)
	space.SettleAll()
	t.Logf("settled space %+v", space)
	// maybe i can check via console.
	i := 2
	t.Logf("level %d is : %+v", i, space.SettledOnZ[i])
	// it looks ok for the example.
	// let's hope?
	t.Logf("for example, free blocks amount is %d", space.CountFreeBlocks())
	// oh, i need 'supported'?
	// how do i need to count the task question
	// i guess we can start with set of all blocks, then?
	// run over all, if some block is only supported by some underBlock - remove that underblock
}
// TestPinholeStart is a test-runner hook around the plain TestPinhole
// helper function defined elsewhere in the package.
func TestPinholeStart(t *testing.T) {
	TestPinhole()
}
// TestExampleSpacePrint settles the example and renders the settled
// space to a PNG for visual inspection; the "before" render is left
// commented out.
func TestExampleSpacePrint(t *testing.T) {
	filename := "example"
	blocks := ReadBlockFile(filename)
	byZ := BlocksByZ(blocks)
	space := NewSpace(byZ)
	// PrintSpace(space, "before-settping.png")
	space.SettleAll()
	PrintSpace(space, "after-settping.png")
}
// TestCompareInitialAndBruteforce settles the real input, then diffs the
// fast ("initial") disintegration answer against the trusted brute-force
// one, logging every block the fast version wrongly marks disintegratable.
func TestCompareInitialAndBruteforce(t *testing.T) {
	filename := "input"
	blocks := ReadBlockFile(filename)
	byZ := BlocksByZ(blocks)
	space := NewSpace(byZ)
	space.SettleAll()
	initialBlocks := space.InitialCollectGoodToDisintegrate()
	correct := space.ThirdTimeCollectGoodToDisintegrate()
	t.Log("len of initial solution : ", len(initialBlocks))
	t.Log("len of correct solution : ", len(correct))
	for _, disintegratableInInitial := range initialBlocks {
		indexInCorrect := slices.IndexFunc(correct, func(e Block) bool {
			return e.NameNum == disintegratableInInitial.NameNum
		})
		if indexInCorrect == -1 {
			// Fixed log-message typo: "falsly" -> "falsely".
			t.Logf("> found %+v. falsely marked as disintegratable\n\n", disintegratableInInitial)
		}
	}
}

26
day23/aLongWalk.go Normal file
View File

@@ -0,0 +1,26 @@
package day23
import (
"fmt"
)
// length of longest scenic route
//
// Run is the day-23 entry point. The DFS search below is commented out,
// so it currently only prints the sparse field and returns 0.
func Run() int {
	fmt.Println("day 23")
	max := 0
	filename := "day23/input"
	field := ReadField(filename)
	fmt.Println(field.SparseString())
	// finalPaths := RunDFSTingy(field)
	// // log.Println(finalPaths)
	// for _, path := range finalPaths {
	// 	if path.Visited.Cardinality() > max {
	// 		log.Println("one path len is ", path.Visited.Cardinality())
	// 		max = path.Visited.Cardinality()
	// 	}
	// }
	return max
}

23
day23/example Normal file
View File

@@ -0,0 +1,23 @@
#.#####################
#.......#########...###
#######.#########.#.###
###.....#.>.>.###.#.###
###v#####.#v#.###.#.###
###.>...#.#.#.....#...#
###v###.#.#.#########.#
###...#.#.#.......#...#
#####.#.#.#######.#.###
#.....#.#.#.......#...#
#.#####.#.#.#########v#
#.#...#...#...###...>.#
#.#.#v#######v###.###v#
#...#.>.#...>.>.#.###.#
#####v#.#.###v#.#.###.#
#.....#...#...#.#.#...#
#.#########.###.#.#.###
#...###...#...#...#.###
###.###.#.###v#####v###
#...#...#.#.>.>.#.>.###
#.###.###.#.###.#.#v###
#.....###...###...#...#
#####################.#

10
day23/example2 Normal file
View File

@@ -0,0 +1,10 @@
#.#####################
#.#####################
#.##............#######
#.##.##########.#######
#....##########.#######
####..#########.#######
#####...........#######
###############.#######
###############.#######
###############.#######

175
day23/field.go Normal file
View File

@@ -0,0 +1,175 @@
package day23
import (
"fmt"
"os"
"strings"
)
// Coord addresses a grid cell by row and column.
type Coord struct {
	Row, Col int
}

// CellType is the rune a cell is drawn with on the map.
type CellType rune

const (
	Path       CellType = '.'
	Tree       CellType = '#'
	SlideDown  CellType = 'v'
	SlideUp    CellType = '^'
	SlideLeft  CellType = '<'
	SlideRight CellType = '>'
)

// Field is the parsed hiking map: a cell lookup plus the grid bounds and
// the start/end columns found on the top and bottom rows.
type Field struct {
	MaxRow, MaxCol   int
	Cells            map[Coord]CellType
	StartCol, EndCol int
}
// EndCoord returns the exit cell: the bottom row at the recorded end column.
func (f *Field) EndCoord() Coord {
	return Coord{
		Row: f.MaxRow,
		Col: f.EndCol,
	}
}
// NeighborsPart2 lists the walkable cells orthogonally adjacent to c,
// ignoring slope directions (part-2 rules). It panics when c is off the
// field or is a tree, since a path can never stand there.
func (f *Field) NeighborsPart2(c Coord) (neighbors []Coord) {
	symb, exists := f.Cells[c]
	if !exists {
		panic(fmt.Sprintf("coord %+v not found in field", c))
	}
	if symb == Tree {
		panic(fmt.Sprintf("attempting to get neighbors of a tree at %+v", c))
	}
	candidates := []Coord{
		{Row: c.Row + 1, Col: c.Col},
		{Row: c.Row - 1, Col: c.Col},
		{Row: c.Row, Col: c.Col + 1},
		{Row: c.Row, Col: c.Col - 1},
	}
	for _, cand := range candidates {
		// Off-grid cells and trees are not walkable.
		if s, ok := f.Cells[cand]; ok && s != Tree {
			neighbors = append(neighbors, cand)
		}
	}
	return
}
// Neighbors lists the cells reachable from c under part-1 rules: a slope
// cell forces a single exit in its direction, a path cell allows all
// four orthogonal moves. Panics on unknown coords and on trees.
func (f *Field) Neighbors(c Coord) (neighbors []Coord) {
	symb, exists := f.Cells[c]
	if !exists {
		panic(fmt.Sprintf("coord %+v not found in field", c))
	}
	var candidates []Coord
	switch symb {
	case Tree:
		panic(fmt.Sprintf("attempting to get neighbors of a tree at %+v", c))
	case SlideDown:
		candidates = []Coord{{Row: c.Row + 1, Col: c.Col}}
	case SlideUp:
		candidates = []Coord{{Row: c.Row - 1, Col: c.Col}}
	case SlideLeft:
		candidates = []Coord{{Row: c.Row, Col: c.Col - 1}}
	case SlideRight:
		candidates = []Coord{{Row: c.Row, Col: c.Col + 1}}
	case Path:
		candidates = []Coord{
			{Row: c.Row + 1, Col: c.Col},
			{Row: c.Row - 1, Col: c.Col},
			{Row: c.Row, Col: c.Col + 1},
			{Row: c.Row, Col: c.Col - 1},
		}
	}
	for _, cand := range candidates {
		// Keep only in-bounds, non-tree cells.
		if s, ok := f.Cells[cand]; ok && s != Tree {
			neighbors = append(neighbors, cand)
		}
	}
	return
}
// String renders the whole field, marking the start cell with "S" and
// the end cell with "E"; every other cell prints its raw symbol.
// Uses strings.Builder instead of repeated string concatenation, which
// was quadratic in the grid size.
func (f *Field) String() string {
	var b strings.Builder
	b.WriteString("\n")
	for row := 0; row <= f.MaxRow; row++ {
		for col := 0; col <= f.MaxCol; col++ {
			switch {
			case row == 0 && col == f.StartCol:
				b.WriteString("S")
			case row == f.MaxRow && col == f.EndCol:
				b.WriteString("E")
			default:
				b.WriteString(string(f.Cells[Coord{Row: row, Col: col}]))
			}
		}
		b.WriteString("\n")
	}
	return b.String()
}
// SparseString renders the field with trees as blanks, crossroads
// (walkable cells with more than two walkable neighbors) as "o", other
// walkable cells as ".", and the start/end cells as "S"/"E".
// Uses strings.Builder instead of quadratic string concatenation.
func (f *Field) SparseString() string {
	var b strings.Builder
	b.WriteString("\n")
	for row := 0; row <= f.MaxRow; row++ {
		for col := 0; col <= f.MaxCol; col++ {
			c := Coord{Row: row, Col: col}
			switch {
			case row == 0 && col == f.StartCol:
				b.WriteString("S")
			case row == f.MaxRow && col == f.EndCol:
				b.WriteString("E")
			case f.Cells[c] == Tree:
				b.WriteString(" ")
			case len(f.NeighborsPart2(c)) > 2:
				// More than two exits: this is a crossroad.
				b.WriteString("o")
			default:
				b.WriteString(".")
			}
		}
		b.WriteString("\n")
	}
	return b.String()
}
// ReadField parses filename into a Field: the cell grid, its bounds, and
// the start/end columns located on the first and last rows. Panics on
// read errors.
func ReadField(filename string) Field {
	data, err := os.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	lines := strings.Split(strings.TrimSpace(string(data)), "\n")
	field := Field{
		MaxRow: len(lines) - 1,
		MaxCol: len(lines[0]) - 1,
		Cells:  make(map[Coord]CellType),
	}
	for rowNum, row := range lines {
		for colNum, symb := range row {
			field.Cells[Coord{Row: rowNum, Col: colNum}] = CellType(symb)
			if symb != rune(Path) {
				continue
			}
			// The sole '.' on the top/bottom row marks start/end.
			if rowNum == 0 {
				field.StartCol = colNum
			}
			if rowNum == field.MaxRow {
				field.EndCol = colNum
			}
		}
	}
	return field
}

31
day23/field_test.go Normal file
View File

@@ -0,0 +1,31 @@
package day23
import "testing"
// TestReadField smoke-checks parsing of the example field by logging its
// rendered form (inspect with `go test -v`).
func TestReadField(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	t.Log(field.String())
}
// TestStartNeighbors logs the neighbors of the start cell; the only
// walkable exit should be straight down.
func TestStartNeighbors(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	startNeighbors := field.Neighbors(Coord{Row: 0, Col: field.StartCol})
	t.Log(startNeighbors)
}
// 5,3
// TestForkNeighbors logs neighbors at the example's first fork (5,3).
func TestForkNeighbors(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	startNeighbors := field.Neighbors(Coord{Row: 5, Col: 3})
	t.Log(startNeighbors)
}
// TestSlideNeighbors logs neighbors of the slope cell at (6,3); a slope
// should yield exactly one forced exit.
func TestSlideNeighbors(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	startNeighbors := field.Neighbors(Coord{Row: 6, Col: 3})
	t.Log(startNeighbors)
}

248
day23/graph.go Normal file
View File

@@ -0,0 +1,248 @@
package day23
import (
"fmt"
"log"
"slices"
mapset "github.com/deckarep/golang-set/v2"
)
// Node is a condensed-graph vertex: the start cell, the end cell, or a
// crossroad of the field.
type Node struct {
	index int
	c     Coord
	name  string
}

// Name derives a one-letter display name from the node index: indices
// 0-25 map to 'A'-'Z', 26 and up map to 'a', 'b', ...
// Fixes an off-by-one in the original: the bound of 25 skipped 'Z'
// entirely and gave index 25 the lowercase 'a'.
func (n Node) Name() string {
	if n.index < 26 {
		return string(rune('A' + n.index))
	}
	return string(rune('a' + n.index - 26))
}
// Graph is the condensed field: nodes keyed by coordinate, the same
// nodes addressable by index, and a dense index-by-index matrix holding
// the longest corridor length between adjacent nodes (0 = no edge).
type Graph struct {
	nodes        map[Coord]Node
	nodesByIndex []Node
	edges        [][]int // from, to, length. excluding from, including to
}
// MaxDist is an unimplemented stub and always returns 0.
// NOTE(review): the actual longest-path search lives in DFSLenOnGraph —
// consider removing this.
func MaxDist(from, to Node) (result int) {
	return
}
// PrintFieldWithGraph renders the field with graph nodes overlaid: a
// node's one-letter name where one exists, "." for other walkable cells,
// and a blank for trees.
// Cleanup: builds the Coord once per cell and drops the redundant
// fmt.Sprint around Name() (which already returns a string).
func PrintFieldWithGraph(g Graph, f Field) (result string) {
	result += "\n"
	for row := 0; row <= f.MaxRow; row++ {
		for col := 0; col <= f.MaxCol; col++ {
			coord := Coord{Row: row, Col: col}
			if f.Cells[coord] == Tree {
				result += " "
				continue
			}
			if node, exists := g.nodes[coord]; exists {
				result += node.Name()
			} else {
				result += "."
			}
		}
		result += "\n"
	}
	return
}
// CreateGraph condenses the field into a graph whose nodes are the start
// cell, the end cell, and every crossroad (walkable cell with more than
// two walkable neighbors); edges carry the longest corridor length
// between adjacent nodes.
func CreateGraph(f Field) (g Graph) {
	startCoord := Coord{Row: 0, Col: f.StartCol}
	// directly below start
	initialPath := PathEnd{
		end: Coord{Row: 1, Col: f.StartCol}, visited: mapset.NewSet[Coord](),
	}
	g = Graph{
		nodes: map[Coord]Node{
			startCoord: Node{
				index: 0,
				c:     startCoord,
				name:  "A",
			},
		},
	}
	// NOTE(review): node capacity is hard-coded; an input condensing to
	// more than 36 nodes would index out of range — confirm the input.
	const presumedNodeCount = 36
	g.edges = make([][]int, presumedNodeCount)
	for i := 0; i < presumedNodeCount; i++ {
		g.edges[i] = make([]int, presumedNodeCount)
	}
	recursiveGraphStep(f, initialPath, &g, startCoord, 1, mapset.NewSet[Coord]())
	// Presumably clears a self-edge the walk records on the start node —
	// TODO confirm.
	g.edges[0][0] = 0
	g.nodesByIndex = make([]Node, len(g.nodes))
	for _, node := range g.nodes {
		g.nodesByIndex[node.index] = node
	}
	return
}
// Neighbors returns the nodes adjacent to node in the condensed graph:
// every index whose edge-matrix entry is positive.
// Cleanup: the loop variable was named `len`, shadowing the builtin.
func (g *Graph) Neighbors(node Node) (nodes []Node) {
	for toIndex, edgeLen := range g.edges[node.index] {
		if edgeLen > 0 {
			nodes = append(nodes, g.nodesByIndex[toIndex])
		}
	}
	return
}
// maxSoFar holds the best distance reported so far (package-global,
// not goroutine-safe).
var maxSoFar = -1

// CheckMaxSoFar records maybe as the running maximum when it beats it.
func CheckMaxSoFar(maybe int) {
	if maybe <= maxSoFar {
		return
	}
	maxSoFar = maybe
}
// DFSLenOnGraph exhaustively searches for the longest path from atNode
// to toNode on the condensed graph, returning the summed edge lengths or
// -1 when toNode is unreachable. visited holds the indices of nodes on
// the current path; maxSoFar is updated as a side effect for progress
// reporting.
// NOTE(review): the log below fires on every visited node and dominates
// runtime on large graphs (see notes.org).
func (g *Graph) DFSLenOnGraph(atNode Node, visited mapset.Set[int],
	toNode Node, lenSoFar int) int {
	if atNode == toNode {
		CheckMaxSoFar(lenSoFar)
		return lenSoFar
	}
	log.Printf("at %+v to %+v cur dist is %d.\t\t|max so far %d| \n", atNode, toNode, lenSoFar, maxSoFar)
	neighbors := g.Neighbors(atNode)
	// Drop neighbors already on the current path.
	toVisit := slices.DeleteFunc(neighbors, func(n Node) bool {
		return visited.Contains(n.index)
	})
	if len(toVisit) == 0 {
		// Dead end short of the target.
		return -1
	}
	max := -1
	for _, nextNode := range toVisit {
		newVisited := visited.Clone()
		newVisited.Add(atNode.index)
		dist := g.edges[atNode.index][nextNode.index]
		maxFromNext := g.DFSLenOnGraph(nextNode, newVisited, toNode, lenSoFar+dist)
		if maxFromNext > max {
			max = maxFromNext
		}
	}
	return max
}
// run dfs, remembering from which remembers from which node we go, which path already traversed
//
// recursiveGraphStep is the corridor walk behind CreateGraph: it follows
// the field cell by cell, and whenever it reaches a node cell (start,
// end, or crossroad) records an edge from goingFrom of length goingLen,
// keeping only the longest edge between any node pair. goingLen counts
// steps since the last node; visitedPathPoints accumulates corridor
// cells already walked so corridors are not re-entered.
func recursiveGraphStep(f Field, p PathEnd, g *Graph, goingFrom Coord, goingLen int, visitedPathPoints mapset.Set[Coord]) {
	// log.Printf("entering coord %+v. from %+v with len %d\n", p.end, goingFrom, goingLen)
	// if visitedPathPoints.Contains(p.end) {
	// 	return
	// }
	neighbors := f.NeighborsPart2(p.end)
	isCrossRoad := len(neighbors) > 2
	if isCrossRoad {
		log.Println("this should be crossroad ", p.end)
	}
	isStart := p.end == Coord{Row: 0, Col: f.StartCol}
	isEnd := p.end == f.EndCoord()
	if isEnd {
		log.Println("this should be end ", p.end)
	}
	// Only start, end, and crossroads become graph nodes.
	isNode := isCrossRoad || isStart || isEnd
	continuedPaths := ExtendPath(p, f)
	if !isNode {
		// just recurse into next paths, from same node, with increased len
		visitedPathPoints.Add(p.end)
		for _, nextStep := range continuedPaths {
			recursiveGraphStep(f, nextStep, g, goingFrom, goingLen+1, visitedPathPoints)
		}
	} else {
		node, known := g.nodes[p.end]
		// check if known, if not known - create
		if !known {
			node = Node{
				c:     p.end,
				index: len(g.nodes),
			}
			node.name = node.Name()
			g.nodes[p.end] = node
			log.Printf("creating node %s %+v\n", node.Name(), node)
		}
		from := g.nodes[goingFrom]
		log.Printf("from %s to %s\n", from.Name(), node.Name())
		// and add vertices to currently traversed
		if g.edges[node.index][from.index] == 0 {
			g.edges[node.index][from.index] = goingLen
			g.edges[from.index][node.index] = goingLen
		} else {
			// Keep only the longest edge between the two nodes.
			knownEdge := g.edges[node.index][from.index]
			if goingLen > knownEdge {
				g.edges[node.index][from.index] = goingLen
				g.edges[from.index][node.index] = goingLen
			}
		}
		// NOTE ah, it's possible to have two edges between i and j
		// but, i only need the longest one
		// log.Printf("adding edges between %d & %d of len %d\n", node.index, from.index, goingLen)
		// continue with new 'from' and len of 1
		if !known {
			for _, nextStep := range continuedPaths {
				log.Printf("from %s should recurse to %+v", node.Name(), nextStep)
				recursiveGraphStep(f, nextStep, g, p.end, 1, visitedPathPoints)
			}
		}
	}
	return
}
// GraphToMermaid renders g as a mermaid flowchart, emitting every
// undirected edge exactly once (the lower-indexed node is always named
// first, and a set deduplicates the two directions).
// Cleanup: the loop variable was named `len`, shadowing the builtin, and
// the O(nodes) name lookup ran even for absent edges.
func GraphToMermaid(g Graph) (result string) {
	result += "\nflowchart LR\n"
	lines := mapset.NewSet[string]()
	for _, node := range g.nodes {
		for to, edgeLen := range g.edges[node.index] {
			if edgeLen <= 0 {
				continue
			}
			// Linear scan for the node carrying index `to`.
			var toNode Node
			for _, other := range g.nodes {
				if other.index == to {
					toNode = other
				}
			}
			fromName, toName := node.Name(), toNode.Name()
			if node.index >= toNode.index {
				fromName, toName = toName, fromName
			}
			lines.Add(fmt.Sprintf("\t%s---|length %d|%s\n", fromName, edgeLen, toName))
		}
	}
	for line := range lines.Iter() {
		result += line
	}
	return
}

101
day23/graph_test.go Normal file
View File

@@ -0,0 +1,101 @@
package day23
import (
"fmt"
"os"
"testing"
mapset "github.com/deckarep/golang-set/v2"
)
// TestGraphCreate condenses the small example2 field into a graph and
// logs it for inspection.
func TestGraphCreate(t *testing.T) {
	filename := "example2"
	field := ReadField(filename)
	fmt.Println(field.SparseString())
	graph := CreateGraph(field)
	t.Log(graph)
}
// TestPrintGraph overlays example2's graph nodes on the field rendering
// and logs both forms.
func TestPrintGraph(t *testing.T) {
	filename := "example2"
	field := ReadField(filename)
	fmt.Println(field.SparseString())
	graph := CreateGraph(field)
	t.Log(PrintFieldWithGraph(graph, field))
	t.Logf(">>>\n %+v\n", graph)
}
// TestPrintGraphInput is TestPrintGraph against the real puzzle input.
func TestPrintGraphInput(t *testing.T) {
	filename := "input"
	field := ReadField(filename)
	fmt.Println(field.SparseString())
	graph := CreateGraph(field)
	t.Log(PrintFieldWithGraph(graph, field))
	t.Logf(">>>\n %+v\n", graph)
}
// TestPrintMermaidGraphInput renders the input's condensed graph as a
// mermaid flowchart and writes it next to the input file for manual
// inspection.
func TestPrintMermaidGraphInput(t *testing.T) {
	filename := "input"
	field := ReadField(filename)
	fmt.Println(field.SparseString())
	graph := CreateGraph(field)
	mmdContent := GraphToMermaid(graph)
	t.Log(mmdContent)
	fileBorder, err := os.Create(filename + ".mmd")
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := fileBorder.Close(); err != nil {
			panic(err)
		}
	}()
	// Fixed: the write error was silently discarded.
	if _, err := fileBorder.WriteString(mmdContent); err != nil {
		t.Fatalf("writing %s.mmd: %v", filename, err)
	}
}
// TestGraphMaxBetweenExample runs the longest-path DFS over the
// example's condensed graph, from start node to end node, and logs the
// resulting distance.
func TestGraphMaxBetweenExample(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	graph := CreateGraph(field)
	t.Log(PrintFieldWithGraph(graph, field))
	from := graph.nodes[Coord{Row: 0, Col: field.StartCol}]
	to := graph.nodes[field.EndCoord()]
	dist := graph.DFSLenOnGraph(from, mapset.NewSet[int](), to, 0)
	t.Log(graph)
	t.Logf("please dist %d", dist)
}
// TestGraphMaxBetweenInput is TestGraphMaxBetweenExample against the
// real puzzle input (slow; see notes.org).
func TestGraphMaxBetweenInput(t *testing.T) {
	filename := "input"
	field := ReadField(filename)
	graph := CreateGraph(field)
	t.Log(PrintFieldWithGraph(graph, field))
	from := graph.nodes[Coord{Row: 0, Col: field.StartCol}]
	to := graph.nodes[field.EndCoord()]
	dist := graph.DFSLenOnGraph(from, mapset.NewSet[int](), to, 0)
	t.Log(graph)
	t.Logf("please dist %d", dist)
}

62
day23/input.mmd Normal file
View File

@@ -0,0 +1,62 @@
flowchart LR
M---|length 166|N
d---|length 62|h
H---|length 190|I
f---|length 136|h
j---|length 94|k
B---|length 152|L
I---|length 40|J
W---|length 56|X
E---|length 214|F
C---|length 60|K
V---|length 142|b
a---|length 110|b
I---|length 138|P
J---|length 184|K
Y---|length 146|a
c---|length 190|d
Q---|length 114|T
J---|length 240|O
C---|length 184|D
L---|length 172|M
Q---|length 140|R
Y---|length 464|k
O---|length 76|V
N---|length 102|O
K---|length 152|L
U---|length 80|c
V---|length 72|W
b---|length 202|j
A---|length 39|B
W---|length 236|a
P---|length 166|Q
e---|length 174|f
G---|length 186|R
T---|length 258|d
X---|length 142|Y
b---|length 128|c
F---|length 378|G
S---|length 108|T
N---|length 62|W
U---|length 110|V
a---|length 138|k
S---|length 234|e
d---|length 108|e
H---|length 166|Q
O---|length 158|P
M---|length 360|X
h---|length 184|i
B---|length 244|C
D---|length 96|J
D---|length 154|E
R---|length 118|S
E---|length 146|I
P---|length 128|U
T---|length 268|U
i---|length 198|j
G---|length 144|H
F---|length 102|H
f---|length 77|g
K---|length 266|N
c---|length 64|i

125
day23/notes.org Normal file
View File

@@ -0,0 +1,125 @@
#+title: Notes
* ok, second part is long.
and here, would the optimization of storing the direction of entry into a path, and its length,
would that be helpful?
it might not, because based on visited some future longer path might not be available.
i don't know how to optimize.
i could maybe do same calculation in parallel, somehow
put not into queue, but into channel
* wait a second. previous answer was 2018
and now long checks result in me waiting for intermediate 1882.
let's add early cutoffs, if not end by 2018, then abandon
doubt that's implementable
* well. do i want to try parallel?
seems like false path, really
like there should be a better optimization first
* maybe we could join detours into 'potential longest paths'
like if we traverse, and get to a point which was previously visited,
for every path that went through that split path,
i could check whether i can take paths that went through this point, and switch their part with the detoured part.
* and maybe we could continue longest paths first?
like making pathsToFurther a heap by visited.Cardinality?
oh, and then we'll find 'some path to end'
and additional paths will try to add their detour.
so, i guess when finding a path to end, i could save path to end for each point.
then if i reach the point, i could check if i can use some of the
and i guess if i do depth first then i'll always have all paths to end from a point if i return to it?
* this sounds like an idea.
with heap do depth first.
if it's first visit to a point, just go further
if i find the end, i'd want to mark all points on the path with path info
hm. recursive calls might make this easier.
because i'd want both 'prefixVisited' set and totalPathSet
due to depth first, we'll discover shortest path first.
and points will get mapped with this first (of potential multiple) path info to end.
now if on followup steps i get into the point with info on paths to end,
that should mean that i've already found all paths to end from that point, right?
now i need to check for the 'detour' which 'paths to end' are still possible with that detour added
by taking set of elements from this point, to end. and checking that intersection with detour elements is 0.
if there are like this - report finding a new path, and save to all elements of that path somehow.
and now on finding detours i wouldn't need to re-check path to end, that should save a lot of time
** so how to go about in coding this?
have shared map[Coord][]EndPathInfo
the DFS means i'm recursing into each child.
and taking the result of the call.
it should be info on path to end? or multiple paths to end.
which should be added to current node.
and then calling with start point will return paths to end from start, and i'll be able to take the by length
ok. but. if i'm entering the coord, and there are already paths to end.
then i need to presume that those are only possible paths to end from this point,
because all other paths should have been explored by now,
for my 'detour', I determine whether it is consistent with any of the already-found paths to end.
** NO. dfs doesn't mean i'll find shortest path first.
so if i'm in visited, it doesn't mean that stored is shorter and current is a detour.
but dfs should mean that all paths from this prefix have finished.
so, sure. there have to be all done?
** my example2 has fork on row 3 col 10
so 4,10 and 3,11 should be visited separately.
6,17 is where they join and the point which should have second entry
** allright, ugh. my new solution is memory hogging.
maybe i can draw the stuff and it will be several neat thingies
* maybe new approach?
make a graph. with vertices of Start, End and Crossroads.
yes.
let's create a graph representation.
** so, from A to AG
i think i can do this manually now
** distances are
39
244
184
154
214
378
144
190
40
184
152
172
166
102
158
166
140
118
108
268
110
72
56
142
146
110
128
190
108
174
77
1
** again?
no, let's write code.
** didn't count all the way
2023/12/23 15:55:55 at {index:30 c:{Row:125 Col:137} name:f} to {index:31 c:{Row:140 Col:139} name:g} cur dist is 3997. |max so far 6406|
signal: interrupt
FAIL sunshine.industries/aoc2023/day23 380.499s
tried more or less stable value, and interrupted

161
day23/paths.go Normal file
View File

@@ -0,0 +1,161 @@
package day23
import (
"fmt"
"log"
mapset "github.com/deckarep/golang-set/v2"
)
// PathEnd is a walk frontier: the current cell plus every cell already
// visited on the way to it.
type PathEnd struct {
	end     Coord
	visited mapset.Set[Coord]
}

// String implements fmt.Stringer so %v prints readably; the original
// method name "Sring" was a typo and never satisfied the interface.
func (p PathEnd) String() string {
	return fmt.Sprintf("PathEnd[at %+v, visited: %+v]", p.end, p.visited)
}

// Sring is the original misspelling, kept so existing callers compile.
//
// Deprecated: use String instead.
func (p PathEnd) Sring() string {
	return p.String()
}
// ExtendPath produces one new PathEnd per unvisited walkable neighbor of
// p.end; each successor's visited set is a clone of p's plus p.end.
func ExtendPath(p PathEnd, f Field) (nextPaths []PathEnd) {
	for _, candidate := range f.NeighborsPart2(p.end) {
		if p.visited.Contains(candidate) {
			continue
		}
		visited := p.visited.Clone()
		visited.Add(p.end)
		nextPaths = append(nextPaths, PathEnd{
			end:     candidate,
			visited: visited,
		})
	}
	return
}
// info on path from start to end
//
// PathInfo wraps the set of cells a complete path visits; its length is
// Visited.Cardinality().
type PathInfo struct {
	Visited mapset.Set[Coord]
}
// RunDFSTingy kicks off DFSScenicPaths from the field's start cell with
// an empty visited set and an empty shared memo.
func RunDFSTingy(f Field) []PathInfo {
	start := PathEnd{
		end:     Coord{Row: 0, Col: f.StartCol},
		visited: mapset.NewSet[Coord](),
	}
	return DFSScenicPaths(f, start, map[Coord][]PathInfo{})
}
// knownMax tracks the best path length reported so far (package-global,
// not goroutine-safe).
var knownMax int = 0

// CheckAndPrintMax logs and records maybeNewMax when it beats knownMax.
func CheckAndPrintMax(maybeNewMax int) {
	if maybeNewMax <= knownMax {
		return
	}
	log.Printf("\n\n>>>>found new max: %d\n", maybeNewMax)
	knownMax = maybeNewMax
}
// DFSScenicPaths depth-first searches every path from curPath to the
// field's end cell, memoizing in sharedMem, per coordinate, the path
// suffixes (cells from that coordinate to the end) found on the first
// visit. A later visit ("detour") reuses those suffixes: any memoized
// suffix disjoint from the detour's prefix forms a new complete path.
// knownMax is updated as a side effect for progress reporting.
func DFSScenicPaths(f Field, curPath PathEnd,
	sharedMem map[Coord][]PathInfo) (pathsFromTheStartToEnd []PathInfo) {
	curCoord := curPath.end
	// Reached the end: the accumulated visited set is a complete path.
	if curCoord == f.EndCoord() {
		pathsFromTheStartToEnd = append(pathsFromTheStartToEnd, PathInfo{curPath.visited.Clone()})
		log.Printf("got to end. cur len is %d\n", curPath.visited.Cardinality())
		CheckAndPrintMax(curPath.visited.Cardinality())
		return
	}
	knownPaths, visitedBefore := sharedMem[curCoord]
	if !visitedBefore {
		// First visit: recurse into every extension and memoize the
		// suffix (path minus the current prefix) of each found path.
		nextSteps := ExtendPath(curPath, f)
		suffixesFromCurToEnd := make([]PathInfo, 0)
		for _, nextPath := range nextSteps {
			pathsToEndThrough := DFSScenicPaths(f, nextPath, sharedMem)
			for _, path := range pathsToEndThrough {
				// Difference already returns a fresh set; no Clone needed.
				suffix := PathInfo{Visited: path.Visited.Difference(curPath.visited)}
				suffixesFromCurToEnd = append(suffixesFromCurToEnd, suffix)
			}
			pathsFromTheStartToEnd = append(pathsFromTheStartToEnd, pathsToEndThrough...)
			if len(pathsToEndThrough) != 0 {
				sharedMem[curCoord] = suffixesFromCurToEnd
			}
		}
		return
	}
	// Revisit: all suffixes from here to the end are already memoized.
	// Keep each one whose cells do not overlap our detour prefix.
	for _, knownPathToEnd := range knownPaths {
		fromCurToEnd := knownPathToEnd.Visited
		thisPrefix := curPath.visited
		if thisPrefix.Intersect(fromCurToEnd).Cardinality() == 0 {
			// BUG FIX: Set.Union returns a new set instead of mutating
			// its receiver; the old code called Union and discarded the
			// result, so the reported path was just the prefix.
			fromCurPrefixToEnd := thisPrefix.Union(fromCurToEnd)
			pathsFromTheStartToEnd = append(pathsFromTheStartToEnd, PathInfo{fromCurPrefixToEnd})
			log.Printf("additional path to end of len %d\n", fromCurPrefixToEnd.Cardinality())
			CheckAndPrintMax(fromCurPrefixToEnd.Cardinality())
		}
	}
	log.Printf("having second visit into %+v.\n", curPath)
	return
}
// return paths that end on End
//
// RunAllScenicPaths exhaustively walks every path from the start cell,
// collecting the frontiers that terminate on the field's end cell.
func RunAllScenicPaths(f Field) (result []PathEnd) {
	start := PathEnd{end: Coord{Row: 0, Col: f.StartCol}, visited: mapset.NewSet[Coord]()}
	queue := []PathEnd{start}
	for len(queue) > 0 {
		current := queue[0]
		queue = queue[1:]
		if current.end == f.EndCoord() {
			result = append(result, current)
			continue
		}
		// Appending an empty extension slice is a no-op, so no length
		// check is needed.
		queue = append(queue, ExtendPath(current, f)...)
	}
	return
}

19
day23/paths_test.go Normal file
View File

@@ -0,0 +1,19 @@
package day23
import "testing"
// TestRunAllPaths enumerates every path through the example field and
// logs the longest visited-set size found; no assertion is made.
func TestRunAllPaths(t *testing.T) {
	filename := "example"
	field := ReadField(filename)
	finalPaths := RunAllScenicPaths(field)
	t.Log(finalPaths)
	max := 0
	for _, path := range finalPaths {
		if path.visited.Cardinality() > max {
			max = path.visited.Cardinality()
		}
	}
	t.Logf("max path len is %d", max)
}

72
day23/willHelp.mmd Normal file
View File

@@ -0,0 +1,72 @@
flowchart LR
L---|length 152|K
L---|length 172|M
U---|length 268|T
U---|length 110|V
W---|length 72|V
W---|length 56|X
a---|length 146|Y
a---|length 110|b
f---|length 174|e
f---|length 77|g
f---|length 136|h
H---|length 144|G
H---|length 190|I
T---|length 108|S
T---|length 268|U
M---|length 172|L
M---|length 166|N
F---|length 214|E
F---|length 378|G
I---|length 190|H
I---|length 40|J
A---|length 2|A
A---|length 39|B
Q---|length 166|P
Q---|length 140|R
Y---|length 142|X
Y---|length 146|a
d---|length 190|c
d---|length 108|e
e---|length 108|d
e---|length 174|f
h---|length 136|f
h---|length 184|i
J---|length 40|I
J---|length 184|K
N---|length 166|M
N---|length 102|O
X---|length 56|W
X---|length 142|Y
j---|length 198|i
j---|length 94|k
B---|length 39|A
B---|length 244|C
G---|length 378|F
G---|length 144|H
P---|length 158|O
P---|length 166|Q
D---|length 184|C
D---|length 154|E
E---|length 154|D
E---|length 214|F
K---|length 184|J
K---|length 152|L
O---|length 102|N
O---|length 158|P
R---|length 140|Q
R---|length 118|S
S---|length 118|R
S---|length 108|T
V---|length 110|U
V---|length 72|W
c---|length 128|b
c---|length 190|d
C---|length 244|B
C---|length 184|D
k---|length 94|j
i---|length 184|h
i---|length 198|j
g---|length 77|f
b---|length 110|a
b---|length 128|c

BIN
day23/willHelp.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 203 KiB

5
day24/example Normal file
View File

@@ -0,0 +1,5 @@
19, 13, 30 @ -2, 1, -2
18, 19, 22 @ -1, -1, -2
20, 25, 34 @ -2, -2, -4
12, 31, 28 @ -1, -2, -1
20, 19, 15 @ 1, -5, -3

73
day24/hailMary.go Normal file
View File

@@ -0,0 +1,73 @@
package day24
import (
"fmt"
)
// most inner loop
//
// AssumeHails assumes the thrown stone hits h1 at time t1 and h2 at time
// t2 and derives the stone's full trajectory from that assumption.
// isInt reports whether all three velocity components came out integer —
// a non-integer fit falsifies the (t1, t2) assumption.
func AssumeHails(h1, h2 HailParam, t1, t2 int) (stoneTrajectory HailParam, isInt bool) {
	Dx, isXInt := AssumedDelta(h1.p0.x, h2.p0.x, h1.Dx, h2.Dx, t1, t2)
	Dy, isYInt := AssumedDelta(h1.p0.y, h2.p0.y, h1.Dy, h2.Dy, t1, t2)
	Dz, isZInt := AssumedDelta(h1.p0.z, h2.p0.z, h1.Dz, h2.Dz, t1, t2)
	isInt = isXInt && isYInt && isZInt
	x := AssumedStartFromDelta(h1.p0.x, h1.Dx, t1, Dx)
	y := AssumedStartFromDelta(h1.p0.y, h1.Dy, t1, Dy)
	z := AssumedStartFromDelta(h1.p0.z, h1.Dz, t1, Dz)
	// Round-trip through the text format so ReadHailLine recomputes all
	// derived fields (p1, a/b/c, slope, shift) for the stone.
	stoneTrajectoryLine := fmt.Sprintf("%d, %d, %d @ %d, %d, %d", x, y, z, Dx, Dy, Dz)
	stoneTrajectory = ReadHailLine(stoneTrajectoryLine)
	return
}
// HailMaryLoop brute-forces hit times: for every ordered pair of
// distinct hails and every (t1, t2) with 1 <= t1 < t2 <= 100, it fits a
// stone trajectory and keeps only all-integer fits. The success report
// is currently commented out, so the loop produces no output.
func HailMaryLoop(hails []HailParam) {
	// for t1, t2 from [1, 100]
	// try to fit stoneTrajectory on every pair of hails.
	// and hope for integer fit
	for t1 := 1; t1 <= 100; t1++ {
		for t2 := t1 + 1; t2 <= 100; t2++ {
			for i, hail := range hails {
			innerHail:
				// NOTE(review): the label sits on the innermost loop, so
				// `continue innerHail` is equivalent to a plain continue.
				for j, otherHail := range hails {
					if i == j {
						continue innerHail
					}
					_, isInt := AssumeHails(hail, otherHail, t1, t2)
					if !isInt {
						continue innerHail // TODO first hope to loose
					}
					// if isInt {
					// 	log.Printf("hail mary int fit between %s (%d) and %s (%d)",
					// 		hail.SomeString(), t1, otherHail.SomeString(), t2)
					// }
				}
			}
		}
	}
}
// TODO check for inner loop : when get assumed stoneTrajectory
// for all hail params, check that they intercept
// func CheckAssumedTrajectory(assumedStone HailParam, hails []HailParam) bool {
// for _, hail := range hails {
// // i guess i could try to do what?
// // assume oh, no. there can be t whatever
// }
// }
// AssumedDelta solves for the stone's velocity along one axis, assuming
// it meets hail 1 (coord c1, speed Dc1) at time t1 and hail 2 at t2:
// delta = (c1 - c2 + t1*Dc1 - t2*Dc2) / (t1 - t2). isInt reports whether
// the division is exact; a fractional delta falsifies the assumption.
// Robustness fix: equal hit times used to panic with a division by zero;
// now they report no integer solution.
func AssumedDelta(c1, c2 int, Dc1, Dc2 int, t1, t2 int) (delta int, isInt bool) {
	divisor := t1 - t2
	if divisor == 0 {
		// The stone cannot be at two places at the same instant.
		return 0, false
	}
	divisible := c1 - c2 + (t1 * Dc1) - (t2 * Dc2)
	isInt = divisible%divisor == 0
	delta = divisible / divisor
	return
}
// AssumedStartFromDelta back-computes the stone's start coordinate on
// one axis: the hail's position at impact time t1, minus the distance
// the stone itself covers by t1 at speed Dc.
func AssumedStartFromDelta(c1 int, Dc1 int, t1, Dc int) (c int) {
	hailAtImpact := c1 + t1*Dc1
	return hailAtImpact - t1*Dc
}

9
day24/hailMary_test.go Normal file
View File

@@ -0,0 +1,9 @@
package day24
import "testing"
// TestHailMaryOnExamle runs the brute-force time-fit loop end to end.
// NOTE(review): despite the name (which also misspells "Example"), it
// reads the real "input" file — confirm which dataset was intended.
func TestHailMaryOnExamle(t *testing.T) {
	filename := "input"
	hails := ReadHailFile(filename)
	HailMaryLoop(hails)
}

196
day24/lines.go Normal file
View File

@@ -0,0 +1,196 @@
package day24
import (
"log"
"os"
"strconv"
"strings"
)
const (
	// Bounds of the part-1 intersection test area. The commented values
	// are the example's bounds; the active values are for the real input.
	// CoordMin int = 7
	// CoordMax int = 27
	CoordMin int = 200000000000000
	CoordMax int = 400000000000000
)
// Point is an integer position in 3D space.
type Point struct {
	x, y, z int
}

// HailParam is one hailstone: start point p0, a second point p1 one time
// step later, the per-axis velocities, the raw input line, and two
// derived 2D-line representations of its XY projection.
type HailParam struct {
	p0, p1 Point
	Dx, Dy, Dz int
	line string
	// for 2d : the line satisfies a*x + b*y = c (two-point form)
	a, b, c int
	// for 2d : y = slope*x + shift
	slope, shift float64
}
// SomeString returns the raw input line this hailstone was parsed from.
func (h *HailParam) SomeString() string {
	return h.line
}
// GetCoord returns the start-position component selected by name
// ("x", "y" or "z"); any other name panics.
func (h *HailParam) GetCoord(name string) int {
	switch name {
	case "x":
		return h.p0.x
	case "y":
		return h.p0.y
	case "z":
		return h.p0.z
	}
	panic("unknown param")
}
// GetSpeedOf returns the velocity component selected by name
// ("x", "y" or "z"); any other name panics.
func (h *HailParam) GetSpeedOf(name string) int {
	switch name {
	case "x":
		return h.Dx
	case "y":
		return h.Dy
	case "z":
		return h.Dz
	}
	panic("unknown param")
}
// CheckPairwiseIntersections counts the unordered hail pairs whose XY
// paths cross inside the task area (see CheckTaskIntersection).
func CheckPairwiseIntersections(hails []HailParam) (totalIntersections int) {
	for i := range hails {
		for j := i + 1; j < len(hails); j++ {
			if CheckTaskIntersection(hails[i], hails[j]) {
				totalIntersections++
			}
		}
	}
	return
}
// CheckTaskIntersection reports whether the XY projections of h1 and h2
// cross at a point that is in the future for both hails and lies inside
// the [CoordMin, CoordMax] square (the part-1 condition).
func CheckTaskIntersection(h1, h2 HailParam) (doIntersect bool) {
	log.Printf("intersecting %+v and %+v\n", h1, h2)
	// x, y, intersectAtAll := IntersectByTwoPoints(h1, h2)
	x, y, intersectAtAll := IntersectBySlopeAndShift(h1, h2)
	if !intersectAtAll {
		// Fixed: the message embedded a stray "\n" before the coordinates
		// (log.Println already terminates the line).
		log.Println("no intersection at all", x, y)
		return false
	}
	isH1Future := h1.FloatPointInFuture(x, y)
	isH2Future := h2.FloatPointInFuture(x, y)
	if !isH1Future {
		log.Printf("point %f, %f in the past for h1\n", x, y)
	}
	if !isH2Future {
		log.Printf("point %f, %f in the past for h2\n", x, y)
	}
	if !isH1Future || !isH2Future {
		return false
	}
	if x < float64(CoordMin) || x > float64(CoordMax) ||
		y < float64(CoordMin) || y > float64(CoordMax) {
		log.Printf("intersect at %f %f but outside of area\n", x, y)
		return false // outside of area
	}
	log.Println("> intersect inside of the area! ", x, y)
	return true
}
// IntersectInTheeDimentions is an unimplemented stub: it always returns
// zero values and isIntersecting == false.
// NOTE(review): the name misspells "ThreeDimensions"; kept as-is to
// avoid breaking any callers.
func IntersectInTheeDimentions(h1, h2 HailParam) (interX, interY, interZ float64,
	interT float64, isIntersecting bool) {
	return
}
// IntersectBySlopeAndShift intersects the XY projections of two hails
// using their y = slope*x + shift forms. Equal slopes (parallel or
// identical projections) report no intersection.
func IntersectBySlopeAndShift(h1, h2 HailParam) (intersectionX, intersectionY float64, isIntersecting bool) {
	if h1.slope == h2.slope {
		return 0, 0, false
	}
	// Solve slope1*x + shift1 == slope2*x + shift2 for x, then substitute.
	intersectionX = (h2.shift - h1.shift) / (h1.slope - h2.slope)
	intersectionY = h1.slope*intersectionX + h1.shift
	return intersectionX, intersectionY, true
}
// PointInFuture reports whether p lies forward in time along h's
// trajectory on all three axes, i.e. the displacement from p0 has the
// same sign as the velocity (or is zero) everywhere.
func (h HailParam) PointInFuture(p Point) bool {
	forwardOn := func(delta, speed int) bool {
		return delta*speed >= 0
	}
	return forwardOn(p.x-h.p0.x, h.Dx) &&
		forwardOn(p.y-h.p0.y, h.Dy) &&
		forwardOn(p.z-h.p0.z, h.Dz)
}
// FloatPointInFuture reports whether the 2D point (x, y) lies forward in
// time along h's trajectory, judged by the x axis alone (the y check is
// deliberately commented out; for a point on the line both agree).
// NOTE(review): when Dx == 0 the product is 0 and the check is vacuously
// true — confirm no input hail has a zero x velocity.
func (h HailParam) FloatPointInFuture(x, y float64) bool {
	xPositiveSteps := (x-float64(h.p0.x))*float64(h.Dx) >= 0
	// yPositiveSteps := (y - float64(h.p0.y)) * float64(h.Dy) >= 0
	// return xPositiveSteps && yPositiveSteps
	return xPositiveSteps
}
// 19, 13, 30 @ -2, 1, -2
//
// ReadHailLine parses one hailstone line (position @ velocity) and
// precomputes derived forms: a second point p1 one time step later, the
// two-point line coefficients a, b, c, and the slope/shift of
// y = slope*x + shift for the XY projection.
// NOTE(review): slope becomes ±Inf when Dx == 0 (float division by
// zero) — confirm no input has a zero x velocity.
func ReadHailLine(line string) (h HailParam) {
	h.line = line
	// Strip separators so strings.Fields yields the six numbers.
	line = strings.ReplaceAll(line, "@", "")
	line = strings.ReplaceAll(line, ",", "")
	fields := strings.Fields(line)
	h.p0.x = AtoIOrPanic(fields[0])
	h.p0.y = AtoIOrPanic(fields[1])
	h.p0.z = AtoIOrPanic(fields[2])
	h.Dx = AtoIOrPanic(fields[3])
	h.Dy = AtoIOrPanic(fields[4])
	h.Dz = AtoIOrPanic(fields[5])
	// p1 is the position after one time step.
	countP1AfterMillis := 1
	h.p1.x = h.p0.x + countP1AfterMillis*h.Dx
	h.p1.y = h.p0.y + countP1AfterMillis*h.Dy
	h.p1.z = h.p0.z + countP1AfterMillis*h.Dz
	h.a = h.p0.y - h.p1.y
	h.b = h.p1.x - h.p0.x
	h.c = -(h.p0.x*h.p1.y - h.p1.x*h.p0.y)
	h.slope = float64(h.Dy) / float64(h.Dx)
	// y = slope * x + shift
	// shift = y - slope * x // for some point
	h.shift = float64(h.p0.y) - h.slope*float64(h.p0.x)
	return
}
// ReadHailFile parses every line of filename into a HailParam slice;
// read errors panic.
func ReadHailFile(filename string) []HailParam {
	data, err := os.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	lines := strings.Split(strings.TrimSpace(string(data)), "\n")
	hails := make([]HailParam, len(lines))
	for i, line := range lines {
		hails[i] = ReadHailLine(line)
	}
	return hails
}
func AtoIOrPanic(str string) (num int) {
num, err := strconv.Atoi(str)
if err != nil {
panic(err)
}
return
}

87
day24/lines_test.go Normal file
View File

@@ -0,0 +1,87 @@
package day24
import (
"strings"
"testing"
)
// TestReadLine smoke-parses the five example hailstones and logs each result.
func TestReadLine(t *testing.T) {
	input := `19, 13, 30 @ -2, 1, -2
18, 19, 22 @ -1, -1, -2
20, 25, 34 @ -2, -2, -4
12, 31, 28 @ -1, -2, -1
20, 19, 15 @ 1, -5, -3`
	for _, l := range strings.Split(input, "\n") {
		t.Log(ReadHailLine(l))
	}
}
// TestReadLineInput smoke-parses five real-input-sized hailstones,
// checking that the large coordinates survive parsing.
func TestReadLineInput(t *testing.T) {
	input := `147847636573416, 190826994408605, 140130741291716 @ 185, 49, 219
287509258905812, 207449079739538, 280539021150559 @ -26, 31, 8
390970075767404, 535711685410735, 404166182422876 @ -147, -453, -149
306391780523937, 382508967958270, 264612201472049 @ -24, -274, 28
278063616684570, 510959526404728, 288141792965603 @ -18, -441, -6`
	for _, l := range strings.Split(input, "\n") {
		t.Logf("%+v\n", ReadHailLine(l))
	}
}
// TestSecondPointIsInFuture logs whether p1 (the position one tick after
// p0) is classified as lying in each hailstone's future.
func TestSecondPointIsInFuture(t *testing.T) {
	input := `19, 13, 30 @ -2, 1, -2
18, 19, 22 @ -1, -1, -2
20, 25, 34 @ -2, -2, -4
12, 31, 28 @ -1, -2, -1
20, 19, 15 @ 1, -5, -3`
	for _, l := range strings.Split(input, "\n") {
		hail := ReadHailLine(l)
		t.Log(hail)
		t.Logf("calced seconds point %+v is in future %t\n", hail.p1, hail.PointInFuture(hail.p1))
	}
}
// TestIntersectExampleOne checks the first worked pair from the puzzle
// text: hailstones A and B must cross (at roughly x=14.333, y=15.333).
func TestIntersectExampleOne(t *testing.T) {
	// Hailstone A: 19, 13, 30 @ -2, 1, -2
	// Hailstone B: 18, 19, 22 @ -1, -1, -2
	// Hailstones' paths will cross inside the test area (at x=14.333, y=15.333).
	hA := ReadHailLine("19, 13, 30 @ -2, 1, -2")
	hB := ReadHailLine("18, 19, 22 @ -1, -1, -2")
	x, y, check := IntersectBySlopeAndShift(hA, hB)
	if !check {
		// fix: report through the test framework instead of panicking,
		// so the failure is attributed to this test and others still run.
		t.Fatal("should intersect")
	}
	t.Logf("got intersection at %f %f", x, y)
}
// TestIntersectExampleTwo checks a parallel pair from the puzzle text:
// these two hailstones have equal slopes and must not intersect.
func TestIntersectExampleTwo(t *testing.T) {
	// Hailstone A: 18, 19, 22 @ -1, -1, -2
	// Hailstone B: 20, 25, 34 @ -2, -2, -4
	hA := ReadHailLine("18, 19, 22 @ -1, -1, -2")
	hB := ReadHailLine("20, 25, 34 @ -2, -2, -4")
	x, y, check := IntersectBySlopeAndShift(hA, hB)
	if check {
		// fix: report through the test framework instead of panicking.
		t.Fatal("should not intersect")
	}
	t.Logf("got intersection at %f %f", x, y)
}
// TestExamplePairwiseChecks runs the part 1 pairwise intersection count
// on the example file and logs the result.
func TestExamplePairwiseChecks(t *testing.T) {
	hails := ReadHailFile("example")
	for _, h := range hails {
		t.Log(h)
	}
	count := CheckPairwiseIntersections(hails)
	t.Log("counted intersections ", count)
}

View File

@@ -0,0 +1,12 @@
package day24
import (
"fmt"
)
// Run solves day 24 part 1: count the pairwise XY intersections of the
// hailstone trajectories read from the puzzle input file.
func Run() int {
	fmt.Println("hello day 24, i'm getting tired")
	// fix: local variable was misspelled "filenae".
	filename := "day24/input"
	hails := ReadHailFile(filename)
	return CheckPairwiseIntersections(hails)
}

118
day24/notes.org Normal file
View File

@@ -0,0 +1,118 @@
#+title: Notes
* i want help from math
https://math.stackexchange.com/questions/28503/how-to-find-intersection-of-two-lines-in-3d
'vector parametric form' is exactly what we're getting in the input?
* huh and only 'looking forward in time' so solutions with negative t are not important.
cooool
** i see that speeds are integers, so updated points are integers.
maybe i do calculation of every line on every time point?
and if i do that is there a way to get intersections efficiently?
** i'll also need the ends for lines? ends to the line segments.
with limits on the x & y by the task
for example both 7 <= <= 27
for input 200000000000000 <= <= 400000000000000
also. can't we move the coords? maybe not? maybe only for one
so, what do i do? to get the ends of the lines?
i try to calculate with both x & y in 2 min\max. then if the other is ok, than that's the ends?
wait, what happens when i do x = 7, and x = 27 and y is outside? it means no intersections, i guess
or it could be outside from different sides, so not all x are ok, but there's still line there
** Using homogeneous coordinates
https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
no, i don't understand that
** https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection#Given_two_points_on_each_line
with 2 points. i guess
but also - check if the point in future of the hail, by comparing with speeds?
should be easy
** and i got wrong result
day24 result: 8406
** another formula gives
day24 result: 8406
** another formula
12938
*
* ok, part 2.
what if.
i start checking t = 0, 1, etc.
for each t, i need two points of the two hail lines.
it would constitute the trajectory.
then condition for the solution that all other hail lines will intersect it at some t.
so check for intersection (maybe not necessarily in the field?)
go though lines, if any fail to intersect - continue with t
if all intersect, find where the rock has to be in time 0
oh. no.
it's not just intersect. it's that the movement of the rock with t would be there at correct time? yuck?
would there really be more than i line that intersects all of the hail lines?
i'll just need to also figure out t=0 from other coords.
i don't like this at all.
And intersections have to be over (X, Y, Z)
** so 'hail mary' approach would be
scan first 1k nanoseconds. so already 1M calculations
( this is first part of desperation, that at least 2 hails will intercept in first 1k ticks )
for collision 1, assume HailA is on path.
then iterate for all other assuming they are intercepted on t 2 etc ?
no. the intersections could be on non-integer times?
( this would be second part of the 'hail mary' )
from that i should be able to construct the 'trajectory' line.
and then check with all other points - do the intersect?
( and check of intersection in future would be nice )
then if line confirmed, will need to calc for t = 0, t = 1, and get speeds
*** not hoping for all integer intersections
or what if i will hope for that?
let's try?
* ok, what if i could do system of equations?
#+begin_src
yuck_test.go:12:
x + Dx * t0 == 19 + -2 * t0
y + Dy * t0 == 13 + 1 * t0
z + Dz * t0 == 19 + -2 * t0
x + Dx * t1 == 18 + -1 * t1
y + Dy * t1 == 19 + -1 * t1
z + Dz * t1 == 18 + -2 * t1
x + Dx * t2 == 20 + -2 * t2
y + Dy * t2 == 25 + -2 * t2
z + Dz * t2 == 20 + -4 * t2
solve for x, y, z, Dx, Dy, Dz, t1, t2, t3. ti > 0
#+end_src
#+begin_src
yuck_test.go:18:
x + Dx * t0 == 147847636573416 + 185 * t0
y + Dy * t0 == 190826994408605 + 49 * t0
z + Dz * t0 == 147847636573416 + 219 * t0
x + Dx * t1 == 287509258905812 + -26 * t1
y + Dy * t1 == 207449079739538 + 31 * t1
z + Dz * t1 == 287509258905812 + 8 * t1
x + Dx * t2 == 390970075767404 + -147 * t2
y + Dy * t2 == 535711685410735 + -453 * t2
z + Dz * t2 == 390970075767404 + -149 * t2
solve for x, y, z, Dx, Dy, Dz, t1, t2, t3. ti > 0
#+end_src
* got some solution
https://z3prover.github.io/papers/programmingz3.html#sec-intro
enefedov@LLF33A87M:~/Documents/personal/advent-of-code-2023$ python day24/pythonZ3/forInput.py
Solution: [t0 = 666003776903,
t2 = 779453185471,
t1 = 654152070134,
Dz = 18,
Dx = 47,
Dy = -360,
z = 273997500449219,
y = 463222539161932,
x = 239756157786030]

47
day24/pythonZ3/example.py Normal file
View File

@@ -0,0 +1,47 @@
#!/usr/bin/env python3
# Day 24 part 2 (example input), solved with the z3 SMT solver:
# find a rock start position (x, y, z) and velocity (Dx, Dy, Dz) so that
# the rock occupies the same point as hailstone i at some time ti >= 0.
# The equation block mirrors the output of SystemAsPythonInit in
# day24/yuck.go — presumably pasted from it; see notes.org.
from z3 import *
s = Solver()
# Unknowns: the rock's starting coordinates and constant velocity.
x = Real('x')
Dx = Real('Dx')
y = Real('y')
Dy = Real('Dy')
z = Real('z')
Dz = Real('Dz')
# Hailstone 0: 19, 13, 30 @ -2, 1, -2
t0 = Real('t0')
eqT0 = t0 >= 0  # the meeting must happen now or in the future
eq0x = x + Dx * t0 == (-2 * t0) + 19
eq0y = y + Dy * t0 == (1 * t0) + 13
eq0z = z + Dz * t0 == (-2 * t0) + 30
# Hailstone 1: 18, 19, 22 @ -1, -1, -2
t1 = Real('t1')
eqT1 = t1 >= 0
eq1x = x + Dx * t1 == (-1 * t1) + 18
eq1y = y + Dy * t1 == (-1 * t1) + 19
eq1z = z + Dz * t1 == (-2 * t1) + 22
# Hailstone 2: 20, 25, 34 @ -2, -2, -4
t2 = Real('t2')
eqT2 = t2 >= 0
eq2x = x + Dx * t2 == (-2 * t2) + 20
eq2y = y + Dy * t2 == (-2 * t2) + 25
eq2z = z + Dz * t2 == (-4 * t2) + 34
#solve for x, y, z, Dx, Dy, Dz, t1, t2, t3.
s.add(eqT0,
      eq0x,
      eq0y,
      eq0z,
      eqT1,
      eq1x,
      eq1y,
      eq1z,
      eqT2,
      eq2x,
      eq2y,
      eq2z)
if s.check() == sat:
    print("Solution:", s.model())
else:
    print("No solution found")

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
# Day 24 part 2 for the real puzzle input, solved with the z3 SMT solver:
# find a rock start position (x, y, z) and velocity (Dx, Dy, Dz) so that
# the rock occupies the same point as hailstone i at some time ti >= 0.
# The equation block mirrors the output of SystemAsPythonInit in
# day24/yuck.go — presumably pasted from it; see notes.org.
from z3 import *
s = Solver()
# Unknowns: the rock's starting coordinates and constant velocity.
x = Real('x')
Dx = Real('Dx')
y = Real('y')
Dy = Real('Dy')
z = Real('z')
Dz = Real('Dz')
# Hailstone 0 from the input file.
t0 = Real('t0')
eqT0 = t0 >= 0  # the meeting must happen now or in the future
eq0x = x + Dx * t0 == (185 * t0) + 147847636573416
eq0y = y + Dy * t0 == (49 * t0) + 190826994408605
eq0z = z + Dz * t0 == (219 * t0) + 140130741291716
# Hailstone 1 from the input file.
t1 = Real('t1')
eqT1 = t1 >= 0
eq1x = x + Dx * t1 == (-26 * t1) + 287509258905812
eq1y = y + Dy * t1 == (31 * t1) + 207449079739538
eq1z = z + Dz * t1 == (8 * t1) + 280539021150559
# Hailstone 2 from the input file.
t2 = Real('t2')
eqT2 = t2 >= 0
eq2x = x + Dx * t2 == (-147 * t2) + 390970075767404
eq2y = y + Dy * t2 == (-453 * t2) + 535711685410735
eq2z = z + Dz * t2 == (-149 * t2) + 404166182422876
#solve for x, y, z, Dx, Dy, Dz, t1, t2, t3.
s.add(eqT0,
      eq0x,
      eq0y,
      eq0z,
      eqT1,
      eq1x,
      eq1y,
      eq1z,
      eqT2,
      eq2x,
      eq2y,
      eq2z)
if s.check() == sat:
    print("Solution:", s.model())
else:
    print("No solution found")
# Sum of the solved x, y, z from the model printed above (hardcoded):
# this is the puzzle answer.
print(273997500449219 + 463222539161932 + 239756157786030)

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env python3
from z3 import *
x = Real('x')
y = Real('y')
eq1 = x + y == 5
eq2 = x - y == 3
s = Solver()
s.add(eq1, eq2)
if s.check() == sat:
print("Solution:", s.model())
else:
print("No solution found")

78
day24/yuck.go Normal file
View File

@@ -0,0 +1,78 @@
package day24
import (
"fmt"
)
// SystemsWithSymbols renders the part-2 system of equations purely
// symbolically — x + Dx*t_i == x_i + Dx_i*t_i for three hailstones —
// suitable for pasting into an external solver.
func SystemsWithSymbols() string {
	out := "\n"
	for i := 0; i < 3; i++ {
		for _, axis := range []string{"x", "y", "z"} {
			out += fmt.Sprintf("%s + D%s * t%d == %s%d + D%s%d * t%d\n",
				axis, axis, i, axis, i, axis, i, i)
		}
	}
	return out + "solve for x, y, z, Dx, Dy, Dz, t1, t2, t3. ti > 0"
}
// SystemFromThreeHailstones renders the part-2 equation system using the
// first three hailstones' concrete positions and velocities, with ti >= 0
// constraints, for pasting into an external solver.
func SystemFromThreeHailstones(hails []HailParam) (result string) {
	result = "\n"
	for i := 0; i < 3; i++ {
		result += fmt.Sprintf("t%d >= 0\n", i)
		h := hails[i]
		for _, axis := range []string{"x", "y", "z"} {
			result += fmt.Sprintf("%s + D%s * t%d == %d + %d * t%d\n",
				axis, axis, i,
				h.GetCoord(axis), h.GetSpeedOf(axis), i)
		}
	}
	result += "solve for x, y, z, Dx, Dy, Dz, t1, t2, t3."
	return
}
// SystemFromThreeHailstonesToTheLeft renders the same system as
// SystemFromThreeHailstones, but with the t-term moved to the left-hand
// side of each equation (x + Dx*t - (v*t) == coord).
func SystemFromThreeHailstonesToTheLeft(hails []HailParam) (result string) {
	result = "\n"
	for i := 0; i < 3; i++ {
		result += fmt.Sprintf("t%d >= 0\n", i)
		h := hails[i]
		for _, axis := range []string{"x", "y", "z"} {
			result += fmt.Sprintf("%s + D%s * t%d - (%d * t%d) == %d \n",
				axis, axis, i,
				h.GetSpeedOf(axis), i, h.GetCoord(axis))
		}
	}
	result += "solve for x, y, z, Dx, Dy, Dz, t1, t2, t3."
	return
}
// SystemAsPythonInit renders the part-2 equation system as z3 Python
// source: Real() declarations for the rock's position/velocity unknowns,
// plus per-hailstone time variables and equality constraints. The output
// is meant to be pasted into a python z3 script (see day24/pythonZ3).
func SystemAsPythonInit(hails []HailParam) (result string) {
	result += "\n"
	coords := []string{"x", "y", "z"}
	for _, coord := range coords {
		result += fmt.Sprintf("%s = Real('%s')\n", coord, coord)
		result += fmt.Sprintf("D%s = Real('D%s')\n", coord, coord)
	}
	for i := 0; i < 3; i++ {
		result += fmt.Sprintf("t%d = Real('t%d')\n", i, i)
		result += fmt.Sprintf("eqT%d = t%d >= 0\n", i, i)
		hailIter := hails[i]
		for _, coord := range coords {
			result += fmt.Sprintf("eq%d%s = %s + D%s * t%d == (%d * t%d) + %d \n",
				i, coord,
				coord, coord, i,
				hailIter.GetSpeedOf(coord), i, hailIter.GetCoord(coord))
		}
	}
	// fix: the emitted text is Python, so the trailing note must use
	// Python's comment marker '#' — '//' is not a comment in Python.
	// (The committed pythonZ3 scripts use "#solve ...".)
	result += "#solve for x, y, z, Dx, Dy, Dz, t1, t2, t3."
	return
}

19
day24/yuck_test.go Normal file
View File

@@ -0,0 +1,19 @@
package day24
import "testing"
// TestPrintJustSymbol logs the purely symbolic equation system.
func TestPrintJustSymbol(t *testing.T) {
	system := SystemsWithSymbols()
	t.Log(system)
}
// TestPrintSystemExample logs the z3 Python source generated from the
// example file, ready to paste into a solver script.
func TestPrintSystemExample(t *testing.T) {
	hails := ReadHailFile("example")
	t.Log(SystemAsPythonInit(hails))
}
// TestPrintSystemInput logs the z3 Python source generated from the
// real input file, ready to paste into a solver script.
func TestPrintSystemInput(t *testing.T) {
	hails := ReadHailFile("input")
	t.Log(SystemAsPythonInit(hails))
}

10
day25/Snowerload.go Normal file
View File

@@ -0,0 +1,10 @@
package day25
import (
"fmt"
)
// Run is the day 25 entry point. The answer is currently produced by the
// day25 tests, so this only prints a greeting and returns 0.
func Run() int {
	const greeting = "time to wrap things up"
	fmt.Println(greeting)
	return 0
}

File diff suppressed because it is too large Load Diff

13
day25/example Normal file
View File

@@ -0,0 +1,13 @@
jqt: rhn xhk nvd
rsh: frs pzl lsr
xhk: hfx
cmg: qnr nvd lhk bvb
rhn: xhk bvb hfx
bvb: xhk hfx
pzl: lsr hfx nvd
qnr: nvd
ntq: jqt hfx bvb xhk
nvd: lhk
lsr: lhk
rzs: qnr cmg lsr rsh
frs: qnr lhk lsr

View File

@@ -0,0 +1,15 @@
flowchart TD
cmg --- qnr
cmg --- lhk
jqt --- nvd
bvb --- rhn
lsr --- pzl
lhk --- lsr
lsr --- rsh
hfx --- ntq
qnr --- rzs
bvb --- hfx
lhk --- nvd
frs --- qnr
jqt --- ntq
rhn --- xhk

View File

@@ -0,0 +1,34 @@
flowchart TD
ntq --- xhk
bvb --- hfx
lsr --- pzl
nvd --- pzl
pzl --- rsh
frs --- lhk
bvb --- cmg
jqt --- xhk
bvb --- rhn
jqt --- rhn
hfx --- xhk
frs --- lsr
lhk --- lsr
jqt --- nvd
cmg --- rzs
hfx --- pzl
bvb --- xhk
rhn --- xhk
frs --- rsh
cmg --- qnr
nvd --- qnr
qnr --- rzs
bvb --- ntq
frs --- qnr
cmg --- nvd
hfx --- rhn
jqt --- ntq
hfx --- ntq
lsr --- rsh
cmg --- lhk
rsh --- rzs
lhk --- nvd
lsr --- rzs

34
day25/example-graph.mmd Normal file
View File

@@ -0,0 +1,34 @@
flowchart TD
hfx --- pzl
cmg --- lhk
lsr --- rsh
rsh --- rzs
bvb --- rhn
jqt --- xhk
nvd --- pzl
lsr --- pzl
frs --- qnr
frs --- lsr
lhk --- lsr
lsr --- rzs
rhn --- xhk
hfx --- ntq
nvd --- qnr
qnr --- rzs
bvb --- xhk
hfx --- xhk
jqt --- rhn
jqt --- nvd
cmg --- nvd
lhk --- nvd
frs --- rsh
ntq --- xhk
cmg --- rzs
bvb --- ntq
cmg --- qnr
hfx --- rhn
jqt --- ntq
bvb --- cmg
frs --- lhk
pzl --- rsh
bvb --- hfx

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 37 KiB

3
day25/example2 Normal file
View File

@@ -0,0 +1,3 @@
jqt: rhn nvd rsh
rsh: frs pzl lsr
xhk: hfx

301
day25/graph.go Normal file
View File

@@ -0,0 +1,301 @@
package day25
import (
"fmt"
"log"
"os"
"strings"
mapset "github.com/deckarep/golang-set/v2"
)
// Graph is an undirected graph, keyed by node name.
type Graph struct {
	Nodes map[string]*Node
}

// Node is a single vertex; Neighbors holds the names of adjacent nodes.
type Node struct {
	Name      string
	Neighbors mapset.Set[string]
}
// String renders the node as "[name : neighbor-set]" for debug logging.
func (n Node) String() string {
	rendered := fmt.Sprintf("[%s : %+v]", n.Name, n.Neighbors)
	return rendered
}
// ReadGraphFile parses the day 25 wiring diagram from filename into an
// undirected Graph. Panics on read errors.
func ReadGraphFile(filename string) (g Graph) {
	g.Nodes = make(map[string]*Node)
	raw, err := os.ReadFile(filename)
	if err != nil {
		panic(err)
	}
	lines := strings.Split(strings.TrimSpace(string(raw)), "\n")
	for _, l := range lines {
		g.readGraphLine(l)
	}
	return
}
// readGraphLine parses one wiring line of the form "name: n1 n2 ...":
// the left-hand node and every right-hand neighbor are created on demand
// and linked in both directions (the graph is undirected).
func (g *Graph) readGraphLine(l string) {
	name, rest, _ := strings.Cut(l, ":")
	node, exists := g.Nodes[name]
	if !exists {
		node = &Node{
			Name:      name,
			Neighbors: mapset.NewSet[string](),
		}
	}
	for _, neighborName := range strings.Fields(rest) {
		neighbor, exists := g.Nodes[neighborName]
		if !exists {
			neighbor = &Node{
				Name:      neighborName,
				Neighbors: mapset.NewSet[string](),
			}
			g.Nodes[neighborName] = neighbor
		}
		// Undirected edge: record the adjacency on both endpoints.
		neighbor.Neighbors.Add(node.Name)
		node.Neighbors.Add(neighbor.Name)
	}
	g.Nodes[node.Name] = node
	// fix: removed the redundant bare `return` at the end of this
	// void method; also replaced Split+index with strings.Cut.
}
// RemoveEdge deletes the undirected edge between nodes a and b by
// rebuilding each endpoint's neighbor set without the other node.
// Panics if either node is missing.
//
// NOTE: nodeA.Neighbors.Remove(nodeB.Name) hung in practice (reason
// unclear), so the removal is done via Difference with a one-element set.
func (g *Graph) RemoveEdge(a, b string) {
	nodeA, existsA := g.Nodes[a]
	nodeB, existsB := g.Nodes[b]
	if !existsA || !existsB {
		panic("requesting not found node")
	}
	// fix: stripped the dead commented-out debug logging that obscured
	// the two-line body.
	nodeA.Neighbors = nodeA.Neighbors.Difference(mapset.NewSet[string](nodeB.Name))
	nodeB.Neighbors = nodeB.Neighbors.Difference(mapset.NewSet[string](nodeA.Name))
}
// AddEdge links nodes a and b in both directions; panics if either
// endpoint is not already present in the graph.
func (g *Graph) AddEdge(a, b string) {
	endA, okA := g.Nodes[a]
	endB, okB := g.Nodes[b]
	if !okA || !okB {
		panic("requesting not found node")
	}
	endA.Neighbors.Add(endB.Name)
	endB.Neighbors.Add(endA.Name)
}
// findCycle searches for a cycle reachable from an arbitrary node of g
// and, if one exists, returns a single edge (from, to) lying on it.
// The start node comes from map iteration, so which cycle (and which of
// its edges) gets reported is nondeterministic between runs.
func (g *Graph) findCycle() (from, to string, exists bool) {
	// log.Printf(">>>> starting new find cycle")
	// Pick any node as the DFS root; map iteration gives an arbitrary one.
	var firstNode *Node
	for _, n := range g.Nodes {
		firstNode = n
		break
	}
	// log.Printf("initial search from %s and neighbors %+v", firstNode.Name, firstNode.Neighbors)
	// Try a DFS down each edge leaving the root until one closes a cycle.
	for neighborName := range firstNode.Neighbors.Iter() {
		initialVisited := mapset.NewSet[string](firstNode.Name)
		// log.Printf("initial dfs from %s to %s with initial visited %+v", firstNode.Name, neighborName, initialVisited)
		from, to, exists = g.dfcCycle(firstNode.Name, neighborName, initialVisited)
		if exists {
			break
		}
	}
	// log.Printf("<<<< cycle %t, from %s to %s", exists, from, to)
	return
}
// dfcCycle performs the recursive DFS step for findCycle, walking edge
// fromName -> atName with the set of nodes already visited on this path.
// Reaching an already-visited node means the edge just walked closes a
// cycle, and that edge is returned. The immediate predecessor is skipped
// so a single undirected edge is not reported as a two-node "cycle".
func (g *Graph) dfcCycle(fromName, atName string, visited mapset.Set[string]) (cycleFrom, cycleTo string, cycleExists bool) {
	// log.Printf("> step from %+v to %+v. visited : %+v", fromName, atName, visited)
	if visited.Cardinality() == len(g.Nodes) {
		// Every node was seen on this path without closing a cycle.
		log.Println("exit by visited all")
		return
	}
	atNode := g.Nodes[atName]
	if visited.Contains(atName) {
		// atName reached twice: the edge (fromName, atName) closes a cycle.
		return fromName, atName, true
	}
	for neighborName := range atNode.Neighbors.Iter() {
		if neighborName == fromName {
			// Don't immediately walk back along the edge we arrived on.
			continue
		}
		// Clone so sibling branches don't see each other's visits.
		newVisited := visited.Clone()
		newVisited.Add(atName)
		cycleFrom, cycleTo, cycleExists = g.dfcCycle(atName, neighborName, newVisited)
		if cycleExists {
			break
		}
	}
	return
}
// ComponentFrom returns the set of node names in the connected component
// containing fromName (fromName included), found by an iterative
// worklist traversal.
func (g *Graph) ComponentFrom(fromName string) (component mapset.Set[string]) {
	start := g.Nodes[fromName]
	component = mapset.NewSet[string](start.Name)
	frontier := start.Neighbors.Clone()
	for frontier.Cardinality() > 0 {
		current, _ := frontier.Pop()
		if component.Contains(current) {
			continue
		}
		component.Add(current)
		// Queue only neighbors not already absorbed into the component.
		fresh := g.Nodes[current].Neighbors.Difference(component)
		frontier = frontier.Union(fresh)
	}
	return
}
// ToMermaid renders the graph as a mermaid flowchart. Each undirected
// edge is emitted exactly once with its endpoints in lexicographic order;
// the overall line order follows set iteration and is nondeterministic.
func (g *Graph) ToMermaid() string {
	edges := mapset.NewSet[string]()
	for _, node := range g.Nodes {
		for neighborName := range node.Neighbors.Iter() {
			lo, hi := node.Name, neighborName
			if hi < lo {
				lo, hi = hi, lo
			}
			edges.Add(fmt.Sprintf("\t%s --- %s\n", lo, hi))
		}
	}
	var b strings.Builder
	b.WriteString("flowchart TD\n")
	for line := range edges.Iter() {
		b.WriteString(line)
	}
	return b.String()
}
// SaveAsMermaid writes the mermaid rendering of g to filename, panicking
// on any create/write/close error (consistent with the rest of this
// package's error handling).
func (g *Graph) SaveAsMermaid(filename string) {
	mmd := g.ToMermaid()
	file, err := os.Create(filename)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := file.Close(); err != nil {
			panic(err)
		}
	}()
	// fix: the WriteString error was previously discarded.
	if _, err := file.WriteString(mmd); err != nil {
		panic(err)
	}
}
// Edge is an undirected edge in canonical form — smaller <= bigger
// lexicographically — so the same node pair always compares equal.
type Edge struct {
	smaller, bigger string
}

// String renders the edge as "smaller/bigger".
func (e Edge) String() string {
	return e.smaller + "/" + e.bigger
}

// CreateEdge builds the canonical Edge for the unordered pair {a, b}.
func CreateEdge(a, b string) Edge {
	if a < b {
		return Edge{smaller: a, bigger: b}
	}
	return Edge{smaller: b, bigger: a}
}
// RemoveAllCycles repeatedly finds a cycle and removes one edge on it
// until the graph is acyclic, returning the set of removed edges. Per the
// solution's approach, the three puzzle "cut" edges should be among them.
func (g *Graph) RemoveAllCycles() (removedEdges mapset.Set[Edge]) {
	removedEdges = mapset.NewSet[Edge]()
	for {
		from, to, found := g.findCycle()
		if !found {
			return
		}
		// log.Printf("\n!!!! found cycle %s to %s\n", from, to)
		doomed := CreateEdge(from, to)
		removedEdges.Add(doomed)
		g.RemoveEdge(from, to)
	}
}
// TryToSplit attempts the day 25 answer: find the three edges whose
// removal splits the graph into two components and return the product of
// the two component sizes. Returns 0 when this pass finds no split
// (callers retry, since the cycle-removal step is nondeterministic).
//
// Strategy: after RemoveAllCycles the graph is a spanning forest and the
// three "cut" edges are assumed to be among the removed ones. Each
// remaining forest edge is then tentatively removed; if exactly two of
// the initially removed edges would reconnect the resulting components,
// those two plus the tried edge form the cut.
func (g *Graph) TryToSplit() (componentSizeMult int) {
	// first remove all cycles
	removedEdges := g.RemoveAllCycles()
	g.SaveAsMermaid("after-removing-cycles.mmd")
	// log.Printf("all removed edges %+v, two of them are necessary to split initial graph into 2 ", removedEdges)
	triedEdges := mapset.NewSet[Edge]()
	for _, node := range g.Nodes {
		for neighborName := range node.Neighbors.Iter() {
			edge := CreateEdge(neighborName, node.Name)
			// Each undirected edge is seen from both endpoints; try it once.
			if triedEdges.Contains(edge) {
				continue
			}
			triedEdges.Add(edge)
			// first remove the edge
			g.RemoveEdge(edge.bigger, edge.smaller)
			// then ask for components of the nodes of removed edge
			compA := g.ComponentFrom(edge.bigger)
			compB := g.ComponentFrom(edge.smaller)
			// iterate over the initially removed edges. only two of them should be 'connecting'
			// i.e were necessary to remove
			necessaryEdgesCount := 0
			for initiallyRemovedEdge := range removedEdges.Iter() {
				endA, endB := initiallyRemovedEdge.bigger, initiallyRemovedEdge.smaller
				// An edge with both ends inside one component would not
				// reconnect the two halves, so it was not "necessary".
				isNonNecessary := (compA.Contains(endA) && compA.Contains(endB)) || (compB.Contains(endA) && compB.Contains(endB))
				if !isNonNecessary {
					// log.Printf("with edge %+v test removed, the %+v also seems necessary", edge, initiallyRemovedEdge)
					necessaryEdgesCount += 1
				}
			}
			// log.Printf("with edge %+v test removed neessary count is %d", edge, necessaryEdgesCount)
			// if we found 2 necessary, then our currently tried edge is the third necesary to remove
			// and out two components are the searched
			if necessaryEdgesCount == 2 {
				return compA.Cardinality() * compB.Cardinality()
			}
			// in the end add edge back if not fitting
			g.AddEdge(edge.bigger, edge.smaller)
		}
	}
	// now huh. if we didn't find `necessaryEdgesCount == 2`
	// that means 0, 1 or 3
	return
}

96
day25/graph_test.go Normal file
View File

@@ -0,0 +1,96 @@
package day25
import (
"testing"
mapset "github.com/deckarep/golang-set/v2"
)
// TestReadFileExample smoke-tests parsing of the day 25 example file.
func TestReadFileExample(t *testing.T) {
	g := ReadGraphFile("example")
	t.Logf("read graph %+v", g)
}
// TestRemoveEdge logs the example graph before and after removing the
// bvb-hfx edge.
func TestRemoveEdge(t *testing.T) {
	g := ReadGraphFile("example")
	t.Logf("read graph %+v", g)
	g.RemoveEdge("bvb", "hfx")
	t.Logf("after removing bvb-hfv %+v", g)
}
// TestCreateExampleMermaid regenerates example-graph.mmd from the example file.
func TestCreateExampleMermaid(t *testing.T) {
	graph := ReadGraphFile("example")
	graph.SaveAsMermaid("example-graph.mmd")
}
// TestComponentOnInitial expects the whole (connected) example graph to
// be a single component containing every node.
func TestComponentOnInitial(t *testing.T) {
	g := ReadGraphFile("example")
	comp := g.ComponentFrom("bvb")
	t.Logf("got component %+v", comp)
	if got, want := comp.Cardinality(), len(g.Nodes); got != want {
		t.Errorf("should have same size!")
	}
}
// TestComponentOnMini uses the deliberately disconnected example2 file:
// jqt's component must not contain every node.
func TestComponentOnMini(t *testing.T) {
	g := ReadGraphFile("example2")
	comp := g.ComponentFrom("jqt")
	t.Logf("got component %+v", comp)
	if comp.Cardinality() == len(g.Nodes) {
		t.Errorf("should have different size!")
	}
}
// TestRemoveAllCycles removes all cycles from the example graph and
// checks how many of the three known "cut" edges ended up removed.
func TestRemoveAllCycles(t *testing.T) {
	g := ReadGraphFile("example")
	g.SaveAsMermaid("example-before-removing.mmd")
	t.Logf("initial graph is %+v", g)
	edges := g.RemoveAllCycles()
	expectedNecessary := mapset.NewSet[Edge](
		CreateEdge("hfx", "pzl"),
		CreateEdge("bvb", "cmg"),
		CreateEdge("nvd", "jqt"),
	)
	intersection := expectedNecessary.Intersect(edges)
	t.Logf("i expect that exactly two will be in intersection %+v", intersection)
	if intersection.Cardinality() != 2 {
		// Cycle removal is nondeterministic (map/set iteration order), so
		// this can legitimately fail on some runs; rerunning may succeed.
		// fix: fail through the test framework instead of panicking.
		t.Fatal("huh?")
	}
	t.Logf("removed edges %+v", edges)
	t.Logf("after removal graph is %+v", g)
	g.SaveAsMermaid("example-after-removing.mmd")
}
// TestSplittingExample runs the split attempt on the example graph and
// logs the component-size product (expected to match the puzzle example).
func TestSplittingExample(t *testing.T) {
	g := ReadGraphFile("example")
	answer := g.TryToSplit()
	t.Logf("hopefully same as example answer: %d", answer)
}
// TestSplittingInput brute-forces the day 25 answer on the real input:
// TryToSplit's cycle removal is nondeterministic, so the whole attempt is
// retried (re-reading the file each time) until a split is found.
// NOTE(review): if no attempt ever succeeds this loop never terminates;
// a retry cap would make failure explicit.
func TestSplittingInput(t *testing.T) {
	// kind of brute force
	result := 0
	filename := "input"
	for result == 0 {
		g := ReadGraphFile(filename)
		result = g.TryToSplit()
		t.Logf("hopefully as answer: %d", result)
	}
}

Some files were not shown because too many files have changed in this diff Show More