├── 2021 ├── 1 │ ├── input │ └── main.py ├── 2 │ ├── input │ ├── main-2.py │ └── main.py ├── 3 │ ├── input │ └── main.py ├── 4 │ ├── input │ └── main.py ├── 5 │ ├── input │ └── main.py ├── 6 │ ├── input │ └── main.py ├── 7 │ ├── input │ └── main.py ├── 8 │ ├── input │ └── main.py ├── 9 │ ├── input │ └── main.py ├── 10 │ ├── input │ └── main.py ├── 11 │ ├── input │ └── main.py ├── 12 │ ├── input │ └── main.py ├── 13 │ ├── input │ └── main.py ├── requirements.in └── requirements.txt ├── 2022 ├── 1 │ ├── fake │ ├── input │ └── main.py ├── 2 │ ├── fake │ ├── input │ └── main.py ├── 3 │ ├── fake │ ├── input │ └── main.py ├── 4 │ ├── fake │ ├── input │ └── main.py ├── 5 │ ├── fake │ ├── input │ └── main.py ├── 6 │ ├── fake │ ├── input │ └── main.py ├── 7 │ ├── fake │ ├── input │ └── main.py ├── 8 │ ├── fake │ ├── input │ └── main.py ├── 9 │ ├── fake │ ├── input │ ├── main.py │ └── main_keras.py ├── 10 │ ├── fake │ ├── fake1 │ ├── input │ └── main.py ├── 11 │ ├── fake │ ├── input │ └── main.py ├── 12 │ ├── fake │ ├── input │ └── main.py ├── requirements.in └── requirements.txt ├── .gitignore └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. 
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/1
"""

import sys

import tensorflow as tf


class IncreasesCounter:
    """Stateful counter. Counts the number of "increases".

    An increase is every element of the input sequence that is strictly
    greater than the element that precedes it.
    """

    def __init__(self):
        # Running number of increases and the previously seen value.
        self._count = tf.Variable(0, trainable=False, dtype=tf.int64)
        self._prev = tf.Variable(0, trainable=False, dtype=tf.int64)

    def reset(self):
        """Reset the counter state."""
        self._count.assign(0)
        self._prev.assign(0)

    @tf.function
    def __call__(self, dataset: tf.data.Dataset) -> tf.Tensor:
        """
        Args:
            dataset: the dataset containing the ordered sequence of numbers
                     to process.
        Returns:
            The number of increases. tf.Tensor, dtype=tf.int64
        """
        # Seed the comparison with the first element, then scan the rest.
        self._prev.assign(next(iter(dataset.take(1))))
        for number in dataset.skip(1):
            if tf.greater(number, self._prev):
                self._count.assign_add(1)
            self._prev.assign(number)
        return self._count


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    dataset = tf.data.TextLineDataset("input").map(
        lambda string: tf.strings.to_number(string, out_type=tf.int64)
    )

    counter = IncreasesCounter()
    increases = counter(dataset)
    tf.print("[part one] increases: ", increases)

    # --- Part Two ---

    # Create 3 datasets by shifting by 1 element every time.
    # Create batches of 3 elements, sum them. Create a new dataset
    # interleaving the values and call the counter over this dataset.
    datasets = [
        shifted.batch(3, drop_remainder=True).map(tf.reduce_sum)
        for shifted in (dataset, dataset.skip(1), dataset.skip(2))
    ]

    interleaved_dataset = tf.data.Dataset.choose_from_datasets(
        datasets, tf.data.Dataset.range(3).repeat()
    )

    # `IncreasesCounter` is stateful: reset it instead of allocating a
    # second instance (this is exactly what `reset` exists for).
    counter.reset()
    increases = counter(interleaved_dataset)
    tf.print("[part two] increases: ", increases)


if __name__ == "__main__":
    sys.exit(main())
[{[<[{(([<<{{(()[])(<>[])}}{(<<>{}>([]<>))}]>({{((<><>)[[]{}])((()<>))}[[({}[])({}{})]([{}()]([]()) 12 | {[{[{[{({([<{[<>()][(){}]}<[[]()]{(){}}>>]{{(([][])([]{}))}})}(<{{<({}())(<>[]>><[()()]>}{[((){}){<>{}}] 13 | [{{[<<[[(<({<{()()}(()[])>(<{}[]>)}[(<<>{}><[][]>)[({}<>]([]{})]])[<[<<>()><[]<>>]{<[][]>[<>]}>{[{{}() 14 | {[(([<({[<(({([][])<{}()>}{<{}[]><[]<>>))(<{[]<>}[()()]>))({{({}{})}<<<>[]>([][])>}[[{()[]}<<>{}>]{(<>[]) 15 | {[<[(({{{{[((([]())[{}()])[[{}[])<{}<>>])((<()[]>{{}[]})(<<><>>(<>{})))]}}}[([[({[{}[]]<(){}>})( 16 | {[{((<[<{{([{([]())(<>{})}[<{}[]><[][]>]])<((<[]{}>{()[]})[(()())(<>())])(({{}()}([]())))>}}>[{[<<<(( 17 | [{[[{{{<{<<({<{}{}>([][])}{([]{})[(){}]})<{(<>[]){()[]}}{[(){}][[]<>]}>>>}[[([([<>{}][[]{}])[ 18 | [((<({<({([<(<<>()>)[{()<>}{[]<>}]>[[{{}[]}<{}()}]{{()[]}}]]{[<<[][]><<><>>><(<>)>]<{{()<>}[[]<>]}{( 19 | [<{[[[(<(({(<({}<>){<>[]}>){(<()<>>{()()})[{[]()}([]())]}}{[[<()[]){<>{}}]<<<>{}>[(){}]>]<[[()<>]<()[ 20 | <{<<[[[[{[<<{({}<>){()[]}}([<>()]{<>})><(<<><>)<()<>>)[<[]()>[<>{}]]>>(<<({}{})[[][]]>([<>{}]({}<>))>{<<< 21 | {[[{<({<{(<[(([]{}){[]{}})]<[[[][]][{}{}]]{[{}<>}{{}<>}}>><({[()[]]<{}{}>}{({})})<<<()[]>[{}()] 22 | <[[{{([({[{(<{()()}>{(()[])(<>[])}){<[<>[]]{[]()}>[<[]<>>[<>()]]}}]})]<{{(([<<{}()>>(([]()){{}()})]<[(<>[] 23 | [(<<<({{<([{(((){})[{}[]])[{[]{}}(()<>)]}]{({{<>{}}[<><>]}{(()<>)})[[([]{})<()[]>]{(<>[]]<{}{}>}]})[{[[(() 24 | [<[{{[({{{({(<[][]>{[]})[{{}()}<[]{}>]}<[<<>()>({}{})]<{[][]}<[]{}>>>)}<[[{<<>()>(<>{})}<<()<>>[<>{}]>]]<<< 25 | <<<(<<<(<[{<(<(){}>((){}))({()<>}{<><>})>}([<({}())[<>()]><{{}()}([])>][{(())}(([])<<>{}>)})]>)>[[{(( 26 | {[[(((<[{[{[<({}())((){})>[{<>{}}<<>{}>]]{{<<>()>{[]()}}<[()]({}())>}}(([{<><>}][<<>[]><[][]>])<[({}[]]]({ 27 | {(<(({<({<[([<[][]><()()>]({{}{}}[{}<>]))<<(()[])[<>[]]>{{[]{}}(<>[])}>]((<[{}[]]>{<()()>[<>{ 28 | <([{[([(<[([{<[]{}>[<>()]}[{<><>}]])[([([]<>){()[]}])[({{}[]})<[<>[]]<[]<>>>]]]>)])[(({<<{{[<>{}]{<>()}}} 29 | 
(<({{[<<{<{<{{()}{[]<>}]{{(){}}[[]()]}>{{{[][]}[()]}[(()<>)(<>[])]}}{{(<[]><(){}>){(<>())([])}}{[(<>)({}<>) 30 | {<[({{<<{([([((){})](<{}[]><<>{}>))([<<>[]><()()>])][<{{[]<>}(()<>)}<<()[]>{(){}}>>{(<()[]>[{}]) 31 | [<[[{[{{[{(<[<()<>>]<[(){}][{}]>>)}([{{{(){}}[<>{}]}((()<>)<[]<>))}][({<[]{}>{[][]}}({{}{} 32 | {[[([<((<(([(<<><>>{[]()})]<{[()<>]{{}{}}}{[{}[]]<[]{}>}>)((<<[]<>>>({<>()}))(<[[]{}]>{{[]{}}[( 33 | {({[([{(<{{(<[<>[]]<[]<>>>(<{}[]>[<>[]]))<([{}<>]{{}()})<[()()]([]{})>>}(<{<<>{}>([][])}><(({}[]) 34 | {<<{[([{({[[<<<>[]>[[]{}]>]({{<>[]}(()<>)}{[{}<>]<()[]>})]<({[()()][{}[]]}<<()()><(){}>>)>}[([ 35 | <{{([<{{{{([(([]<>){<>{}})][<<<><>>[(){}]>])}{<([({}<>)<<>[]>])({{[]()}(()())){{<>[]}({}{})})>(<({<><>})>< 36 | <({<{[<<({(<{[{}[]]{{}{}}}{[(){}][[]<>)}>[(<(){}><{}{}>){(<>)[[]{}]}])}(<[<(()())><<<>()>{()()}>][ 37 | {[[<<{<({<[(([{}]<<>[]>))[<{<><>}({}{})>]]{([{()<>}{[]()}][{()()}<(){}>])(<({}{})<[]{}>>[[<>{}] 38 | {<[({{{[(<(({{()<>}(()[])}[<<>{}>{{}<>}])<{({}[])<()()>}{(()())<{}()>}>}([<(()())([][])>[([]){()<>}]] 39 | [{<[{{(([{{<({<>[]}(<>()))<[[][]]>>(([{}]({}())){[()[]]((){})})}{((<<>[]>[[]<>])<[<>[]}([]())>) 40 | [<<<{[((({<[((()<>)<()[]>){{<>{}}({}())}]{{<[]()>[[]()]}{[()]<{}[]>}}>[[{[<>()]}((<><>><()<>>)](([[]()](()))[ 41 | (([<<{<[<[({{<()<>><[]{}>}{{()()><<><>>}})<[[{(){}}[[][]]][[[]<>]]]{{((){})}[[<>{}]<[][]>]}>][[({({})[[] 42 | <<((<({[(({[[[{}{}][<>]](<[]{}>[<>{}])]{(([]())(()<>))<[[]()]([]<>)>}}{[(<{}()>((){}))<{{}{}}[()<>]>]({< 43 | {<{(([(([{{{({<>}({}{}))[[[][]]([]<>)]}<<(()[])[[]]>((<><>)<[][]>)>}}[[[(<[]{}>)]{{([]()){{}<>}>[<[ 44 | ([[[{[([{{<{<<()()>(()[])>{({}()){<>[]}}}><(((()())({}[]))([()[]]<<>[]>))[[{{}[]>]{((){})}]>}<[[[[ 45 | {[[[{(<[{(({<<{}()>{<>{}}>[[[]()]<{}{}>]}{[[{}[]]{{}}]((<>))}))[{{{(()[])<{}{}>}<<[][]>{<>()}>}{[{<>{}}<(){}> 46 | <({[<[(<<{[{<{{}<>}(()())>(<(){}>[<><>])}[([{}]{()[]})<{<>{}}{{}}>]]<{([()[]]<()[]>)[([]())([]<>)]}((((){} 47 | 
{[<<[{([((([(({})[{}<>])({[]()}{<>{}})][{{{}<>}}(({}<>))]){<{<<>()>[(){}]}(({}<>)<<>[]>)>[<[()[]]([][]) 48 | <<([<<{[<<<<<(<>{})<{}{}>>>{([()()]<<><>>)(((){}))}>{<<[[]<>][(){}]>[(()<>){[][]}]>{(<()()){<>{}})<[[]{} 49 | [{<{[{{([<({({(){}}<{}<>>){[{}{}]{()<>}}}[{<{}[]><<>[]>}<[[]()][{}<>]>])<{([{}[]][<>[]])<[<>()](()[])>}[(< 50 | ({([{{<[[[({<(<>())<<>()>>([()[]](<><>))}{<(<>[])<{}()>>({{}[]}{{}<>})})[<{[[][]]({}[])}{{{}[]}} 51 | {<[([{<[[<([{<[]<>>(()<>)}[(<>{})<{}{}>]][({<><>}<[]()>)(<()()>[{}{}])])>]]<[[({([<><>]{()})<{<>()}{<> 52 | [(<[[{[{{{<({[()()][[]<>]}<(<>[]){[]{}}>)<[{()()}{[]}]<[{}{}]<{}[]>>>>}{([<[()[]]>{([][]){()[]}}])}}}]<{<[ 53 | {{<{<((<<{[<{[<>()]<<>[]>}<<[]()><{}()>>>({(<>[])<<><>>}{(<>[])(()())})]}>[[[[{<[]()>[()[]]}((<>{})[()<>]) 54 | [<<[<[{(<({{[{[]<>}<{}()>][[<>[]][{}<>]]}<(({}{})({}[]))({[]<>}[{}{}])>}<[(<[]{}>[()()])][[({}[])(() 55 | <{[{[<<(<{([{({}{}){<>{}}}{[()](()<>)}][[({}())({}<>)][<()<>>{<><>}]])}<({{((){})(()[])}}(<[{}[]]<[][]>><{ 56 | {(<{{[[[<<{{<([]<>)<{}()>)({{}{}}([]{}))}[{({}{})}<[()]>]}{[[<{}<>>](({}{})([][]))]{([{}<>]{()<> 57 | <([[{{<(<{(<((<>())[[]{}])>((<{}<>>{()<>})[{()()}(()<>)]))}[{(<{{}{}}[{}()]>)}([[[{}[]]<<><>>][<()()>[ 58 | [(<[{<({{<<<<{<>{}}(<><>)>[{[][]}[[][]]]>{([(){}](<><>))((()<>))}><<{{{}<>}{{}<>}}({<>}<[]<>>)>>>{{{(([])[{}{ 59 | ((({[[[[<<<{<<[][]><<>()>>[<()[]>(()<>)]}{{<<>{}><<>[]>}{[{}{}][()()]}}><{[[{}[]]<{}[])]{{<>[]} 60 | [({[[<<<<{[<(<[]<>>[<>()])[[<><>]<<>()>]>>{[{(<><>)[()()]}{(()())[<>{}]}]{{<<><>><[][]>}({<>[]}(()<> 61 | {[[{(<<([[{(<[{}]>{{<>{}}{{}[]}})}]{<({[(){}][()()]}{(<>{})[[]]})>{<{[[]()][<>[]]}(<[]()>[<>{}]]><{[{}() 62 | ({{[[<<({<<{(<<><>>[{}<>]><((){})>}{{{[]()}([]())}[{{}{}}[<>]]}><<[[{}[]]([][])]<[{}[]]{{}{}} 63 | <(((<(<<<[[[<{[][]}>{<[]>{<>[]}]]{<<{}[]>[()()]>}]([([()()]<[]()>){{{}()}}]<(({}{})(()())) 64 | [{{<[{[<<<[[<<{}<>>({}<>)>[<[]()>(()())>]<<<()[]><[][]>><[[]{}]([]{})>>]>><[{(<([])<{}()>>(<{}<>><<>{}>) 65 | 
<<<(<[[[<(<<[[[][]][{}{}]]>>(<([[]()))({{}[]}[[][]])>))><[{(<({}<>)(()<>)>{[{}<>]})[({<><>}[<>()])< 66 | <{<{[(<<([[(<[[]<>][{}[]]>(<()<>>(<>{}>))]]<{{([{}()]{<>()})<(<>()){()}>}{<<()()>(<>[])>{(())[[]{}]}}}(<{[{} 67 | {[[[[[[{[[{[[[()()][()[]]]{{{}()}(())}]}{<([{}()]{<>()})<[()<>>[<>()]>>[(<[][]>{[]<>})[<{}<>>]]}]]((({({(){} 68 | (<{(<([([{({{{[][]}{[]<>}}[<{}[]>[<>[]]]})}(<[{{(){}}(<><>)}((()[]))](([{}()]([])){([]<>]{{}()}})>[[[{( 69 | <{{{<<[<({([[[()[]]]][{(()[])(()())}])}){<{[{<[]()><[][]>}]([{[]()}<{}[]>][([]())[()[]]])}[<[(<>()){ 70 | (<{{[<<[<{[[[(<>[])[<>()]][[[]()][{}()]]][([<>])<[<>[]][[]<>]>]][[({{}<>}[{}{}])({[]()}{()<>})]]}(<([ 71 | {[({([{(((([{<[]()>}<[()()]([])>]{<[()[]]{[]<>}>[<<>{}><()>]}))))<([<[[<()[]><<>()>]][(<()[]><{}< 72 | ({([<[[{([([<{<>()}>[({}[]){<><>}]][((<>){{}{}})(({}())<{}{}>)])])<((({({}{})[<><>]}(((){}))){(( 73 | {[(<[[[{{[{<<{()()}(()())>({()()}[{}{}])><[{(){}}[[][]]]>}<[<(<>{})(<>())>({{}[]}{[]<>})]<{<{}<>>}{<{}< 74 | <<[({<<{([([{{<>}[<>()]}]{[[[]()]{<>()}](([]{}))})<<[{[]{}}([]())][<[]{}>{[]{}}]>({({}[])} 75 | <<({[[{{<[<[{[(){}][()[]]}]({{<>}<<>{}>}((<>())<{}()>))>]{{{({[][]}[[]<>])({<><>}({}))}{[{()()}[<>{}]][<< 76 | {<<[<{<{{({{((()[])[[][]])[{[][]}[<>{}]]}{[{<><>}([][])][[[]]((){})]})){{{([{}{}]((){})){{<><>} 77 | <[<{{(([(<<[<{{}[]}{[]<>}><([]{})[[]()]>]{{<()<>>([]())}{<()<>><<>[]>}}><[{[<>[]]<[]()>}<<[][]>{[]<>}>][{(() 78 | <([{{[({[([{(<()()>[<>[]]){<[]{}><[]{}>}}])]<{[[<[{}<>][[]()]>[{{}()}[<>[]]]]{{([]{}]{()<>}}[[()< 79 | ({{{<<<[<<{(<([][])(<>{})>((()[])))}{({<()()>[<>{}]}{{{}[]}(()[])})<[{()())<<>[]>]({<>}<[]<>>) 80 | <<<{(<({((<({<()>(<><>)})[[<{}()>(<>())]]>(<<({}[])[<>[]])>({{[]()}(<><>)}(([][])<<>()>)))){[[([<>][{}]) 81 | (<((<([({[<<{<{}()>(<><>)}{[[]{}]({}())}>>(<(<<>[]>)<[<>()]<<>{}>]><{<{}<>>}{{<><>}((){})}>)]{[<(<{ 82 | {(([<{(<{<<<(([][])(()[])){<()[]><{}()>}>{(([]<>)<<>()>)<<{}<>>(<>())>}>{(<{{}<>}<[][]>>[[<>{}]<<>{}>])([(< 83 | 
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/10
"""

import sys

import tensorflow as tf


class Tokenizer(tf.Module):
    """Bracket-language parser for the day-10 puzzle.

    `corrupted` sums the syntax-error score of the first illegal closing
    token of every corrupted line (part one); `incomplete` computes the
    middle autocompletion score of the incomplete lines (part two).
    """

    def __init__(self):
        super().__init__()

        self._opening_tokens = tf.constant(["(", "[", "{", "<"])
        self._closing_tokens = tf.constant([")", "]", "}", ">"])

        # Syntax-error score of an illegal closing token (part one).
        self._syntax_score_table = tf.lookup.StaticHashTable(
            tf.lookup.KeyValueTensorInitializer(
                self._closing_tokens,
                tf.constant([3, 57, 1197, 25137], tf.int64),
            ),
            default_value=tf.constant(-1, tf.int64),
        )

        # Per-character score used to rank autocompletions (part two).
        self._autocomplete_score_table = tf.lookup.StaticHashTable(
            tf.lookup.KeyValueTensorInitializer(
                self._closing_tokens,
                tf.constant([1, 2, 3, 4], tf.int64),
            ),
            default_value=tf.constant(-1, tf.int64),
        )

        # Opening token -> matching closing token, and the inverse map.
        self._open_close = tf.lookup.StaticHashTable(
            tf.lookup.KeyValueTensorInitializer(
                self._opening_tokens,
                self._closing_tokens,
            ),
            default_value="",
        )

        self._close_open = tf.lookup.StaticHashTable(
            tf.lookup.KeyValueTensorInitializer(
                self._closing_tokens,
                self._opening_tokens,
            ),
            default_value="",
        )

        # _pos doubles as stack pointer and, in `incomplete`, as the
        # per-line autocompletion score accumulator.
        self._pos = tf.Variable(0, dtype=tf.int64)
        self._corrupted_score = tf.Variable(0, dtype=tf.int64)

    @tf.function
    def corrupted(self, dataset):
        """Return the total syntax-error score of the corrupted lines.

        Args:
            dataset: dataset of byte-split lines (1-D string tensors).
        Returns:
            tf.Tensor, dtype=tf.int64: sum of the scores of the first
            illegal token of every corrupted line.
        """
        # Reset the accumulator so repeated calls do not compound results
        # (previously the score silently accumulated across invocations).
        self._corrupted_score.assign(0)
        for line in dataset:
            # Stack of the currently open (unmatched) tokens.
            stack = tf.TensorArray(tf.string, size=0, dynamic_size=True)
            self._pos.assign(0)
            for position in tf.range(tf.size(line)):
                current_token = line[position]
                if tf.reduce_any(tf.equal(current_token, self._opening_tokens)):
                    stack = stack.write(tf.cast(self._pos, tf.int32), current_token)
                    self._pos.assign_add(1)
                else:
                    # Closing token: it must match the top of the stack.
                    expected_token = self._open_close.lookup(
                        stack.read(tf.cast(self._pos - 1, tf.int32))
                    )
                    self._pos.assign_sub(1)
                    if tf.not_equal(current_token, expected_token):
                        tf.print(
                            position,
                            ": expected: ",
                            expected_token,
                            " but found ",
                            current_token,
                            " instead",
                        )
                        self._corrupted_score.assign_add(
                            self._syntax_score_table.lookup(current_token)
                        )
                        break
        return self._corrupted_score

    @tf.function
    def incomplete(self, dataset):
        """Return the middle autocompletion score of the incomplete lines.

        Corrupted lines are discarded; for every incomplete line the
        missing closing sequence is derived from the unmatched openings
        and scored (score = score * 5 + token value per character).

        Args:
            dataset: dataset of byte-split lines (1-D string tensors).
        Returns:
            tf.Tensor, dtype=tf.int64: the median line score.
        """
        scores = tf.TensorArray(tf.int64, size=0, dynamic_size=True)
        for line in dataset:
            stack = tf.TensorArray(tf.string, size=0, dynamic_size=True)
            self._pos.assign(0)

            for position in tf.range(tf.size(line)):
                current_token = line[position]
                if tf.reduce_any(tf.equal(current_token, self._opening_tokens)):
                    stack = stack.write(tf.cast(self._pos, tf.int32), current_token)
                    self._pos.assign_add(1)
                else:
                    expected_token = self._open_close.lookup(
                        stack.read(tf.cast(self._pos - 1, tf.int32))
                    )
                    self._pos.assign_sub(1)
                    if tf.not_equal(current_token, expected_token):
                        tf.print(
                            position,
                            ": expected: ",
                            expected_token,
                            " but found ",
                            current_token,
                            " instead",
                        )
                        # Corrupted line: flag it so it is skipped below.
                        self._pos.assign(0)
                        break

            if tf.not_equal(self._pos, 0):  # stack not completely unrolled
                # Remaining openings, innermost first.
                unstacked = tf.squeeze(
                    tf.reverse(
                        tf.expand_dims(stack.stack()[: self._pos], axis=0), axis=[1]
                    )
                )
                closing = self._open_close.lookup(unstacked)
                tf.print("Unstacked missing part: ", closing, summarize=-1)

                # Use pos variable as line score
                self._pos.assign(0)
                for idx in tf.range(tf.shape(closing)[0]):
                    char = closing[idx]
                    self._pos.assign(self._pos * 5)
                    self._pos.assign_add(self._autocomplete_score_table.lookup(char))

                scores = scores.write(scores.size(), self._pos)

        # Sort the scores and take the middle one (odd count guaranteed).
        scores_tensors = tf.sort(scores.stack())
        return scores_tensors[(tf.shape(scores_tensors)[0] - 1) // 2]


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    dataset = tf.data.TextLineDataset("input").map(tf.strings.bytes_split)
    tokenizer = Tokenizer()

    tf.print("Part one: ", tokenizer.corrupted(dataset))
    tf.print("Part two: ", tokenizer.incomplete(dataset))


if __name__ == "__main__":
    sys.exit(main())
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/11
"""

import sys
from typing import Tuple

import tensorflow as tf


class FlashCounter(tf.Module):
    """Simulates the octopus energy grid and counts the flashes.

    `__call__` runs the configured number of steps and returns the total
    flash count (part one); `find_sync_step` keeps stepping until the
    whole grid flashes simultaneously (part two).
    """

    def __init__(self, population, steps):
        super().__init__()

        self._steps = steps
        self._population = tf.Variable(population, dtype=tf.int64)
        # Total flashes in `__call__`; step number in `find_sync_step`.
        self._counter = tf.Variable(0, dtype=tf.int64)

        self._zero = tf.constant(0, dtype=tf.int64)
        self._one = tf.constant(1, dtype=tf.int64)
        self._nine = tf.constant(9, tf.int64)
        self._ten = tf.constant(10, dtype=tf.int64)

        # Coordinates of octopuses that still have to flash this step.
        self._queue = tf.queue.FIFOQueue(-1, [tf.int64])

        # 1 where an octopus already flashed during the current step.
        self._flashmap = tf.Variable(tf.zeros_like(self._population))

    @tf.function
    def _neighs(
        self, grid: tf.Tensor, center: tf.Tensor
    ) -> Tuple[tf.Tensor, tf.Tensor]:
        """Return (values, coordinates) of the in-grid neighbors of `center`.

        The 8-neighborhood is clipped at the borders by choosing an offset
        mask per corner/edge case instead of filtering out-of-range coords.
        """
        y, x = center[0], center[1]

        # shape = (max_y, max_x), i.e. last valid row/column indices.
        shape = tf.shape(grid, tf.int64) - 1

        if tf.logical_and(tf.less(y, 1), tf.less(x, 1)):  # 0,0
            mask = tf.constant([(1, 0), (0, 1), (1, 1)])
        elif tf.logical_and(tf.equal(y, shape[0]), tf.equal(x, shape[1])):  # h,w
            mask = tf.constant([(-1, 0), (0, -1), (-1, -1)])
        elif tf.logical_and(tf.less(y, 1), tf.equal(x, shape[1])):  # top right
            mask = tf.constant([(0, -1), (1, 0), (1, -1)])
        elif tf.logical_and(tf.less(x, 1), tf.equal(y, shape[0])):  # bottom left
            mask = tf.constant([(-1, 0), (-1, 1), (0, 1)])
        elif tf.less(x, 1):  # left
            mask = tf.constant([(1, 0), (-1, 0), (-1, 1), (0, 1), (1, 1)])
        elif tf.equal(x, shape[1]):  # right (bugfix: was shape[0] == height)
            mask = tf.constant([(-1, 0), (1, 0), (0, -1), (-1, -1), (1, -1)])
        elif tf.less(y, 1):  # top
            mask = tf.constant([(0, -1), (0, 1), (1, 0), (1, -1), (1, 1)])
        elif tf.equal(y, shape[0]):  # bottom (bugfix: was shape[1] == width)
            mask = tf.constant([(0, -1), (0, 1), (-1, 0), (-1, -1), (-1, 1)])
        else:  # generic
            mask = tf.constant(
                [(-1, 0), (0, -1), (1, 0), (0, 1), (-1, -1), (1, 1), (-1, 1), (1, -1)]
            )

        coords = center + tf.cast(mask, tf.int64)
        neighborhood = tf.gather_nd(grid, coords)
        return neighborhood, coords

    def _step(self) -> tf.Tensor:
        """Advance the simulation one step; return how many octopuses flashed.

        Shared by `__call__` and `find_sync_step` (previously duplicated).
        """
        # First, the energy level of each octopus increases by 1.
        self._population.assign_add(tf.ones_like(self._population))

        # Then, any octopus with an energy level greater than 9 flashes.
        flashing_coords = tf.where(tf.greater(self._population, self._nine))
        self._queue.enqueue_many(flashing_coords)

        # This increases the energy level of all adjacent octopuses by 1,
        # including diagonal neighbors; levels pushed beyond 9 flash too,
        # chaining until the queue drains.
        # (An octopus can only flash at most once per step.)
        while tf.greater(self._queue.size(), 0):
            p = self._queue.dequeue()
            if tf.greater(self._flashmap[p[0], p[1]], 0):
                continue
            self._flashmap.scatter_nd_update([p], [1])

            _, neighs_coords = self._neighs(self._population, p)
            updates = tf.repeat(
                self._one,
                tf.shape(neighs_coords, tf.int64)[0],
            )
            self._population.scatter_nd_add(neighs_coords, updates)
            flashing_coords = tf.where(tf.greater(self._population, self._nine))
            self._queue.enqueue_many(flashing_coords)

        # Finally, any octopus that flashed during this step has its energy
        # level set to 0, as it used all of its energy to flash.
        indices = tf.where(tf.equal(self._flashmap, self._one))
        flashed = tf.shape(indices, tf.int64)[0]
        if tf.greater(flashed, 0):
            updates = tf.repeat(self._zero, flashed)
            self._population.scatter_nd_update(indices, updates)

        self._flashmap.assign(tf.zeros_like(self._flashmap))
        return flashed

    @tf.function
    def find_sync_step(self):
        """Return the number of steps until all octopuses flash together."""
        # Use the counter as step number.
        self._counter.assign(0)
        while tf.logical_not(tf.reduce_all(tf.equal(self._population, self._zero))):
            self._counter.assign_add(1)
            self._step()

        tf.print(self._counter, self._population, summarize=-1)
        return self._counter

    @tf.function
    def __call__(self):
        """Run `self._steps` steps and return the total number of flashes."""
        for _ in tf.range(self._steps):
            self._counter.assign_add(self._step())
        return self._counter


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    # NOTE(review): this reads the example file "fake", not "input" as the
    # docstring says — confirm which input is intended before running.
    population = tf.convert_to_tensor(
        list(
            tf.data.TextLineDataset("fake")
            .map(tf.strings.bytes_split)
            .map(lambda string: tf.strings.to_number(string, out_type=tf.int64))
        )
    )

    steps = tf.constant(100, tf.int64)
    flash_counter = FlashCounter(population, steps)
    tf.print(flash_counter())

    # -- Part 2 ---

    # If you can calculate the exact moments when the octopuses will all
    # flash simultaneously, you should be able to navigate the cavern.
    # What is the first step during which all octopuses flash?

    # Re-use the status and avoid "steps" iterations
    tf.print(steps + flash_counter.find_sync_step())


if __name__ == "__main__":
    sys.exit(main())
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/12
"""

import sys

import tensorflow as tf


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    connections = tf.data.TextLineDataset("input").map(
        lambda string: tf.strings.split(string, "-")
    )

    # Create a map between human readable node names and
    # numeric indices.
    human_to_id = tf.lookup.experimental.MutableHashTable(tf.string, tf.int64, -1)
    id_to_human = tf.lookup.experimental.MutableHashTable(tf.int64, tf.string, "")

    idx = tf.Variable(0, dtype=tf.int64)
    indices = tf.TensorArray(tf.int64, size=0, dynamic_size=True)
    for edge in connections:
        node_i = human_to_id.lookup(edge[0])
        node_j = human_to_id.lookup(edge[1])

        # Assign a fresh id to every node seen for the first time.
        if tf.equal(node_i, -1):
            human_to_id.insert([edge[0]], [idx])
            id_to_human.insert([idx], [edge[0]])
            node_i = tf.identity(idx)
            idx.assign_add(1)
        if tf.equal(node_j, -1):
            human_to_id.insert([edge[1]], [idx])
            id_to_human.insert([idx], [edge[1]])
            node_j = tf.identity(idx)
            idx.assign_add(1)

        ij = tf.convert_to_tensor([node_i, node_j])
        indices = indices.write(indices.size(), ij)

    # Build the symmetric adjacency matrix of the (undirected) cave graph.
    indices = indices.stack()
    indices = tf.reshape(indices, (-1, 2))
    A = tf.tensor_scatter_nd_update(
        tf.zeros((idx, idx), dtype=tf.int64),
        indices,
        tf.repeat(tf.cast(1, tf.int64), tf.shape(indices)[0]),
    )
    A = A + tf.transpose(A)

    # Small caves (lowercase names) may be visited only once per path.
    # Bugfix: the previous index-arithmetic selection
    # (tf.equal(range, match * range)) always kept the key at position 0,
    # even when that cave was uppercase; a boolean mask selects exactly
    # the lowercase keys.
    keys = human_to_id.export()[0]
    visit_only_once_human = tf.boolean_mask(
        keys, tf.strings.regex_full_match(keys, "[a-z]+")
    )
    visit_only_once_id = human_to_id.lookup(visit_only_once_human)

    # Visit multiple times = {keys} - {only once}
    visit_multiple_times_human = tf.sparse.to_dense(
        tf.sets.difference(
            tf.reshape(keys, (1, -1)), tf.reshape(visit_only_once_human, (1, -1))
        )
    )
    visit_multiple_times_human = tf.squeeze(visit_multiple_times_human)
    visit_multiple_times_id = human_to_id.lookup(visit_multiple_times_human)

    # Goal: go from start to end, finding all the possible paths.
    # The adjacency matrix gives the neighbors of every node; extracting
    # the neighbor coordinates gives their IDs, and the current path tells
    # whether a neighbor may still be visited.

    start_id, end_id = human_to_id.lookup(["start", "end"])

    # Every neighbor is a possible new, distinct path, hence
    # every time we visit a new neighbor we should have a different state.

    # Note: the problem asks us to COUNT the number of possible paths.
    count = tf.Variable(0, dtype=tf.int64)
    paths = []

    @tf.function
    def _neigh_ids(A, node_id):
        # IDs of the nodes adjacent to node_id.
        return tf.squeeze(tf.where(tf.equal(A[node_id, :], 1)))

    def _visit(A: tf.Tensor, node_id: tf.Tensor, path: tf.Tensor):
        # Part-one DFS: small caves at most once per path.
        current_path = tf.concat([path, [node_id]], axis=0)
        if tf.equal(node_id, end_id):
            paths.append(current_path)
            count.assign_add(1)
            return current_path

        neighs = _neigh_ids(A, node_id)
        neigh_shape = tf.shape(neighs)
        if tf.equal(tf.size(neighs), 0):
            return current_path

        # A single neighbor comes back as a scalar: promote it to rank 1.
        if tf.equal(tf.size(neigh_shape), 0):
            neighs = tf.expand_dims(neighs, 0)
            neigh_shape = tf.shape(neighs)

        for idx in tf.range(neigh_shape[0]):
            neigh_id = neighs[idx]
            # Skip small caves already on the current path.
            if tf.logical_and(
                tf.reduce_any(tf.equal(neigh_id, visit_only_once_id)),
                tf.reduce_any(tf.equal(neigh_id, current_path)),
            ):
                continue
            _visit(A, neigh_id, current_path)
        return current_path

    # All the paths start from start.
    neighs = _neigh_ids(A, start_id)
    for idx in tf.range(tf.shape(neighs)[0]):
        neigh_id = neighs[idx]
        _visit(A, neigh_id, [start_id])

    tf.print("Part one: ", count)

    # for path in paths:
    #     tf.print(id_to_human.lookup(path), summarize=-1)

    count.assign(0)
    inner_count = tf.Variable(0)

    def _visit2(A: tf.Tensor, node_id: tf.Tensor, path: tf.Tensor):
        # Part-two DFS: one single small cave may be visited twice.
        current_path = tf.concat([path, [node_id]], axis=0)

        # Skip start
        if tf.equal(node_id, start_id):
            return current_path

        # Success on end node
        if tf.equal(node_id, end_id):
            # paths.append(current_path)
            count.assign_add(1)
            return current_path

        # Prune paths where 2 or more small caves were visited twice.
        visited, visited_idx, visited_count = tf.unique_with_counts(current_path)
        visited = tf.gather_nd(visited, tf.where(tf.greater(visited_count, 1)))
        inner_count.assign(0)
        for idx in tf.range(tf.shape(visited)[0]):
            if tf.reduce_any(tf.equal(visited[idx], visit_only_once_id)):
                inner_count.assign_add(1)

        if tf.greater_equal(inner_count, 2):
            return current_path

        neighs = _neigh_ids(A, node_id)
        neigh_shape = tf.shape(neighs)
        if tf.equal(tf.size(neighs), 0):
            return current_path

        if tf.equal(tf.size(neigh_shape), 0):
            neighs = tf.expand_dims(neighs, 0)
            neigh_shape = tf.shape(neighs)

        for idx in tf.range(neigh_shape[0]):
            neigh_id = neighs[idx]

            # Skip small caves already visited twice on this path.
            if tf.logical_and(
                tf.reduce_any(tf.equal(neigh_id, visit_only_once_id)),
                tf.greater(
                    tf.reduce_sum(tf.cast(tf.equal(neigh_id, current_path), tf.int32)),
                    1,
                ),
            ):
                continue

            _visit2(A, neigh_id, current_path)

        return current_path

    neighs = _neigh_ids(A, start_id)
    for idx in tf.range(tf.shape(neighs)[0]):
        neigh_id = neighs[idx]
        _visit2(A, neigh_id, [start_id])

    # for path in paths:
    #     tf.print(id_to_human.lookup(path), summarize=-1)

    tf.print("Part two: ", count)


if __name__ == "__main__":
    sys.exit(main())
980,639 132 | 1248,725 133 | 1250,570 134 | 807,716 135 | 1138,375 136 | 1076,354 137 | 922,561 138 | 417,434 139 | 831,626 140 | 115,861 141 | 1255,710 142 | 793,473 143 | 77,499 144 | 964,169 145 | 989,343 146 | 875,183 147 | 758,224 148 | 398,397 149 | 1208,670 150 | 903,266 151 | 77,403 152 | 82,176 153 | 427,313 154 | 139,152 155 | 810,550 156 | 577,619 157 | 867,665 158 | 443,665 159 | 485,775 160 | 272,686 161 | 196,833 162 | 291,163 163 | 1029,387 164 | 1221,38 165 | 1019,611 166 | 544,78 167 | 299,826 168 | 1289,854 169 | 1160,751 170 | 1230,469 171 | 408,290 172 | 1056,176 173 | 281,507 174 | 170,828 175 | 18,828 176 | 1175,329 177 | 539,413 178 | 698,764 179 | 373,59 180 | 433,376 181 | 1020,504 182 | 85,771 183 | 151,716 184 | 934,245 185 | 172,792 186 | 383,78 187 | 1223,725 188 | 348,278 189 | 3,453 190 | 152,525 191 | 1037,618 192 | 1197,686 193 | 319,749 194 | 328,318 195 | 623,42 196 | 1258,521 197 | 441,14 198 | 862,802 199 | 1233,459 200 | 1140,791 201 | 994,509 202 | 417,262 203 | 517,712 204 | 1029,507 205 | 52,821 206 | 984,133 207 | 1120,96 208 | 1186,764 209 | 482,760 210 | 1114,754 211 | 190,96 212 | 728,494 213 | 1071,602 214 | 1019,122 215 | 92,726 216 | 1221,856 217 | 710,686 218 | 1140,49 219 | 485,208 220 | 661,775 221 | 774,511 222 | 154,123 223 | 383,816 224 | 986,3 225 | 455,409 226 | 200,614 227 | 584,749 228 | 616,662 229 | 493,353 230 | 572,649 231 | 80,171 232 | 932,177 233 | 1237,614 234 | 574,171 235 | 480,247 236 | 656,193 237 | 1153,449 238 | 502,257 239 | 1303,632 240 | 733,403 241 | 1274,672 242 | 872,515 243 | 219,631 244 | 353,9 245 | 1020,47 246 | 683,602 247 | 62,169 248 | 734,586 249 | 325,614 250 | 482,323 251 | 1160,143 252 | 987,5 253 | 982,766 254 | 1115,495 255 | 1086,894 256 | 441,546 257 | 816,675 258 | 1272,662 259 | 929,618 260 | 865,480 261 | 237,262 262 | 729,728 263 | 1197,159 264 | 880,289 265 | 68,148 266 | 816,227 267 | 924,198 268 | 113,686 269 | 704,245 270 | 745,486 271 | 500,550 272 | 510,824 273 | 
817,353 274 | 708,571 275 | 1021,77 276 | 929,276 277 | 704,79 278 | 1061,51 279 | 1002,514 280 | 351,152 281 | 934,693 282 | 216,709 283 | 736,469 284 | 130,385 285 | 1221,546 286 | 276,254 287 | 1054,556 288 | 1164,7 289 | 2,107 290 | 812,626 291 | 610,383 292 | 1069,164 293 | 738,848 294 | 462,73 295 | 618,579 296 | 299,516 297 | 126,775 298 | 415,389 299 | 1044,113 300 | 1115,47 301 | 1062,586 302 | 746,129 303 | 976,397 304 | 1309,166 305 | 602,323 306 | 494,227 307 | 888,820 308 | 835,662 309 | 728,400 310 | 1009,397 311 | 410,718 312 | 273,618 313 | 253,453 314 | 865,59 315 | 528,775 316 | 1002,380 317 | 224,446 318 | 1305,327 319 | 364,166 320 | 189,744 321 | 67,658 322 | 1307,67 323 | 572,848 324 | 957,9 325 | 454,602 326 | 743,837 327 | 811,593 328 | 113,882 329 | 77,275 330 | 1232,138 331 | 482,326 332 | 216,653 333 | 987,827 334 | 984,761 335 | 1121,150 336 | 90,765 337 | 274,323 338 | 701,123 339 | 308,514 340 | 1159,178 341 | 80,723 342 | 1154,268 343 | 1255,16 344 | 20,77 345 | 745,856 346 | 729,726 347 | 628,229 348 | 475,662 349 | 120,659 350 | 1091,711 351 | 688,476 352 | 500,312 353 | 408,171 354 | 996,543 355 | 323,453 356 | 826,66 357 | 508,775 358 | 89,632 359 | 601,52 360 | 686,18 361 | 638,455 362 | 1094,67 363 | 562,509 364 | 102,593 365 | 517,312 366 | 435,263 367 | 594,829 368 | 930,323 369 | 1073,348 370 | 524,509 371 | 659,653 372 | 895,690 373 | 443,372 374 | 1149,670 375 | 763,408 376 | 160,227 377 | 36,628 378 | 571,28 379 | 687,70 380 | 1307,889 381 | 200,539 382 | 87,243 383 | 818,753 384 | 1230,873 385 | 152,173 386 | 408,738 387 | 219,711 388 | 890,845 389 | 606,79 390 | 1168,77 391 | 1200,78 392 | 688,154 393 | 335,184 394 | 629,173 395 | 793,712 396 | 318,571 397 | 769,686 398 | 1310,141 399 | 234,354 400 | 3,403 401 | 155,322 402 | 579,516 403 | 706,53 404 | 977,241 405 | 1178,652 406 | 1054,637 407 | 5,791 408 | 316,593 409 | 681,528 410 | 226,152 411 | 838,53 412 | 316,301 413 | 947,283 414 | 647,262 415 | 833,427 416 | 
1014,233 417 | 1058,502 418 | 562,207 419 | 224,894 420 | 836,775 421 | 869,14 422 | 256,273 423 | 541,686 424 | 1227,345 425 | 1074,99 426 | 92,562 427 | 489,306 428 | 1149,801 429 | 172,760 430 | 811,301 431 | 1230,425 432 | 512,718 433 | 1084,152 434 | 213,14 435 | 813,445 436 | 628,665 437 | 1011,516 438 | 850,773 439 | 21,40 440 | 1033,406 441 | 783,43 442 | 719,840 443 | 1225,164 444 | 219,152 445 | 1222,173 446 | 1038,686 447 | 236,99 448 | 1220,99 449 | 761,824 450 | 909,814 451 | 740,756 452 | 676,278 453 | 219,183 454 | 1218,168 455 | 1233,485 456 | 281,682 457 | 403,465 458 | 672,455 459 | 1037,276 460 | 649,775 461 | 622,292 462 | 1303,262 463 | 435,94 464 | 68,631 465 | 402,201 466 | 326,133 467 | 574,51 468 | 986,891 469 | 1120,350 470 | 912,621 471 | 970,792 472 | 549,824 473 | 253,677 474 | 536,292 475 | 1290,177 476 | 28,190 477 | 356,93 478 | 842,40 479 | 769,208 480 | 653,877 481 | 1215,460 482 | 803,8 483 | 1262,245 484 | 417,348 485 | 865,507 486 | 1116,325 487 | 580,828 488 | 314,211 489 | 1233,185 490 | 582,624 491 | 818,499 492 | 661,464 493 | 726,749 494 | 933,488 495 | 77,459 496 | 604,702 497 | 454,7 498 | 435,711 499 | 416,873 500 | 1130,439 501 | 137,312 502 | 113,208 503 | 582,494 504 | 1138,102 505 | 924,310 506 | 281,387 507 | 569,663 508 | 652,143 509 | 353,885 510 | 602,633 511 | 816,219 512 | 606,649 513 | 191,565 514 | 55,464 515 | 846,96 516 | 422,759 517 | 253,441 518 | 560,311 519 | 36,679 520 | 1290,525 521 | 1158,173 522 | 333,775 523 | 704,649 524 | 527,857 525 | 692,579 526 | 72,176 527 | 584,690 528 | 562,687 529 | 1121,822 530 | 599,246 531 | 319,593 532 | 1124,488 533 | 657,877 534 | 457,675 535 | 1179,266 536 | 668,568 537 | 577,171 538 | 170,845 539 | 865,387 540 | 452,578 541 | 174,141 542 | 119,203 543 | 70,674 544 | 745,348 545 | 12,326 546 | 1240,749 547 | 239,389 548 | 574,313 549 | 709,52 550 | 542,282 551 | 31,824 552 | 110,368 553 | 182,219 554 | 353,437 555 | 755,133 556 | 1029,212 557 | 571,866 558 | 494,675 
559 | 715,212 560 | 920,844 561 | 1181,667 562 | 704,525 563 | 20,369 564 | 729,814 565 | 1168,176 566 | 290,119 567 | 1196,143 568 | 748,338 569 | 1183,25 570 | 221,376 571 | 1258,883 572 | 331,226 573 | 700,383 574 | 731,516 575 | 649,430 576 | 227,502 577 | 783,3 578 | 959,152 579 | 440,836 580 | 726,369 581 | 70,749 582 | 1171,152 583 | 897,388 584 | 154,344 585 | 661,430 586 | 994,385 587 | 507,8 588 | 800,70 589 | 739,345 590 | 1192,511 591 | 478,751 592 | 853,890 593 | 298,821 594 | 808,39 595 | 687,852 596 | 254,176 597 | 127,473 598 | 1195,553 599 | 733,619 600 | 214,58 601 | 788,793 602 | 723,455 603 | 465,150 604 | 10,471 605 | 373,235 606 | 623,294 607 | 281,212 608 | 1016,173 609 | 912,257 610 | 1061,388 611 | 1190,235 612 | 546,656 613 | 378,389 614 | 676,616 615 | 1240,397 616 | 783,409 617 | 400,64 618 | 569,282 619 | 1292,841 620 | 663,262 621 | 1262,705 622 | 457,4 623 | 1232,308 624 | 226,742 625 | 649,327 626 | 952,381 627 | 567,651 628 | 724,263 629 | 940,316 630 | 956,240 631 | 53,500 632 | 1298,326 633 | 1016,866 634 | 1223,690 635 | 688,292 636 | 1076,332 637 | 632,782 638 | 454,740 639 | 131,495 640 | 152,141 641 | 989,653 642 | 189,488 643 | 1086,224 644 | 1072,873 645 | 651,515 646 | 870,58 647 | 932,169 648 | 1258,409 649 | 700,511 650 | 769,735 651 | 5,567 652 | 321,241 653 | 200,740 654 | 576,586 655 | 581,726 656 | 303,742 657 | 132,652 658 | 1014,428 659 | 380,633 660 | 330,815 661 | 73,280 662 | 1207,488 663 | 536,511 664 | 902,604 665 | 446,852 666 | 448,11 667 | 77,485 668 | 594,65 669 | 1039,78 670 | 708,11 671 | 482,550 672 | 331,345 673 | 373,771 674 | 333,103 675 | 959,742 676 | 1220,795 677 | 36,215 678 | 848,821 679 | 726,145 680 | 482,568 681 | 682,665 682 | 937,771 683 | 298,353 684 | 774,383 685 | 28,373 686 | 826,49 687 | 364,266 688 | 716,0 689 | 883,581 690 | 1094,795 691 | 212,306 692 | 136,789 693 | 401,814 694 | 659,889 695 | 422,74 696 | 541,208 697 | 20,817 698 | 364,728 699 | 785,785 700 | 937,212 701 | 1104,204 
702 | 74,290 703 | 855,309 704 | 7,856 705 | 587,439 706 | 663,38 707 | 174,199 708 | 282,679 709 | 1120,798 710 | 845,582 711 | 290,847 712 | 92,332 713 | 1243,236 714 | 1020,266 715 | 443,105 716 | 497,449 717 | 728,270 718 | 124,764 719 | 1280,513 720 | 535,203 721 | 741,771 722 | 278,201 723 | 708,633 724 | 599,269 725 | 842,854 726 | 994,301 727 | 18,841 728 | 324,891 729 | 480,641 730 | 740,138 731 | 95,882 732 | 734,308 733 | 577,282 734 | 793,296 735 | 932,389 736 | 705,488 737 | 52,521 738 | 180,455 739 | 1190,540 740 | 581,814 741 | 420,637 742 | 160,667 743 | 478,359 744 | 190,798 745 | 150,143 746 | 130,826 747 | 89,348 748 | 569,123 749 | 771,301 750 | 853,442 751 | 1146,753 752 | 733,395 753 | 348,333 754 | 441,287 755 | 216,99 756 | 3,67 757 | 176,183 758 | 547,856 759 | 68,711 760 | 605,406 761 | 544,627 762 | 105,152 763 | 856,103 764 | 1032,201 765 | 618,131 766 | 599,878 767 | 604,841 768 | 1233,275 769 | 479,626 770 | 1151,522 771 | 21,854 772 | 668,543 773 | 1028,740 774 | 113,159 775 | 746,577 776 | 586,863 777 | 1247,840 778 | 272,208 779 | 201,138 780 | 249,58 781 | 159,372 782 | 87,725 783 | 776,540 784 | 907,579 785 | 1099,677 786 | 468,854 787 | 748,687 788 | 303,152 789 | 1094,241 790 | 52,373 791 | 85,739 792 | 48,189 793 | 52,883 794 | 157,445 795 | 430,0 796 | 185,234 797 | 127,627 798 | 6,887 799 | 527,627 800 | 582,176 801 | 1118,521 802 | 216,67 803 | 386,198 804 | 1150,227 805 | 172,344 806 | 687,600 807 | 120,690 808 | 387,712 809 | 1203,632 810 | 888,123 811 | 689,676 812 | 468,488 813 | 933,770 814 | 199,267 815 | 256,497 816 | 586,263 817 | 895,389 818 | 110,627 819 | 479,268 820 | 741,730 821 | 694,232 822 | 666,301 823 | 704,490 824 | 129,667 825 | 1242,711 826 | 830,641 827 | 194,325 828 | 618,315 829 | 910,64 830 | 1052,298 831 | 1171,742 832 | 869,546 833 | 1153,880 834 | 1148,128 835 | 381,173 836 | 1233,11 837 | 1240,130 838 | 103,488 839 | 274,267 840 | 402,693 841 | 594,381 842 | 495,267 843 | 246,425 844 | 1292,268 
845 | 602,37 846 | 539,677 847 | 793,421 848 | 711,16 849 | 89,856 850 | 1073,460 851 | 1020,180 852 | 763,856 853 | 438,205 854 | 365,817 855 | 1210,600 856 | 372,224 857 | 135,565 858 | 853,452 859 | 239,242 860 | 363,283 861 | 1079,348 862 | 142,176 863 | 107,632 864 | 534,242 865 | 530,130 866 | 142,77 867 | 1290,77 868 | 1218,385 869 | 249,51 870 | 214,57 871 | 687,42 872 | 972,77 873 | 1173,296 874 | 808,305 875 | 813,1 876 | 1225,155 877 | 333,791 878 | 774,607 879 | 1300,23 880 | 1140,103 881 | 340,550 882 | 154,582 883 | 10,423 884 | 38,662 885 | 565,856 886 | 354,688 887 | 333,241 888 | 1256,208 889 | 272,656 890 | 736,51 891 | 880,672 892 | 1145,894 893 | 1022,379 894 | 566,721 895 | 668,102 896 | 503,716 897 | 912,497 898 | 610,511 899 | 1292,828 900 | 282,222 901 | 22,782 902 | 572,718 903 | 904 | fold along x=655 905 | fold along y=447 906 | fold along x=327 907 | fold along y=223 908 | fold along x=163 909 | fold along y=111 910 | fold along x=81 911 | fold along y=55 912 | fold along x=40 913 | fold along y=27 914 | fold along y=13 915 | fold along y=6 916 | -------------------------------------------------------------------------------- /2021/13/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/13 5 | """ 6 | 7 | import sys 8 | 9 | import tensorflow as tf 10 | 11 | 12 | def main(): 13 | """Entrypoint. 
Suppose the "input" file is in the cwd.""" 14 | 15 | dataset = tf.data.TextLineDataset("input") 16 | # Split in two: coordinates, and fold instructions 17 | # Empty line is the separator 18 | 19 | for idx, line in enumerate(dataset): 20 | if tf.equal(tf.strings.length(line), 0): 21 | coordinates = dataset.take(idx) 22 | instructions = dataset.skip(idx + 1) 23 | break 24 | 25 | coordinates = coordinates.map(lambda line: tf.strings.split(line, ",")).map( 26 | lambda pair: tf.strings.to_number(pair, tf.int32) 27 | ) 28 | 29 | instructions = instructions.map( 30 | lambda line: tf.strings.regex_replace( 31 | line, r"fold along ([x,y])=(\d+)", r"\1=\2" 32 | ) 33 | ).map(lambda line: tf.strings.split(line, "=")) 34 | 35 | coordinates = tf.convert_to_tensor(list(coordinates)) 36 | shape = ( 37 | tf.convert_to_tensor( 38 | [tf.reduce_max(coordinates[:, 0]), tf.reduce_max(coordinates[:, 1])] 39 | ) 40 | + 1 41 | ) 42 | 43 | sheet = tf.Variable(tf.zeros(shape, tf.int32)) 44 | sheet.scatter_nd_update(coordinates, tf.repeat(1, tf.shape(coordinates)[0])) 45 | 46 | for idx, fold in enumerate(instructions): 47 | axis = fold[0] 48 | coord = tf.strings.to_number(fold[1], tf.int32) 49 | 50 | if tf.equal(axis, "y"): 51 | sub = sheet[:, coord + 1 :] 52 | indices_y, indices_x = tf.meshgrid( 53 | tf.range(tf.shape(sheet)[0]), tf.range(coord + 1, tf.shape(sheet)[1]) 54 | ) 55 | indices = tf.stack([indices_y, indices_x], axis=-1) 56 | else: 57 | sub = sheet[coord + 1 :, :] 58 | indices_y, indices_x = tf.meshgrid( 59 | tf.range(coord + 1, tf.shape(sheet)[0]), tf.range(tf.shape(sheet)[1]) 60 | ) 61 | indices = tf.stack([indices_y, indices_x], axis=-1) 62 | 63 | indices = tf.reshape(indices, (-1, 2)) 64 | updates = tf.repeat(0, tf.shape(indices)[0]) 65 | # Set source positions to zero 66 | sheet.scatter_nd_update(indices, updates) 67 | 68 | # If axis == y, fold over left (tensorflow orientation) 69 | if tf.equal(axis, "y"): 70 | # tf.linalg.LinearOperatorPermutation uses the same 71 | # idea 
of tf.transpose but instead of swapping dimensions it swappes 72 | # ROWS. 73 | 74 | # Hence, for this folding we need to transpose first 75 | sub = tf.transpose(sub) 76 | 77 | perm = tf.range(tf.shape(sub)[0] - 1, -1, -1) 78 | operator = tf.linalg.LinearOperatorPermutation(perm) 79 | sub = tf.cast(operator.matmul(tf.cast(sub, tf.float32)), tf.int32) 80 | 81 | # back to the original position 82 | sub = tf.transpose(sub) 83 | 84 | # add on the left side the submatrix 85 | indices_y, indices_x = tf.meshgrid( 86 | tf.range(tf.shape(sheet)[0]), tf.range(coord) 87 | ) 88 | indices = tf.stack([indices_y, indices_x], axis=-1) 89 | updates = tf.transpose(sub) 90 | sheet.scatter_nd_add(indices, updates) 91 | # If axis == x, fold up (tensorflow orientation) 92 | if tf.equal(axis, "x"): 93 | perm = tf.range(tf.shape(sub)[0] - 1, -1, -1) 94 | operator = tf.linalg.LinearOperatorPermutation(perm) 95 | sub = tf.cast(operator.matmul(tf.cast(sub, tf.float32)), tf.int32) 96 | indices_y, indices_x = tf.meshgrid( 97 | tf.range(coord), tf.range(tf.shape(sheet)[1]) 98 | ) 99 | indices = tf.stack([indices_y, indices_x], axis=-1) 100 | updates = tf.transpose(sub) 101 | sheet.scatter_nd_add(indices, updates) 102 | 103 | if tf.equal(idx, 0): 104 | tf.print( 105 | "Part one: ", 106 | tf.reduce_sum(tf.cast(tf.greater_equal(sheet, 1), tf.int64)), 107 | ) 108 | 109 | display = tf.transpose(sheet) 110 | tf.print(display, summarize=-1) 111 | 112 | 113 | if __name__ == "__main__": 114 | sys.exit(main()) 115 | -------------------------------------------------------------------------------- /2021/2/main-2.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/2 (Part 2) 5 | """ 6 | 7 | import sys 8 | from enum import IntEnum, auto 9 | from typing import Tuple 10 | 11 | import tensorflow as tf 12 | 13 | 14 | class Action(IntEnum): 15 | """Action enum, to map the direction read to an action to 
perform.""" 16 | 17 | INCREASE_HORIZONTAL = auto() 18 | INCREASE_DEPTH = auto() 19 | DECREASE_DEPTH = auto() 20 | INCREASE_AIM = auto() 21 | DECREASE_AIM = auto() 22 | INCREASE_HORIZONTAL_MUTIPLY_BY_AIM = auto() 23 | 24 | 25 | class PositionCounter: 26 | """Stateful counter. Get the final horizontal position and depth, keeping track of the aim.""" 27 | 28 | def __init__(self): 29 | self._horizontal_position = tf.Variable(0, trainable=False, dtype=tf.int64) 30 | self._depth = tf.Variable(0, trainable=False, dtype=tf.int64) 31 | self._aim = tf.Variable(0, trainable=False, dtype=tf.int64) 32 | 33 | def reset(self): 34 | """Reset the counter state.""" 35 | self._horizontal_position.assign(0) 36 | self._depth.assign(0) 37 | 38 | @tf.function 39 | def __call__(self, dataset: tf.data.Dataset) -> Tuple[tf.Tensor, tf.Tensor]: 40 | """ 41 | Args: 42 | dataset: dataset yielding tuples (action, value), where action is 43 | a valida Action enum. 44 | Returns: 45 | (horizontal_position, depth) 46 | """ 47 | for action, amount in dataset: 48 | if tf.equal(action, Action.INCREASE_DEPTH): 49 | self._depth.assign_add(amount) 50 | elif tf.equal(action, Action.DECREASE_DEPTH): 51 | self._depth.assign_sub(amount) 52 | elif tf.equal(action, Action.INCREASE_HORIZONTAL): 53 | self._horizontal_position.assign_add(amount) 54 | elif tf.equal(action, Action.INCREASE_HORIZONTAL_MUTIPLY_BY_AIM): 55 | self._horizontal_position.assign_add(amount) 56 | self._depth.assign_add(self._aim * amount) 57 | elif tf.equal(action, Action.DECREASE_AIM): 58 | self._aim.assign_sub(amount) 59 | elif tf.equal(action, Action.INCREASE_AIM): 60 | self._aim.assign_add(amount) 61 | return self._horizontal_position, self._depth 62 | 63 | 64 | def main(): 65 | """Entrypoint. 
Suppose the "input" file is in the cwd.""" 66 | 67 | def _processor(line): 68 | splits = tf.strings.split(line) 69 | direction = splits[0] 70 | amount = splits[1] 71 | 72 | if tf.equal(direction, "forward"): 73 | action = Action.INCREASE_HORIZONTAL_MUTIPLY_BY_AIM 74 | elif tf.equal(direction, "down"): 75 | action = Action.INCREASE_AIM 76 | elif tf.equal(direction, "up"): 77 | action = Action.DECREASE_AIM 78 | else: 79 | action = -1 80 | # tf.debugging.Assert(False, f"Unhandled direction: {direction}") 81 | 82 | amount = tf.strings.to_number(amount, out_type=tf.int64) 83 | return action, amount 84 | 85 | dataset = tf.data.TextLineDataset("input").map(_processor) 86 | 87 | counter = PositionCounter() 88 | horizontal_position, depth = counter(dataset) 89 | result = horizontal_position * depth 90 | tf.print("[part two] result: ", result) 91 | 92 | 93 | if __name__ == "__main__": 94 | sys.exit(main()) 95 | -------------------------------------------------------------------------------- /2021/2/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/2 5 | """ 6 | 7 | import sys 8 | from enum import IntEnum, auto 9 | from typing import Tuple 10 | 11 | import tensorflow as tf 12 | 13 | 14 | class Action(IntEnum): 15 | """Action enum, to map the direction read to an action to perform.""" 16 | 17 | INCREASE_HORIZONTAL = auto() 18 | INCREASE_DEPTH = auto() 19 | DECREASE_DEPTH = auto() 20 | 21 | 22 | class PositionCounter: 23 | """Stateful counter. 
Get the final horizontal position and depth.""" 24 | 25 | def __init__(self): 26 | self._horizontal_position = tf.Variable(0, trainable=False, dtype=tf.int64) 27 | self._depth = tf.Variable(0, trainable=False, dtype=tf.int64) 28 | 29 | def reset(self): 30 | """Reset the counter state.""" 31 | self._horizontal_position.assign(0) 32 | self._depth.assign(0) 33 | 34 | @tf.function 35 | def __call__(self, dataset: tf.data.Dataset) -> Tuple[tf.Tensor, tf.Tensor]: 36 | """ 37 | Args: 38 | dataset: dataset yielding tuples (action, value), where action is 39 | a valida Action enum. 40 | Returns: 41 | (horizontal_position, depth) 42 | """ 43 | for action, amount in dataset: 44 | if tf.equal(action, Action.INCREASE_DEPTH): 45 | self._depth.assign_add(amount) 46 | elif tf.equal(action, Action.DECREASE_DEPTH): 47 | self._depth.assign_sub(amount) 48 | elif tf.equal(action, Action.INCREASE_HORIZONTAL): 49 | self._horizontal_position.assign_add(amount) 50 | return self._horizontal_position, self._depth 51 | 52 | 53 | def main(): 54 | """Entrypoint. 
Suppose the "input" file is in the cwd.""" 55 | 56 | def _processor(line): 57 | splits = tf.strings.split(line) 58 | direction = splits[0] 59 | amount = splits[1] 60 | 61 | if tf.equal(direction, "forward"): 62 | action = Action.INCREASE_HORIZONTAL 63 | elif tf.equal(direction, "down"): 64 | action = Action.INCREASE_DEPTH 65 | elif tf.equal(direction, "up"): 66 | action = Action.DECREASE_DEPTH 67 | else: 68 | action = -1 69 | # tf.debugging.Assert(False, f"Unhandled direction: {direction}") 70 | 71 | amount = tf.strings.to_number(amount, out_type=tf.int64) 72 | return action, amount 73 | 74 | dataset = tf.data.TextLineDataset("input").map(_processor) 75 | 76 | counter = PositionCounter() 77 | horizontal_position, depth = counter(dataset) 78 | result = horizontal_position * depth 79 | tf.print("[part one] result: ", result) 80 | 81 | 82 | if __name__ == "__main__": 83 | sys.exit(main()) 84 | -------------------------------------------------------------------------------- /2021/3/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/3 5 | """ 6 | 7 | import sys 8 | from typing import Tuple 9 | 10 | import tensorflow as tf 11 | 12 | 13 | @tf.function 14 | def most_frequent_bits(tensor: tf.Tensor) -> Tuple[tf.Tensor, tf.Tensor]: 15 | count = tf.reduce_sum(tensor, axis=0) 16 | tot = tf.cast(tf.shape(tensor)[0], tf.int64) 17 | half = tot // 2 18 | ret = tf.cast(tf.greater(count, half), tf.int64) 19 | return tf.squeeze(ret), tf.squeeze( 20 | tf.logical_and(tf.equal(count, half), tf.equal(tf.math.mod(tot, 2), 0)) 21 | ) # True where #1 == #0 22 | 23 | 24 | @tf.function 25 | def bin2dec(bin_tensor: tf.Tensor): 26 | two = tf.cast(2, tf.int64) 27 | return tf.reduce_sum( 28 | tf.reverse(bin_tensor, axis=[0]) 29 | * two ** tf.range(tf.size(bin_tensor), dtype=tf.int64) 30 | ) 31 | 32 | 33 | class RateFinder(tf.Module): 34 | def __init__(self, bits): 35 | 
super().__init__() 36 | # Constants 37 | self._zero = tf.constant(0, tf.int64) 38 | self._one = tf.constant(1, tf.int64) 39 | self._two = tf.constant(2, tf.int64) 40 | self._bits = tf.constant(tf.cast(bits, tf.int64)) 41 | # Variables 42 | self._rating = tf.Variable(tf.zeros([bits], dtype=tf.int64), trainable=False) 43 | self._frequencies = tf.Variable( 44 | tf.zeros([bits], dtype=tf.int64), trainable=False 45 | ) 46 | self._ta = tf.TensorArray( 47 | size=1, dtype=tf.int64, dynamic_size=True, clear_after_read=True 48 | ) 49 | 50 | @tf.function(experimental_relax_shapes=True) 51 | def filter_by_bit_criteria( 52 | self, 53 | dataset_tensor: tf.Tensor, 54 | current_bit_position: tf.Tensor, 55 | oxygen: tf.Tensor, 56 | ): 57 | if oxygen: 58 | flag = self._one 59 | frequencies, mask = most_frequent_bits(dataset_tensor) 60 | else: 61 | flag = self._zero 62 | frequencies, mask = most_frequent_bits(dataset_tensor) 63 | frequencies = tf.cast( 64 | tf.logical_not(tf.cast(frequencies, tf.bool)), 65 | tf.int64, 66 | ) 67 | # #0 == #1 pick the elements with the correct bitflag 68 | if mask[current_bit_position]: 69 | indices = tf.where( 70 | tf.equal( 71 | dataset_tensor[:, current_bit_position], 72 | flag, 73 | ) 74 | ) 75 | else: 76 | indices = tf.where( 77 | tf.equal( 78 | dataset_tensor[:, current_bit_position], 79 | frequencies[current_bit_position], 80 | ) 81 | ) 82 | 83 | # All elements with the bit "position" equal to frequencies[position] 84 | gathered = tf.gather_nd(dataset_tensor, indices) 85 | return gathered 86 | 87 | # @tf.function 88 | def find(self, dataset_tensor: tf.Tensor, oxygen: tf.Tensor): 89 | num_bits = tf.shape(dataset_tensor)[-1] 90 | self._ta.unstack(dataset_tensor) 91 | for current_bit_position in tf.range(num_bits): 92 | ta = self._ta.stack() 93 | gathered = tf.squeeze( 94 | self.filter_by_bit_criteria(ta, current_bit_position, oxygen) 95 | ) 96 | if tf.equal(tf.size(gathered), num_bits): 97 | self._rating.assign(gathered) 98 | break 99 | 
self._ta.unstack(gathered) 100 | 101 | return self._rating 102 | 103 | 104 | def main(): 105 | """Entrypoint. Suppose the "input" file is in the cwd.""" 106 | 107 | dataset = ( 108 | tf.data.TextLineDataset("input") # "0101" 109 | .map(tf.strings.bytes_split) # '0', '1', '0', '1' 110 | .map(lambda digit: tf.strings.to_number(digit, out_type=tf.int64)) # 0 1 0 1 111 | ) 112 | # We can do this in a raw way, treating the whole dataset as a tensor 113 | # so we can know its shape and extract the most frequent elements easily 114 | tensor_dataset = tf.convert_to_tensor(list(dataset)) 115 | gamma_rate, _ = most_frequent_bits(tensor_dataset) 116 | tf.print("gamma rate (bin): ", gamma_rate) 117 | gamma_rate_dec = bin2dec(gamma_rate) 118 | tf.print("gamma rate (dec): ", gamma_rate_dec) 119 | 120 | # epsilon rate is the complement 121 | epsilon_rate = tf.cast(tf.logical_not(tf.cast(gamma_rate, tf.bool)), tf.int64) 122 | tf.print("epsilon rate (bin): ", epsilon_rate) 123 | epsilon_rate_dec = bin2dec(epsilon_rate) 124 | tf.print("epislon rate (dec): ", epsilon_rate_dec) 125 | 126 | power_consuption = gamma_rate_dec * epsilon_rate_dec 127 | tf.print("power consumption: ", power_consuption) 128 | 129 | # -- Part Two --- 130 | 131 | # gamma_rate contains the most frequent bit in each position 0 1 0 1 0 ... 132 | # starting from that, we can gather all the numbers that have the more common bit 133 | # in the "position". 
134 | finder = RateFinder(bits=tf.size(epsilon_rate)) 135 | 136 | oxygen_generator_rating = finder.find(tensor_dataset, True) 137 | tf.print("Oxigen generator rating (bin): ", oxygen_generator_rating) 138 | oxygen_generator_rating_dec = bin2dec(oxygen_generator_rating) 139 | tf.print("Oxigen generator rating (dec): ", oxygen_generator_rating_dec) 140 | 141 | co2_generator_rating = finder.find(tensor_dataset, False) 142 | tf.print("C02 scrubber rating (bin): ", co2_generator_rating) 143 | co2_generator_rating_dec = bin2dec(co2_generator_rating) 144 | tf.print("C02 scrubber rating (dec): ", co2_generator_rating_dec) 145 | 146 | tf.print( 147 | "life support rating = ", oxygen_generator_rating_dec * co2_generator_rating_dec 148 | ) 149 | 150 | 151 | if __name__ == "__main__": 152 | sys.exit(main()) 153 | -------------------------------------------------------------------------------- /2021/4/input: -------------------------------------------------------------------------------- 1 | 69,88,67,56,53,97,46,29,37,51,3,93,92,78,41,22,45,66,13,82,2,7,52,40,18,70,32,95,89,64,84,68,83,26,43,0,61,36,57,34,80,39,6,63,72,98,21,54,23,28,65,16,76,11,20,33,96,4,10,25,30,19,90,24,55,91,15,8,71,99,58,14,60,48,44,17,47,85,74,87,86,27,42,38,81,79,94,73,12,5,77,35,9,62,50,31,49,59,75,1 2 | 3 | 78 27 82 68 20 4 | 14 2 34 51 7 5 | 58 57 99 37 81 6 | 9 4 0 76 45 7 | 67 69 70 17 23 8 | 9 | 38 60 62 34 41 10 | 39 58 91 45 10 11 | 66 74 94 50 17 12 | 68 27 75 97 49 13 | 36 64 5 98 15 14 | 15 | 17 50 13 53 20 16 | 68 57 76 10 86 17 | 2 91 67 27 11 18 | 94 70 84 69 25 19 | 32 90 45 75 41 20 | 21 | 71 84 42 49 81 22 | 26 40 24 73 18 23 | 41 37 19 25 75 24 | 76 63 48 56 55 25 | 85 51 29 88 23 26 | 27 | 27 10 11 75 59 28 | 61 96 44 58 64 29 | 24 68 90 60 87 30 | 28 55 34 80 9 31 | 41 98 91 78 62 32 | 33 | 91 95 70 64 30 34 | 34 43 32 16 57 35 | 49 80 87 51 62 36 | 61 10 8 75 21 37 | 85 66 2 55 56 38 | 39 | 50 4 11 58 48 40 | 30 10 57 16 95 41 | 93 96 68 92 81 42 | 94 17 69 86 79 43 | 52 34 99 6 19 
44 | 45 | 2 16 50 26 84 46 | 97 24 32 51 8 47 | 70 0 3 52 9 48 | 1 59 43 64 80 49 | 22 23 17 92 88 50 | 51 | 84 7 37 71 81 52 | 80 97 17 94 9 53 | 27 95 39 25 5 54 | 98 46 58 77 2 55 | 60 1 73 23 18 56 | 57 | 1 14 67 20 48 58 | 75 51 36 87 73 59 | 57 84 74 47 19 60 | 89 8 13 50 24 61 | 61 12 65 46 83 62 | 63 | 82 87 8 9 85 64 | 16 22 98 91 55 65 | 26 69 42 11 93 66 | 65 15 2 63 43 67 | 71 37 28 88 12 68 | 69 | 59 7 51 1 43 70 | 17 45 15 96 93 71 | 49 88 79 84 92 72 | 40 36 25 18 22 73 | 70 57 34 62 6 74 | 75 | 1 18 5 47 46 76 | 12 27 24 40 2 77 | 53 54 20 14 42 78 | 15 51 26 58 9 79 | 31 92 34 74 7 80 | 81 | 41 84 14 32 8 82 | 38 1 60 22 88 83 | 64 70 10 91 97 84 | 94 90 65 54 50 85 | 7 58 18 87 33 86 | 87 | 93 25 26 71 42 88 | 86 85 61 32 51 89 | 20 88 67 35 29 90 | 46 28 92 9 16 91 | 34 30 97 91 44 92 | 93 | 34 88 90 99 83 94 | 22 24 4 25 18 95 | 51 41 29 53 72 96 | 75 42 66 98 79 97 | 74 7 0 73 33 98 | 99 | 99 24 44 83 47 100 | 2 21 94 35 4 101 | 96 87 31 1 22 102 | 67 3 37 43 46 103 | 85 55 10 6 80 104 | 105 | 4 75 29 54 15 106 | 66 17 89 98 27 107 | 46 5 64 3 22 108 | 97 50 0 51 52 109 | 26 39 30 32 48 110 | 111 | 39 17 46 48 63 112 | 52 13 98 40 91 113 | 14 80 28 23 60 114 | 90 88 15 89 74 115 | 56 7 2 41 58 116 | 117 | 82 51 6 7 22 118 | 87 9 60 63 95 119 | 80 0 5 8 77 120 | 85 3 68 84 39 121 | 15 45 31 55 26 122 | 123 | 48 82 38 29 55 124 | 87 46 79 61 51 125 | 1 97 69 91 83 126 | 35 89 45 59 39 127 | 43 28 21 44 24 128 | 129 | 71 97 34 43 23 130 | 44 65 92 90 31 131 | 74 87 54 79 93 132 | 55 88 66 12 53 133 | 14 56 17 52 83 134 | 135 | 91 33 20 59 67 136 | 71 78 15 94 68 137 | 8 90 72 57 36 138 | 27 40 92 1 44 139 | 18 80 7 32 19 140 | 141 | 67 20 94 89 10 142 | 85 78 70 35 0 143 | 87 66 75 73 23 144 | 36 8 17 83 21 145 | 40 52 93 62 96 146 | 147 | 8 37 66 26 63 148 | 7 90 21 18 33 149 | 31 56 81 77 55 150 | 34 15 19 27 57 151 | 13 85 0 59 4 152 | 153 | 67 77 48 26 6 154 | 31 72 89 76 45 155 | 66 4 7 43 78 156 | 15 53 81 85 70 157 | 0 10 40 30 94 158 | 159 
| 79 37 8 29 27 160 | 41 14 12 99 28 161 | 75 40 30 25 77 162 | 36 78 39 32 11 163 | 91 58 17 96 51 164 | 165 | 36 8 35 30 51 166 | 28 61 4 95 67 167 | 29 69 32 80 48 168 | 55 63 98 10 22 169 | 27 87 83 62 21 170 | 171 | 24 36 52 72 16 172 | 53 1 4 96 37 173 | 31 7 69 47 57 174 | 38 97 3 26 59 175 | 74 14 29 32 40 176 | 177 | 8 73 68 62 38 178 | 43 92 15 69 46 179 | 56 58 48 28 44 180 | 25 64 13 50 97 181 | 66 34 21 49 10 182 | 183 | 63 41 71 22 18 184 | 56 82 95 60 35 185 | 53 48 79 30 86 186 | 17 51 57 70 27 187 | 75 66 42 32 43 188 | 189 | 60 59 40 42 90 190 | 65 22 43 0 49 191 | 82 96 29 52 73 192 | 67 17 20 53 24 193 | 72 5 91 50 85 194 | 195 | 94 47 2 93 74 196 | 90 10 27 17 5 197 | 92 26 28 77 88 198 | 69 43 33 19 53 199 | 34 50 54 36 60 200 | 201 | 73 36 90 50 37 202 | 11 80 81 93 74 203 | 78 56 86 6 39 204 | 15 94 7 91 42 205 | 33 8 64 40 28 206 | 207 | 73 37 57 65 0 208 | 64 26 52 79 69 209 | 15 41 3 2 1 210 | 71 48 8 43 31 211 | 5 93 86 42 27 212 | 213 | 59 35 19 17 83 214 | 15 93 53 2 4 215 | 26 51 85 71 22 216 | 31 52 74 12 57 217 | 70 40 68 39 24 218 | 219 | 3 6 45 81 20 220 | 82 30 15 62 80 221 | 21 70 56 23 32 222 | 68 19 50 16 14 223 | 46 89 72 59 40 224 | 225 | 17 27 72 36 12 226 | 55 30 6 88 69 227 | 34 91 87 45 82 228 | 48 15 18 21 7 229 | 44 4 81 14 93 230 | 231 | 55 84 58 24 53 232 | 99 44 88 54 37 233 | 2 56 57 50 35 234 | 13 90 26 30 96 235 | 7 97 12 19 71 236 | 237 | 31 26 87 54 76 238 | 68 24 20 27 98 239 | 53 75 15 95 8 240 | 63 2 45 50 9 241 | 49 17 88 55 1 242 | 243 | 91 78 45 26 30 244 | 63 95 67 60 58 245 | 34 39 44 20 11 246 | 38 29 73 22 80 247 | 56 12 77 37 4 248 | 249 | 24 18 65 21 6 250 | 76 45 85 2 78 251 | 67 69 55 91 57 252 | 96 61 39 36 83 253 | 8 54 12 38 70 254 | 255 | 33 71 24 82 84 256 | 53 32 45 9 34 257 | 89 28 30 42 96 258 | 49 95 69 51 12 259 | 80 41 31 48 75 260 | 261 | 40 60 0 92 13 262 | 87 9 45 98 77 263 | 14 91 35 1 95 264 | 79 39 19 89 51 265 | 61 56 8 97 32 266 | 267 | 89 70 2 81 34 268 | 21 59 39 84 64 269 | 
28 94 97 29 30 270 | 35 27 99 32 55 271 | 23 47 14 88 0 272 | 273 | 46 14 92 49 94 274 | 90 80 2 65 30 275 | 54 32 35 56 27 276 | 29 55 97 39 37 277 | 81 72 47 66 42 278 | 279 | 53 1 0 34 82 280 | 26 28 30 65 41 281 | 17 4 57 49 40 282 | 84 46 27 35 91 283 | 56 38 20 81 86 284 | 285 | 10 31 98 66 22 286 | 87 99 24 34 93 287 | 7 95 28 78 73 288 | 61 25 14 5 1 289 | 42 85 16 47 43 290 | 291 | 92 43 9 68 40 292 | 41 65 18 69 89 293 | 35 88 62 67 75 294 | 64 4 17 42 93 295 | 78 33 94 87 81 296 | 297 | 18 61 10 19 87 298 | 46 99 55 3 28 299 | 16 41 45 39 27 300 | 8 13 43 64 52 301 | 23 34 47 11 92 302 | 303 | 21 59 74 36 38 304 | 81 29 79 80 44 305 | 84 30 37 62 57 306 | 69 82 60 10 52 307 | 7 55 93 12 0 308 | 309 | 37 23 52 2 94 310 | 19 96 8 68 29 311 | 99 57 53 9 48 312 | 62 11 35 95 98 313 | 93 72 58 16 36 314 | 315 | 80 53 82 29 76 316 | 77 17 85 62 81 317 | 34 92 25 55 20 318 | 91 39 23 50 31 319 | 64 37 79 96 2 320 | 321 | 40 5 57 36 14 322 | 91 53 56 73 27 323 | 11 55 74 7 9 324 | 90 58 12 22 26 325 | 82 38 59 97 85 326 | 327 | 54 79 75 0 30 328 | 7 15 26 84 40 329 | 91 76 42 3 19 330 | 65 77 53 21 67 331 | 45 50 2 14 46 332 | 333 | 23 51 40 13 72 334 | 54 61 59 18 14 335 | 0 41 5 24 82 336 | 73 11 46 36 17 337 | 16 28 25 60 4 338 | 339 | 85 42 22 54 18 340 | 3 27 12 15 99 341 | 13 26 89 93 76 342 | 23 87 77 64 25 343 | 9 17 74 57 81 344 | 345 | 47 64 85 69 89 346 | 59 17 4 83 88 347 | 80 70 53 7 67 348 | 73 18 81 44 30 349 | 45 37 90 57 3 350 | 351 | 72 48 35 39 31 352 | 44 85 91 52 46 353 | 73 61 68 66 12 354 | 74 95 76 75 36 355 | 83 21 15 2 10 356 | 357 | 63 82 95 31 51 358 | 93 3 53 15 70 359 | 0 36 44 19 5 360 | 11 17 62 55 83 361 | 80 91 4 18 66 362 | 363 | 44 8 45 90 64 364 | 30 33 9 27 47 365 | 68 53 81 77 35 366 | 63 4 82 80 67 367 | 3 28 66 22 43 368 | 369 | 48 86 57 16 7 370 | 69 51 11 8 61 371 | 25 12 43 88 71 372 | 83 36 31 77 5 373 | 50 21 9 76 63 374 | 375 | 27 39 6 87 49 376 | 16 66 3 25 10 377 | 7 70 8 94 42 378 | 95 20 55 9 29 379 | 0 46 36 79 
18 380 | 381 | 27 21 36 14 79 382 | 23 48 56 74 94 383 | 18 99 73 93 32 384 | 98 77 37 35 69 385 | 43 34 63 59 9 386 | 387 | 27 96 78 94 20 388 | 34 5 49 84 99 389 | 68 74 21 57 1 390 | 93 85 29 47 65 391 | 54 97 42 70 40 392 | 393 | 29 28 64 26 46 394 | 39 48 13 51 2 395 | 42 91 96 93 66 396 | 12 60 70 8 24 397 | 18 21 83 56 45 398 | 399 | 64 43 76 40 97 400 | 30 10 22 84 53 401 | 51 13 68 93 15 402 | 75 27 18 39 82 403 | 62 61 91 12 88 404 | 405 | 72 6 61 10 45 406 | 65 62 57 2 91 407 | 30 24 76 42 69 408 | 32 36 43 63 75 409 | 92 44 58 82 49 410 | 411 | 30 39 58 75 76 412 | 62 53 59 70 97 413 | 29 31 54 27 89 414 | 90 32 37 86 1 415 | 7 34 42 61 91 416 | 417 | 98 94 10 72 26 418 | 96 78 69 77 44 419 | 45 5 88 42 73 420 | 74 91 25 22 99 421 | 16 79 60 71 37 422 | 423 | 44 33 34 27 87 424 | 46 89 75 37 4 425 | 71 63 16 35 17 426 | 83 99 28 51 97 427 | 66 86 14 61 9 428 | 429 | 2 54 7 32 79 430 | 33 36 37 35 81 431 | 25 50 84 59 21 432 | 18 16 48 26 15 433 | 94 73 61 67 44 434 | 435 | 18 34 66 57 31 436 | 74 92 71 59 19 437 | 36 94 16 80 24 438 | 35 54 58 87 64 439 | 73 90 41 49 88 440 | 441 | 74 5 57 40 21 442 | 61 11 50 80 66 443 | 35 58 52 10 56 444 | 92 67 82 46 72 445 | 32 18 33 34 55 446 | 447 | 66 79 27 24 46 448 | 98 4 30 80 49 449 | 19 23 68 18 90 450 | 41 91 83 63 77 451 | 84 12 8 10 21 452 | 453 | 23 47 58 5 20 454 | 30 32 61 6 28 455 | 24 11 8 33 10 456 | 52 93 95 0 45 457 | 22 27 3 82 40 458 | 459 | 11 51 47 83 38 460 | 28 85 9 10 48 461 | 80 60 46 55 32 462 | 89 14 90 71 50 463 | 0 65 24 40 19 464 | 465 | 12 2 37 62 93 466 | 78 69 53 43 33 467 | 85 76 26 21 92 468 | 36 54 89 46 91 469 | 29 18 72 9 51 470 | 471 | 82 36 47 95 30 472 | 65 2 98 92 12 473 | 93 73 44 48 6 474 | 31 74 62 27 42 475 | 32 13 11 99 50 476 | 477 | 89 31 94 1 78 478 | 77 24 46 64 26 479 | 11 16 28 30 45 480 | 80 22 5 8 52 481 | 32 38 76 65 90 482 | 483 | 92 96 35 86 51 484 | 47 75 17 87 30 485 | 43 29 55 50 11 486 | 77 99 48 24 20 487 | 37 7 91 23 8 488 | 489 | 26 12 82 95 78 490 
| 41 65 80 53 44 491 | 75 43 32 46 84 492 | 63 99 69 45 88 493 | 56 48 87 38 49 494 | 495 | 8 87 21 27 15 496 | 84 44 26 61 82 497 | 10 66 29 95 65 498 | 4 86 38 91 28 499 | 14 49 22 52 54 500 | 501 | 27 43 13 35 33 502 | 20 66 77 70 31 503 | 5 17 94 98 83 504 | 11 22 39 55 75 505 | 53 61 46 38 89 506 | 507 | 84 49 52 32 51 508 | 90 46 97 91 54 509 | 2 42 65 10 25 510 | 80 77 31 81 16 511 | 58 17 15 26 55 512 | 513 | 19 83 57 21 95 514 | 4 29 11 64 0 515 | 17 63 13 27 58 516 | 14 96 43 22 56 517 | 97 84 81 67 94 518 | 519 | 47 49 4 70 65 520 | 60 88 9 77 3 521 | 63 72 33 50 97 522 | 68 84 98 78 89 523 | 10 79 25 24 54 524 | 525 | 81 70 39 73 11 526 | 86 30 38 14 91 527 | 9 18 72 21 24 528 | 54 83 80 78 66 529 | 23 93 36 31 53 530 | 531 | 34 58 18 69 28 532 | 57 70 54 50 64 533 | 35 36 4 56 72 534 | 32 16 45 33 17 535 | 83 60 39 22 47 536 | 537 | 31 73 56 21 63 538 | 66 14 42 45 80 539 | 60 57 47 36 78 540 | 93 75 44 22 11 541 | 68 89 58 88 17 542 | 543 | 74 16 65 13 45 544 | 86 20 6 34 15 545 | 70 46 59 75 57 546 | 28 62 67 71 98 547 | 77 63 25 61 64 548 | 549 | 71 20 42 65 47 550 | 29 80 53 78 99 551 | 70 57 18 45 32 552 | 86 46 35 77 26 553 | 15 91 93 55 67 554 | 555 | 27 16 31 41 42 556 | 77 34 10 90 18 557 | 28 99 44 20 68 558 | 98 82 3 75 62 559 | 88 85 47 17 71 560 | 561 | 31 95 98 60 93 562 | 80 81 23 35 70 563 | 4 57 38 69 76 564 | 18 0 41 86 54 565 | 47 26 90 65 39 566 | 567 | 79 86 59 66 50 568 | 49 64 65 95 6 569 | 90 67 36 32 46 570 | 10 20 25 27 1 571 | 87 21 17 78 13 572 | 573 | 16 8 95 35 43 574 | 14 0 72 89 68 575 | 52 11 12 67 25 576 | 63 64 13 32 15 577 | 53 98 55 81 75 578 | 579 | 51 85 15 91 10 580 | 24 68 80 22 8 581 | 55 18 36 30 66 582 | 27 21 46 63 26 583 | 81 5 14 2 13 584 | 585 | 71 39 19 40 69 586 | 58 70 65 46 78 587 | 98 14 59 94 60 588 | 12 55 68 91 0 589 | 18 35 25 61 86 590 | 591 | 85 74 56 43 44 592 | 98 78 17 95 8 593 | 70 30 66 55 94 594 | 57 62 82 49 77 595 | 61 32 97 88 58 596 | 597 | 23 1 53 65 30 598 | 45 15 9 26 28 599 | 2 21 
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/4

Bingo with TensorFlow: draw the extracted numbers, mark the boards, and
find either the first (part one) or the last (part two) winning board.
"""

import sys
from typing import Tuple

import tensorflow as tf


class Bingo(tf.Module):
    """Plays bingo over a set of 5x5 int64 boards.

    Marked numbers are replaced with -1; a board wins when a full row or
    a full column is made of -1s (i.e. the line sums to -5).
    """

    def __init__(self):
        # Assign every board in a TensorArray so we can read/write every board
        self._ta = tf.TensorArray(dtype=tf.int64, size=1, dynamic_size=True)

        # Set to True as soon as the first winner is found (part one only)
        self._stop = tf.Variable(False, trainable=False)

        self._winner_board = tf.Variable(
            tf.zeros((5, 5), dtype=tf.int64), trainable=False
        )
        self._last_number = tf.Variable(0, trainable=False, dtype=tf.int64)

    @staticmethod
    def is_winner(board: tf.Tensor) -> tf.Tensor:
        """Returns a scalar boolean tensor: True when any row or column of
        `board` is fully marked (all five entries are -1).

        A fully marked line sums to -5 because every entry is either the
        original drawn value (>= 0) or -1 once marked.
        """
        rows = tf.reduce_sum(board, axis=0)
        cols = tf.reduce_sum(board, axis=1)

        return tf.logical_or(
            tf.reduce_any(tf.equal(rows, -5)), tf.reduce_any(tf.equal(cols, -5))
        )

    # @tf.function
    def __call__(
        self,
        extractions: tf.data.Dataset,
        boards: tf.data.Dataset,
        first_winner: tf.Tensor = tf.constant(True),
    ) -> Tuple[tf.Tensor, tf.Tensor]:
        """Plays the game over all the boards.

        Args:
            extractions: dataset yielding the drawn numbers, in order.
            boards: dataset yielding the (5, 5) int64 boards.
            first_winner: boolean tensor. When True, stop at the first
                winning board; when False, keep playing and return the
                last board to win.
        Returns:
            A (winner_board, last_number) tuple of tensors.
        """
        # Reset the stop flag so the same module can be called twice
        self._stop.assign(False)

        # Convert the dataset to a tensor and assign it to the ta;
        # use the tensor to get its shape and know the number of boards
        tensor_boards = tf.convert_to_tensor(list(boards))  # pun intended
        tot_boards = tf.shape(tensor_boards)[0]
        self._ta = self._ta.unstack(tensor_boards)

        # Remove the number from the board when extracted.
        # The removal is just the set of the number to -1.
        # When a row or a column becomes a line of -1s then bingo!
        for number in extractions:
            if self._stop:
                break
            for idx in tf.range(tot_boards):
                board = self._ta.read(idx)
                board = tf.where(tf.equal(number, board), -1, board)
                if self.is_winner(board):
                    self._winner_board.assign(board)
                    self._last_number.assign(number)
                    if first_winner:
                        self._stop.assign(tf.constant(True))
                        break
                    # When searching for the last winner we retire every
                    # winning board by filling it with -2 (NOT 0): a board
                    # of zeros would become fully marked - and falsely
                    # "win" again - if the number 0 were drawn later,
                    # clobbering the real last winner. -2 never matches a
                    # drawn number and never sums to -5.
                    board = tf.fill(tf.shape(board), tf.constant(-2, tf.int64))
                self._ta = self._ta.write(idx, board)
        return self._winner_board, self._last_number


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    dataset = tf.data.TextLineDataset("input")

    # The first row is a csv line containing the numbers extracted in sequence
    extractions = (
        dataset.take(1)
        .map(lambda line: tf.strings.split(line, ","))
        .map(lambda string: tf.strings.to_number(string, out_type=tf.int64))
        .unbatch()
    )

    # All the other rows are the boards, every 5 lines containing an input
    # is a board. We can organize the boards as elements of the dataset,
    # a dataset of boards.
    boards = (
        dataset.skip(1)
        .filter(lambda line: tf.greater(tf.strings.length(line), 0))
        .map(tf.strings.split)
        .map(
            lambda string: tf.strings.to_number(string, out_type=tf.int64)
        )  # row with 5 numbers
        .batch(5)  # board 5 rows, 5 columns
    )

    bingo = Bingo()
    winner_board, last_number = bingo(extractions, boards)

    def _score(board, number):
        # Prints the board, the last drawn number and the puzzle score:
        # the sum of the unmarked numbers times the last drawn number.
        tf.print("Winner board: ", board)
        tf.print("Last number: ", number)

        # Sum all unmarked numbers
        unmarked_sum = tf.reduce_sum(
            tf.gather_nd(board, tf.where(tf.not_equal(board, -1)))
        )
        tf.print("Unmarked sum: ", unmarked_sum)

        final_score = unmarked_sum * number
        tf.print("Final score: ", final_score)

    _score(winner_board, last_number)

    ## --- Part Two ---
    # Figure out the last board that will win
    bingo = Bingo()
    last_winner_board, last_number = bingo(extractions, boards, tf.constant(False))
    _score(last_winner_board, last_number)


if __name__ == "__main__":
    sys.exit(main())
28 | 331,566 -> 423,566 29 | 422,418 -> 422,130 30 | 699,517 -> 699,567 31 | 757,784 -> 241,784 32 | 508,445 -> 560,393 33 | 866,275 -> 435,706 34 | 74,41 -> 74,258 35 | 386,369 -> 334,317 36 | 240,94 -> 240,969 37 | 851,197 -> 577,197 38 | 28,906 -> 741,193 39 | 286,227 -> 286,293 40 | 849,800 -> 849,665 41 | 736,307 -> 336,307 42 | 69,701 -> 494,276 43 | 421,823 -> 96,823 44 | 121,626 -> 121,393 45 | 318,351 -> 194,351 46 | 670,671 -> 439,671 47 | 603,914 -> 603,272 48 | 61,507 -> 61,889 49 | 266,39 -> 157,39 50 | 543,664 -> 869,664 51 | 382,709 -> 884,709 52 | 499,80 -> 548,80 53 | 489,79 -> 878,79 54 | 695,86 -> 644,86 55 | 987,585 -> 987,557 56 | 287,67 -> 551,67 57 | 975,983 -> 35,43 58 | 707,351 -> 232,351 59 | 529,175 -> 852,175 60 | 32,811 -> 604,811 61 | 106,153 -> 815,153 62 | 195,268 -> 509,582 63 | 50,922 -> 312,922 64 | 220,500 -> 872,500 65 | 473,33 -> 569,33 66 | 858,847 -> 162,151 67 | 937,947 -> 26,36 68 | 726,435 -> 402,435 69 | 686,601 -> 474,813 70 | 764,880 -> 84,200 71 | 850,950 -> 850,464 72 | 413,620 -> 413,285 73 | 893,560 -> 229,560 74 | 149,100 -> 149,901 75 | 358,613 -> 243,613 76 | 202,445 -> 202,411 77 | 127,153 -> 513,539 78 | 147,846 -> 53,940 79 | 139,920 -> 679,380 80 | 913,953 -> 913,735 81 | 339,466 -> 339,177 82 | 113,882 -> 647,882 83 | 18,880 -> 134,880 84 | 897,152 -> 897,428 85 | 473,511 -> 636,511 86 | 880,370 -> 358,370 87 | 400,244 -> 721,244 88 | 419,987 -> 120,688 89 | 872,224 -> 481,224 90 | 335,302 -> 730,302 91 | 961,324 -> 961,157 92 | 769,301 -> 959,301 93 | 829,124 -> 144,124 94 | 523,372 -> 985,372 95 | 520,33 -> 520,685 96 | 554,644 -> 808,898 97 | 82,676 -> 870,676 98 | 303,612 -> 303,705 99 | 338,40 -> 338,939 100 | 836,47 -> 72,811 101 | 371,751 -> 575,955 102 | 929,505 -> 929,324 103 | 273,181 -> 275,183 104 | 347,595 -> 347,463 105 | 95,629 -> 95,606 106 | 809,188 -> 126,871 107 | 857,924 -> 145,212 108 | 668,277 -> 668,63 109 | 700,904 -> 700,45 110 | 814,899 -> 22,899 111 | 205,98 -> 714,607 112 | 943,28 
-> 40,931 113 | 282,620 -> 773,129 114 | 424,803 -> 285,803 115 | 688,329 -> 299,329 116 | 146,628 -> 34,628 117 | 573,417 -> 164,826 118 | 292,232 -> 412,112 119 | 412,508 -> 145,508 120 | 632,648 -> 632,92 121 | 885,904 -> 885,513 122 | 295,981 -> 132,818 123 | 134,681 -> 41,681 124 | 810,531 -> 959,531 125 | 188,590 -> 188,215 126 | 960,795 -> 189,24 127 | 729,211 -> 729,833 128 | 214,817 -> 845,817 129 | 196,609 -> 584,609 130 | 384,908 -> 384,101 131 | 770,907 -> 770,530 132 | 451,469 -> 451,812 133 | 571,261 -> 834,261 134 | 799,436 -> 799,983 135 | 248,105 -> 248,879 136 | 783,906 -> 783,903 137 | 955,670 -> 790,670 138 | 723,750 -> 723,429 139 | 572,427 -> 546,427 140 | 610,341 -> 527,341 141 | 925,426 -> 816,317 142 | 151,403 -> 151,684 143 | 408,969 -> 408,369 144 | 276,425 -> 276,75 145 | 186,86 -> 186,758 146 | 412,420 -> 412,531 147 | 361,60 -> 976,60 148 | 787,649 -> 667,769 149 | 45,866 -> 91,866 150 | 319,963 -> 51,963 151 | 112,866 -> 112,747 152 | 291,475 -> 504,475 153 | 175,116 -> 357,116 154 | 968,961 -> 968,213 155 | 13,12 -> 987,986 156 | 640,728 -> 767,728 157 | 981,505 -> 246,505 158 | 864,981 -> 128,981 159 | 91,66 -> 931,906 160 | 798,116 -> 91,823 161 | 552,74 -> 88,538 162 | 620,872 -> 232,872 163 | 45,229 -> 658,229 164 | 413,75 -> 413,436 165 | 815,257 -> 815,686 166 | 989,22 -> 36,975 167 | 178,904 -> 233,849 168 | 635,128 -> 635,96 169 | 640,820 -> 640,313 170 | 890,787 -> 167,64 171 | 221,22 -> 826,22 172 | 914,132 -> 60,986 173 | 848,31 -> 392,487 174 | 105,969 -> 858,969 175 | 903,868 -> 143,108 176 | 38,941 -> 621,358 177 | 171,340 -> 14,497 178 | 286,460 -> 81,255 179 | 726,688 -> 857,819 180 | 494,689 -> 510,689 181 | 517,913 -> 598,913 182 | 932,66 -> 932,431 183 | 977,982 -> 18,23 184 | 95,101 -> 95,278 185 | 574,467 -> 349,467 186 | 63,803 -> 63,882 187 | 838,874 -> 255,874 188 | 900,752 -> 181,33 189 | 102,897 -> 989,10 190 | 374,439 -> 374,277 191 | 513,504 -> 513,885 192 | 814,932 -> 814,407 193 | 824,656 -> 959,521 194 
| 415,570 -> 616,570 195 | 577,880 -> 577,181 196 | 287,524 -> 986,524 197 | 955,665 -> 323,665 198 | 556,365 -> 263,658 199 | 154,226 -> 886,226 200 | 803,750 -> 866,750 201 | 558,725 -> 558,395 202 | 941,115 -> 941,150 203 | 180,410 -> 180,874 204 | 458,753 -> 112,753 205 | 199,253 -> 363,253 206 | 423,650 -> 22,650 207 | 892,851 -> 279,238 208 | 611,109 -> 611,198 209 | 983,344 -> 339,988 210 | 299,47 -> 299,934 211 | 435,652 -> 700,387 212 | 186,775 -> 677,284 213 | 136,576 -> 136,368 214 | 818,744 -> 305,744 215 | 767,171 -> 767,431 216 | 930,842 -> 259,171 217 | 342,831 -> 342,601 218 | 193,672 -> 46,525 219 | 925,164 -> 528,164 220 | 725,92 -> 617,200 221 | 67,729 -> 67,739 222 | 547,153 -> 547,245 223 | 763,434 -> 763,509 224 | 314,888 -> 357,888 225 | 72,645 -> 491,645 226 | 92,67 -> 240,67 227 | 827,936 -> 788,897 228 | 852,378 -> 77,378 229 | 448,337 -> 668,337 230 | 846,739 -> 499,739 231 | 465,691 -> 315,541 232 | 716,163 -> 18,861 233 | 78,965 -> 983,60 234 | 114,952 -> 820,246 235 | 950,351 -> 419,882 236 | 266,36 -> 266,482 237 | 773,841 -> 773,66 238 | 742,198 -> 742,46 239 | 417,512 -> 304,625 240 | 900,277 -> 900,338 241 | 983,431 -> 473,941 242 | 986,282 -> 734,30 243 | 742,19 -> 769,19 244 | 952,320 -> 948,324 245 | 92,590 -> 548,590 246 | 107,39 -> 107,696 247 | 603,749 -> 603,26 248 | 55,282 -> 888,282 249 | 670,848 -> 985,533 250 | 981,982 -> 92,93 251 | 147,428 -> 649,930 252 | 773,737 -> 821,785 253 | 791,576 -> 791,852 254 | 327,672 -> 530,469 255 | 847,122 -> 381,122 256 | 419,493 -> 498,572 257 | 879,842 -> 879,239 258 | 267,717 -> 267,869 259 | 142,449 -> 174,417 260 | 342,718 -> 342,397 261 | 603,207 -> 314,207 262 | 612,648 -> 735,771 263 | 37,10 -> 971,944 264 | 891,716 -> 891,86 265 | 252,217 -> 662,627 266 | 185,165 -> 941,921 267 | 854,717 -> 676,717 268 | 158,791 -> 336,791 269 | 762,226 -> 98,890 270 | 73,189 -> 92,189 271 | 649,511 -> 253,115 272 | 719,456 -> 514,251 273 | 605,286 -> 325,286 274 | 454,609 -> 454,489 275 | 
374,541 -> 783,541 276 | 599,177 -> 94,682 277 | 600,384 -> 32,384 278 | 810,933 -> 39,162 279 | 780,871 -> 409,871 280 | 24,639 -> 24,316 281 | 454,80 -> 454,95 282 | 556,541 -> 907,541 283 | 627,295 -> 750,295 284 | 245,71 -> 214,102 285 | 725,445 -> 614,445 286 | 779,538 -> 779,390 287 | 746,667 -> 351,272 288 | 117,776 -> 117,660 289 | 498,495 -> 88,905 290 | 697,721 -> 697,919 291 | 580,314 -> 580,166 292 | 22,656 -> 641,37 293 | 413,433 -> 44,802 294 | 182,305 -> 805,928 295 | 739,277 -> 739,499 296 | 172,210 -> 172,259 297 | 894,576 -> 894,322 298 | 265,263 -> 265,437 299 | 430,228 -> 780,578 300 | 464,531 -> 798,531 301 | 713,63 -> 668,63 302 | 918,831 -> 256,169 303 | 414,375 -> 467,375 304 | 440,32 -> 391,32 305 | 439,806 -> 955,806 306 | 335,820 -> 335,279 307 | 727,458 -> 422,458 308 | 312,274 -> 619,581 309 | 136,724 -> 538,322 310 | 589,680 -> 589,850 311 | 335,648 -> 232,545 312 | 499,216 -> 405,216 313 | 942,710 -> 942,455 314 | 969,556 -> 721,556 315 | 756,552 -> 756,902 316 | 98,870 -> 445,870 317 | 476,833 -> 476,269 318 | 820,127 -> 407,127 319 | 337,519 -> 714,519 320 | 756,95 -> 11,840 321 | 317,339 -> 317,286 322 | 353,86 -> 43,86 323 | 93,950 -> 938,105 324 | 705,509 -> 705,319 325 | 244,879 -> 721,402 326 | 434,794 -> 711,517 327 | 272,381 -> 431,381 328 | 652,104 -> 652,587 329 | 850,866 -> 34,50 330 | 645,902 -> 79,336 331 | 701,39 -> 701,295 332 | 492,793 -> 95,396 333 | 352,554 -> 395,554 334 | 123,405 -> 322,206 335 | 941,745 -> 716,520 336 | 450,512 -> 569,631 337 | 42,25 -> 817,800 338 | 909,387 -> 909,863 339 | 919,934 -> 919,546 340 | 439,881 -> 569,881 341 | 167,866 -> 167,669 342 | 242,264 -> 242,694 343 | 981,786 -> 228,33 344 | 452,434 -> 452,660 345 | 22,26 -> 22,29 346 | 26,155 -> 677,806 347 | 801,627 -> 313,627 348 | 657,135 -> 657,270 349 | 872,875 -> 440,443 350 | 636,248 -> 636,338 351 | 776,51 -> 93,51 352 | 498,600 -> 894,600 353 | 263,984 -> 263,807 354 | 416,390 -> 899,873 355 | 269,137 -> 976,137 356 | 752,12 -> 
752,617 357 | 55,925 -> 548,925 358 | 856,551 -> 771,551 359 | 653,93 -> 653,587 360 | 403,286 -> 403,417 361 | 895,706 -> 221,32 362 | 139,822 -> 139,928 363 | 696,194 -> 696,143 364 | 270,678 -> 710,678 365 | 879,353 -> 879,360 366 | 949,712 -> 752,712 367 | 665,661 -> 817,661 368 | 462,952 -> 980,434 369 | 692,766 -> 692,478 370 | 157,117 -> 144,117 371 | 438,701 -> 408,701 372 | 401,703 -> 401,724 373 | 876,831 -> 108,63 374 | 749,892 -> 832,892 375 | 455,124 -> 455,776 376 | 551,222 -> 551,372 377 | 533,80 -> 726,80 378 | 342,740 -> 56,740 379 | 793,370 -> 34,370 380 | 949,614 -> 949,623 381 | 610,287 -> 610,760 382 | 978,834 -> 85,834 383 | 644,894 -> 644,341 384 | 35,887 -> 176,887 385 | 168,958 -> 964,162 386 | 341,886 -> 341,470 387 | 417,845 -> 417,702 388 | 338,347 -> 304,313 389 | 651,10 -> 72,10 390 | 853,160 -> 853,85 391 | 381,568 -> 436,623 392 | 794,437 -> 250,437 393 | 861,72 -> 206,72 394 | 807,813 -> 807,827 395 | 820,502 -> 820,329 396 | 547,508 -> 547,773 397 | 160,129 -> 160,175 398 | 756,468 -> 756,80 399 | 442,661 -> 405,661 400 | 304,817 -> 304,765 401 | 99,42 -> 957,900 402 | 212,110 -> 854,752 403 | 44,620 -> 661,620 404 | 212,311 -> 784,883 405 | 329,671 -> 329,908 406 | 86,359 -> 553,826 407 | 257,799 -> 934,122 408 | 409,663 -> 409,367 409 | 528,623 -> 593,688 410 | 957,525 -> 544,938 411 | 846,766 -> 113,33 412 | 176,680 -> 176,102 413 | 167,287 -> 167,929 414 | 932,870 -> 834,968 415 | 86,774 -> 49,774 416 | 745,231 -> 70,906 417 | 435,760 -> 138,463 418 | 776,810 -> 625,810 419 | 928,930 -> 76,78 420 | 602,24 -> 602,688 421 | 394,424 -> 65,424 422 | 946,966 -> 93,113 423 | 494,39 -> 951,39 424 | 607,699 -> 832,699 425 | 13,403 -> 391,403 426 | 726,475 -> 726,29 427 | 828,625 -> 836,617 428 | 396,770 -> 167,770 429 | 28,546 -> 374,200 430 | 56,113 -> 837,894 431 | 290,589 -> 740,139 432 | 930,805 -> 296,171 433 | 646,895 -> 49,895 434 | 111,15 -> 111,497 435 | 11,274 -> 570,833 436 | 257,624 -> 603,624 437 | 63,844 -> 666,844 438 | 
#!/usr/bin/env python3

"""
https://adventofcode.com/2021/day/5
"""

import sys
from typing import Tuple

import tensorflow as tf


class Grid(tf.Module):
    """Grid module. Draws over the grid the various lines and when called
    returns the number of intersections, depending on the puzzle part required.
    """

    def __init__(self, dataset):
        super().__init__()
        # Bounding box of the grid: +1 because endpoints are 0-based
        # coordinates, so a max coordinate of x needs a grid of size x + 1.
        bbox_w = tf.reduce_max(list(dataset.map(lambda p1, p2: (p1[0], p2[0])))) + 1
        bbox_h = tf.reduce_max(list(dataset.map(lambda p1, p2: (p1[1], p2[1])))) + 1
        self._grid = tf.Variable(
            tf.zeros((bbox_w, bbox_h), dtype=tf.int64), trainable=False
        )
        self._dataset = dataset

    @staticmethod
    @tf.function
    def interpolate(p1: tf.Tensor, p2: tf.Tensor):
        """Linear interpolation from p1 to p2 in the discrete 2D grid.
        Args:
            p1: Tensor with values (x, y)
            p2: Tensor with values (x, y)
        Returns:
            The linear interpolation in the discrete 2D grid.
        """
        # Chebyshev (inf-norm) distance = number of steps along the segment;
        # +1 handles the case of p1 - p2 == 1
        norm = tf.norm(tf.cast(p1 - p2, tf.float32), ord=tf.experimental.numpy.inf) + 1
        return tf.cast(
            tf.math.ceil(tf.linspace(p1, p2, tf.cast(norm, tf.int64))), tf.int64
        )

    @tf.function
    def __call__(self, part_one: tf.Tensor) -> tf.Tensor:
        """Given the required puzzle part, changes the line drawing on the grid
        and the intersection count.
        Args:
            part_one: boolean tensor. When true, only consider straight lines and
                a threshold of 1. When false, consider straight lines and diagonal
                lines.
        Returns
            the number of intersections
        """
        # Reset the grid so the module can be called for both parts.
        self._grid.assign(tf.zeros_like(self._grid))

        for start, end in self._dataset:
            # Discrete interpolation between start and end
            # part 1 requires to consider only straight lines
            # (x1 = x2 or y1 = y2)
            # but I guess (hope) doing the generic discrete interpolation
            # will simplify part 2 (no idea, just a guess)
            float_start = tf.cast(start, tf.float32)
            float_end = tf.cast(end, tf.float32)
            direction = float_start - float_end
            # Segment angle in degrees, normalized below to [0, 360).
            angle = (
                tf.math.atan2(direction[1], direction[0])
                * 180
                / tf.experimental.numpy.pi
            )
            if tf.less(angle, 0):
                angle = 360 + angle
            # Draw the segment when it is horizontal/vertical (both parts),
            # or when it is a 45-degree diagonal and we are in part two
            # (mod 45 == 0 but mod 90 != 0 selects exactly the diagonals).
            if tf.logical_or(
                tf.logical_and(
                    tf.logical_not(part_one),
                    tf.logical_and(
                        tf.logical_not(tf.equal(tf.math.mod(angle, 90), 0)),
                        tf.equal(tf.math.mod(angle, 45), 0),
                    ),
                ),
                tf.logical_or(
                    tf.equal(start[0], end[0]),
                    tf.equal(start[1], end[1]),
                ),
            ):
                pixels = self.interpolate(start, end)
                # Increment by 1 every grid cell covered by the segment.
                self._grid.assign(
                    tf.tensor_scatter_nd_add(
                        self._grid, pixels, tf.ones(tf.shape(pixels)[0], dtype=tf.int64)
                    )
                )

        # tf.print(tf.transpose(grid, perm=(1, 0)), summarize=-1)
        # Cells strictly greater than 1 are covered by at least two segments.
        threshold = tf.constant(1, tf.int64)
        mask = tf.greater(self._grid, threshold)
        return tf.reduce_sum(tf.cast(mask, tf.int64))


def main():
    """Entrypoint. Suppose the "input" file is in the cwd."""

    def _get_segment(
        line: tf.Tensor,
    ) -> Tuple[tf.Tensor, tf.Tensor]:
        # Parses a "x1,y1 -> x2,y2" line into the two (x, y) endpoint tensors.
        points = tf.strings.split(line, " -> ")
        p1 = tf.strings.split(points[0], ",")
        p2 = tf.strings.split(points[1], ",")

        x1 = tf.strings.to_number(p1[0], tf.int64)
        y1 = tf.strings.to_number(p1[1], tf.int64)
        x2 = tf.strings.to_number(p2[0], tf.int64)
        y2 = tf.strings.to_number(p2[1], tf.int64)
        return tf.convert_to_tensor((x1, y1)), tf.convert_to_tensor((x2, y2))

    dataset = tf.data.TextLineDataset("input").map(_get_segment)
    grid = Grid(dataset)

    tf.print("# overlaps (part one): ", grid(tf.constant(True)))
    tf.print("# overlaps (part two): ", grid(tf.constant(False)))


if __name__ == "__main__":
    sys.exit(main())
def evolve(initial_state: tf.Tensor, days: tf.Tensor) -> tf.Tensor:
    """Naively simulate the lanternfish population, one element per fish.

    Args:
        initial_state: 1-D integer tensor; element i is the spawn timer of
            fish i.
        days: scalar integer tensor, the number of days to simulate.

    Returns:
        The 1-D tensor of fish timers after `days` days; its size is the
        population count. The state (and memory use) grows exponentially
        with `days`, so this is only practical for small day counts.
    """
    # Use the input dtype instead of hard-coding tf.int32: the input
    # pipeline in main() parses the timers as tf.int64, and a mismatched
    # TensorArray dtype would fail at unstack time.
    dtype = initial_state.dtype
    ta = tf.TensorArray(dtype, size=tf.size(initial_state), dynamic_size=True)
    ta = ta.unstack(initial_state)

    for _ in tf.range(1, days + 1):
        yesterday_state = ta.stack()
        # A timer at 0 resets to 6 and spawns a newborn with timer 8.
        index_map = tf.equal(yesterday_state, 0)
        if tf.reduce_any(index_map):
            indices = tf.where(index_map)
            # Everybody ages by one day, then the expired timers are
            # overwritten with 6.
            transition_state = tf.tensor_scatter_nd_update(
                yesterday_state - 1,
                indices,
                tf.cast(tf.ones(tf.shape(indices)[0]) * 6, dtype),
            )
            ta = ta.unstack(transition_state)
            # Append one newborn (timer 8) per expired timer.
            new_born = tf.reduce_sum(tf.cast(index_map, tf.int32))
            for n in tf.range(new_born):
                ta = ta.write(
                    tf.size(transition_state, tf.int32) + n,
                    tf.constant(8, dtype),
                )
        else:
            # No timer expired: everybody just ages by one day.
            transition_state = yesterday_state - 1
            ta = ta.unstack(transition_state)
        today_state = ta.stack()
    return today_state


# Returning the full state is wasteful: we are interested only in the number
# of elements the last state contains.
# Hence we can reason about this number instead of the state itself.
# Given an initial state of 1 2 3 4 1 -> tot = 5
# After a time step, we are in the state 0 1 2 3 0 -> tot = 5
# When we have zeroes, it means they will spawn eights on the next time step.
# After a time step we are in the state 6 0 1 2 6 8 8
# If we only keep track of the number of fish in a certain state, we can
# compress all the information. Something like
# 1 -> 10  # there are 10 fish at status 1. Nothing happens to the length.
# 2 -> 1   # there's one fish in status 2. Nothing happens to the length.
# k -> v   # there's v fish in status k. What happens depends on k.
# If k = 0, there are v fish in status 0, hence the length (number of fish)
# will increase by v on the next time step, while the count for k=0 becomes 0
# and the number of fish in status 8 increases by v.
class TableCounter(tf.Module):
    """Count lanternfish with a 9-slot table (timer value -> fish count).

    Instead of materializing one element per fish (see `evolve`), only the
    number of fish in each timer state [0, 8] is tracked, so memory stays
    constant no matter how many days are simulated.
    """

    def __init__(self):
        super().__init__()

        # int64 scalars reused as table keys/indices inside the tf.function:
        # the MutableHashTable below uses int64 for both keys and values.
        self._zero = tf.constant(0, tf.int64)
        self._one = tf.constant(1, tf.int64)
        self._six = tf.constant(6, tf.int64)
        self._eight = tf.constant(8, tf.int64)
        self._nine = tf.constant(9, tf.int64)

    @tf.function
    def count(self, initial_state: tf.Tensor, days: tf.Tensor) -> tf.Tensor:
        """Return the total number of fish after `days` days.

        Args:
            initial_state: 1-D int64 tensor of per-fish timers.
            days: scalar int64 tensor, the number of days to simulate.
        """
        # NOTE: the hash map cannot mix an int32 key with an int64 value,
        # so both key and value dtypes are int64.
        # NOTE: like TensorArrays, the hash map raises
        # "Cannot infer argument `num` from shape" if it is declared in
        # __init__ (e.g. self._hashmap) and then used here; defining it
        # inside this method is mandatory for this to work.

        hashmap = tf.lookup.experimental.MutableHashTable(
            tf.int64, tf.int64, self._zero
        )

        # Seed the table: key = timer value, value = how many fish have it.
        keys, _, count = tf.unique_with_counts(initial_state, tf.int64)
        hashmap.insert(keys, count)

        for _ in tf.range(self._one, days + self._one):
            # NOTE: this lookup has no defined shape if the map is not
            # defined inside this method!
            yesterday_state = hashmap.lookup(tf.range(self._nine))
            if tf.greater(yesterday_state[0], self._zero):
                # Some timers expired: shift buckets [1, 8] down to [0, 7]
                # and spawn the expired count as newborns in bucket 8.
                today_state = tf.tensor_scatter_nd_update(
                    yesterday_state,
                    tf.concat(
                        [
                            tf.reshape(tf.range(self._eight), (8, 1)),
                            [[self._eight]],
                        ],
                        axis=0,
                    ),
                    tf.concat(
                        [
                            hashmap.lookup(tf.range(self._one, self._nine)),
                            [yesterday_state[0]],
                        ],
                        axis=0,
                    ),
                )
                # Expired fish reset their timer: add their count to bucket 6.
                today_state = tf.tensor_scatter_nd_add(
                    today_state, [[self._six]], [yesterday_state[0]]
                )
            else:
                # No timer expired: shift the whole table one slot to the
                # left and put a 0 in slot 8.

                updates = tf.concat(
                    [
                        tf.unstack(
                            tf.gather(yesterday_state, tf.range(self._one, self._nine))
                        ),
                        [self._zero],
                    ],
                    axis=0,
                )
                indices = tf.reshape(tf.range(self._nine), (9, 1))
                today_state = tf.tensor_scatter_nd_update(
                    yesterday_state, indices, updates
                )

            hashmap.insert(tf.range(self._nine), today_state)
        # Total population = sum of all nine buckets.
        return tf.reduce_sum(hashmap.lookup(tf.range(self._nine)))


def main():
    """Entrypoint.
Suppose the "input" file is in the cwd.""" 132 | 133 | initial_state = next( 134 | iter( 135 | tf.data.TextLineDataset("input") 136 | .map(lambda string: tf.strings.split(string, ",")) 137 | .map(lambda numbers: tf.strings.to_number(numbers, out_type=tf.int64)) 138 | .take(1) 139 | ) 140 | ) 141 | 142 | # days = tf.constant(80, tf.int64) 143 | # last_state = evolve(initial_state, days) 144 | # tf.print("# fish after ", days, " days: ", tf.size(last_state)) 145 | 146 | days = tf.constant(256, tf.int64) 147 | counter = TableCounter() 148 | tf.print("# fish after ", days, " days: ", counter.count(initial_state, days)) 149 | 150 | 151 | if __name__ == "__main__": 152 | sys.exit(main()) 153 | -------------------------------------------------------------------------------- /2021/7/input: -------------------------------------------------------------------------------- 1 | 1101,1,29,67,1102,0,1,65,1008,65,35,66,1005,66,28,1,67,65,20,4,0,1001,65,1,65,1106,0,8,99,35,67,101,99,105,32,110,39,101,115,116,32,112,97,115,32,117,110,101,32,105,110,116,99,111,100,101,32,112,114,111,103,114,97,109,10,848,1174,380,1195,277,353,425,292,225,1521,631,76,692,329,1530,445,1625,561,161,1760,88,129,89,866,133,353,372,1456,888,115,347,291,246,8,57,17,869,1230,1224,681,586,1553,183,508,760,1200,812,634,578,126,147,684,1037,822,217,521,136,438,456,26,258,9,395,1,1398,34,13,40,493,871,154,606,716,649,491,416,293,1,107,21,317,1156,194,179,693,1591,894,845,10,18,1314,11,730,408,365,0,516,192,96,1438,1521,787,503,454,84,478,19,26,109,287,657,216,148,155,816,384,288,1121,39,263,488,348,110,1219,321,1101,51,5,95,147,806,17,207,81,139,686,23,231,1154,36,410,1025,2,327,172,307,232,420,88,129,1507,1028,852,467,46,81,16,64,989,558,919,1301,612,278,306,1500,95,507,1034,272,526,68,41,870,37,78,434,459,268,375,1427,1039,1039,1077,1068,445,482,106,1823,218,76,33,131,934,112,72,404,118,848,99,10,816,527,125,279,200,10,248,243,416,242,639,401,66,110,364,243,550,1289,55,30,1246,72,46,251,1225,264,765,137,94,626,
252,18,481,250,55,10,284,247,90,277,34,176,118,1178,554,761,3,938,140,890,109,811,760,179,1084,998,16,338,550,120,190,220,74,846,212,30,1098,455,1110,1662,71,1275,609,47,1239,1050,93,169,383,564,323,134,310,208,685,255,219,369,556,272,1604,562,600,493,178,337,33,949,775,588,288,498,40,1056,483,796,8,1588,463,122,120,26,215,786,525,129,259,346,21,35,247,1564,1094,1013,85,54,193,889,590,908,122,158,363,646,1301,533,73,767,405,89,358,936,737,78,982,163,867,373,1460,492,151,560,371,647,110,1779,802,374,147,1279,252,299,244,568,1014,685,912,942,767,3,76,207,296,40,181,312,192,684,988,490,31,410,928,47,1345,943,1013,705,675,543,446,84,869,1173,393,348,99,25,21,20,132,295,4,1273,800,182,140,593,964,55,167,15,219,269,35,848,324,474,189,4,305,62,7,1145,13,975,55,880,43,368,82,1083,6,969,844,1388,185,818,114,641,151,1006,220,599,143,170,36,631,891,531,871,13,77,1261,340,911,159,258,213,1245,222,200,946,875,301,9,381,69,1152,901,490,207,38,104,103,423,311,1532,536,314,73,314,29,844,141,1191,169,136,42,143,163,277,22,1655,1063,45,774,103,83,1253,121,1473,834,245,143,211,1252,1368,97,4,1188,1102,359,134,298,225,292,101,832,426,1204,652,468,110,529,34,384,663,534,80,1,68,117,419,1321,1729,38,1609,416,554,1047,130,145,267,1382,0,237,32,17,237,803,21,888,185,108,79,1,192,286,1167,86,71,464,122,375,0,186,927,525,1127,442,675,373,53,404,624,65,1157,107,687,98,165,891,1186,513,727,46,148,275,516,1412,17,684,113,246,137,1246,1357,143,307,307,656,1030,198,33,955,1765,314,79,5,385,248,402,75,12,76,325,175,1198,258,38,912,795,296,1931,111,72,1619,1563,8,327,867,293,515,1647,1266,128,44,9,431,300,72,501,1181,147,1330,1446,490,297,436,274,512,107,185,57,112,2,121,472,505,860,1650,411,23,328,119,64,0,1684,249,106,167,55,225,501,36,81,1225,270,186,10,477,783,39,319,1151,264,788,42,521,282,257,721,442,250,642,255,63,1015,241,939,86,997,569,330,189,129,547,1424,446,285,8,288,599,372,56,274,245,3,41,943,353,67,294,32,656,879,918,94,314,767,957,1554,854,610,590,403,139,377,129,930,341,678,577,375
,809,490,39,153,3,48,133,54,225,1261,677,267,30,28,115,642,1733,12,297,428,155,812,635,16,634,223,823,261,662,652,71,138,103,220,544,211,313,1298,512,559,1243,1319,427,78,765,343,212,557,275,1,551,234,697,1815,1005,275,179,595,1,1658,204,345,794,36,102,912,657,194,8,2,526,350,399,390,1166,469,324,1497,371,759,621,800,895,235,314,579,863,278,54,239,1275,79,1728,934,1145,1008,748,196,1056,339,569,629,47,1437,416,758,1000,287,592,1057,921,170,273,144,133,0,320,216,216,118,229,4,681,410,1801,34,485,769,839,47,950,1694,1222,1199,411,598,1210,7,1644,245,44,428,49,653,7,1037,310,754,32,476,253,124,809,66,539,434,1142,136,4,344,306,628,1332,15,1523,149,191,498,1480,498,385,562,457,199,201,487,608,172,20,584,7,17,342,167,93,60,589,445,926 2 | -------------------------------------------------------------------------------- /2021/7/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/7 5 | """ 6 | 7 | import sys 8 | 9 | import tensorflow as tf 10 | 11 | 12 | def main(): 13 | """Entrypoint. 
    Suppose the "input" file is in the cwd."""

    # Parse the single comma-separated line of crab positions into a stream
    # of int64 scalars.
    dataset = (
        tf.data.TextLineDataset("input")
        .map(lambda string: tf.strings.split(string, ","))
        .map(lambda string: tf.strings.to_number(string, out_type=tf.int64))
        .unbatch()
    )

    dataset_tensor = tf.convert_to_tensor(list(dataset))
    y, idx, count = tf.unique_with_counts(dataset_tensor, tf.int64)

    # Most frequently occupied position, used as the center of the search
    # window below.
    max_elements = tf.reduce_max(count)
    most_frequent_position = y[idx[tf.argmax(count)]]

    tf.print(max_elements, " in position ", most_frequent_position)

    # Search half the number of positions on each side of the mode.
    # NOTE(review): this is a heuristic window — it presumably always
    # contains the optimum for this input, but that is not guaranteed in
    # general; verify against the full position range if in doubt.
    neighborhood_size = tf.constant(
        tf.shape(dataset_tensor, tf.int64)[0] // tf.constant(2, tf.int64), tf.int64
    )
    # for every x in the neighborhood of the max_element
    # find the sum {p_i - x} < sum {p_i - y} for all x != y

    # Clamp the window to valid positions: [0, max position + 1).
    min_neigh_val = tf.clip_by_value(
        most_frequent_position - neighborhood_size,
        tf.constant(0, tf.int64),
        most_frequent_position,
    )

    max_val = tf.reduce_max(dataset_tensor) + 1
    max_neigh_val = tf.clip_by_value(
        most_frequent_position + neighborhood_size,
        most_frequent_position,
        max_val,
    )

    # tf.cast(-1, tf.uint64) wraps to the maximum uint64, i.e. +infinity
    # as the initial "minimum cost" sentinel.
    min_cost, found_position = tf.cast(-1, tf.uint64), -1
    for x in tf.range(min_neigh_val, max_neigh_val):
        # Part one: cost is the sum of absolute distances to x.
        cost = tf.cast(tf.reduce_sum(tf.abs(dataset_tensor - x)), tf.uint64)
        if tf.less(cost, min_cost):
            min_cost = cost
            found_position = x
    tf.print("(part one) min_cost: ", min_cost, " in position: ", found_position)

    # -- Part 2 --
    min_cost, found_position = tf.cast(-1, tf.uint64), -1
    for x in tf.range(min_neigh_val, max_neigh_val):
        diff = tf.abs(dataset_tensor - x)
        # Part two: moving d steps costs 1 + 2 + ... + d; build the ragged
        # ranges [1, d] per crab and sum them all.
        lists = tf.ragged.range(tf.ones(tf.shape(diff)[0], dtype=tf.int64), diff + 1)
        cost = tf.cast(tf.reduce_sum(lists), tf.uint64)
        if tf.less(cost, min_cost):
            min_cost = cost
            found_position = x
    tf.print("(part two) min_cost: ", min_cost, " in position: ", found_position)
68 | 69 | if __name__ == "__main__": 70 | sys.exit(main()) 71 | -------------------------------------------------------------------------------- /2021/9/input: -------------------------------------------------------------------------------- 1 | 6587893456954341398543210234567899875421012578932123459998764323569999996432345899989359878998654345 2 | 5476894579873210197654328757678910976532323457891012398997653212998798789521256789879498764679795456 3 | 5345679989864321349865479869799929987643498598932923987989543209876697654320145679965987653567989767 4 | 3235678999965443567976567878999898997654587678949899876878994319965498765421236789764398532479879878 5 | 0124789339897764678987878989998767898986798989298798765756989498754349899535456899879987691298568989 6 | 1235693219799865889698989990987856789997899592139679984445678987643237998646567890998698989987457899 7 | 2348789398678976789569999921976745678998985431095459873234569998765145689987899932349579768976567978 8 | 4567899976599989895478999899875435899359876545984398765145678999843234567898976543998498657697798964 9 | 7688999875487899976789298767986546789999988769873239876234567892954345678909997659876597646579899543 10 | 9799998764325578987899109654397967999879999898764346984345678921986456789319889799989986534466989542 11 | 9989879875413467998998923993239878998758999969975899876476789210987567895498779989992395421355678954 12 | 9876569974324678959987899889101989987646489459896789976567894321297678976987667879893987510134569999 13 | 8765498765456899769996798767992493499832367999789893987678965456998989989876546758789997921235678988 14 | 9874349876567998998965689656789569989753458987678962398989879599879799992965434345678986437367889876 15 | 8765456987678987987654678945679678979768667896569541239899998989965678901977321237899876545459993998 16 | 9876567898789996796543489939798989764979778965499432345789997879974589919898934348901997668567892109 17 | 
9987998999896789899755569898987897653989899012378945497899965467893469898789895459919898877679954919 18 | 9898919789965678989876798767346789762399943234569767989999854359932599787698789667899769989789899897 19 | 8759105679654569878998899856235999321239854355878979879998765567893987654589678978998953599896798786 20 | 6543214598979698968899999843123568910198765467989998767899976779964599643279567899687892498945697655 21 | 7954323497898966556789987654013467892499976589099987756789987889995798732153456789576931987432398543 22 | 9899435986577942347896598732123458999987899693239865347991298999989987643012348894345899876541239432 23 | 8798949765456891238965439653234567898976798789399876456789459999879876532145457896876789987832396543 24 | 7686799887347920145976549878347698987755679899989986567897667898965987674234668999987895498743987965 25 | 6595789998967891247897656989498999876544587999879987678998778987864398765545979998998989359654599876 26 | 5434678999298932346789767896569098987623456796567898989539899876543219876656897987889874298765678987 27 | 4323576789399543456789978998789197898212345679335679999421921988754301998767986576569763189896789198 28 | 3212345789987676577899899239899976789853458789124679878999890199865492999878975432478952076999893239 29 | 4523456897898787689998789129998765698767569991012399865788799345987989899989896543578943145698994345 30 | 5635567896789898799029569098899987999878978962193987654647678976799878799998798654567899239987989469 31 | 8756678935978999898998678987799999899989989643989996543234567897898968678987698785678998998795878978 32 | 9897889123467893967899789876678999768393498959878989654346778998987654567896539876789457789654769899 33 | 6998993234679932456999899865589996543212347898765578995487899989998785678974323987992345678943456797 34 | 4349965345789531345799987643499987664324456987654456789598999878999876789865434598943567889752589895 35 | 
6459876459999910239898798101989198976535967899532345678949988767899989899989545999759878997643569994 36 | 9878987767899891398989543219879239987649898998721235789039879857999994978998769899998989498957698989 37 | 7999999878986789997679654598768945798959799987632345892129967234678943567899898798767894329878987978 38 | 6989987989345679896598995987654999899898679876543566943298654123487892389989987657656789210989876569 39 | 5978996591236798789456789998743878998786567987664689954679942012456799459876799543234568921298975497 40 | 9769889432445987679345699865432459987675478998785799895799842123867898967995698432123556932967964326 41 | 8958778956759876531256789987543579876564367899896895789998763435798957899854597543012345899859873214 42 | 7643568997969765430347893987654678976444259899987924556799878976899645798643498632124556789645995403 43 | 8532456789879876321346932398767889987320145789699012345679989987896536899854987653246787896539876912 44 | 7421345678989985442457893599898994598321237894556923966889999998998745698765698764345678954321989893 45 | 6530496989498996584668999989999213469644345892349899897899999879799656789876789875456799987553598789 46 | 7699989892397987676789998979992101278955456789498756789999898765688979897987894989667976599679699667 47 | 8987979789986599987899887968989232349866667899976546689998765544567899956598923499878987698798986545 48 | 9976867678965434598999765457678945458978878999895634578987654333456989645459012398989898789987654436 49 | 8765454567897665679998654346567896567989989998764323689999765212345678932399923987698789892498742123 50 | 6874343789799779789876543213458998678999799987653212387921976924566789543989899876554678943985431012 51 | 5432102345678989999965432102349999799987678987654501276892989896677899959876798765432567899876543243 52 | 7643412456789999989878676413456899989876469898876612345999898789998998899995439876753458976987654354 53 | 
7655324577897899878989654329567988764514345789987843456798766599889987689984323987764967894398766456 54 | 8786734678976799767998765567978976543101234599998954567899954349778996569876212398876878999989877767 55 | 9897645689565698757899887679999098654332345698999875678999843234567897678965104569997889798976998978 56 | 1998756789434569943535998789899129786445689997999989899998785123456998789874315678998996567895329989 57 | 2349967896513479892124569896788939896576898786789996921989632013667899892965326679019965456996210197 58 | 3959879974323569769023456975767945987687987675678985439876543124589976920976434567997894346789321256 59 | 9899989965434679658934567894556899998999876554567976545987654565697895439876565879876543235678932345 60 | 9768999899565798747895879913445678989678965423456897656898765677796796549987676789998432126799543556 61 | 6756897788979899656789989101236789676569874312345798968999879788954789678999789899987643235689699789 62 | 4346796567898998767899894212345896545456965101276789879998999899432688989989892968998754345678988999 63 | 5657893456987899878998765523466789634349876514567892989987656999543567899876901259789895466789877889 64 | 8789932678976789989579865434567898722298765423698921291296545678957678998765432345678976989898656878 65 | 9897643789865678996469876558778969810129895434789210199986534589998989349876553499899989999986543566 66 | 8987654898754567902345998669899654323345987567994391987673323467899995456987694989999999999986432345 67 | 7898765998843236899458998789998969834589998989879989876542012456945896678998989878998788898976521289 68 | 6569976987654345678967999898587897655678999898765779765432135578935789789459976568986577667898432478 69 | 9439898998765469989998987967476789767789988766434568976543234689124678992349865457997402456789543567 70 | 8998789659989598898989876653345678978999876654323456897654345691012567891998754349876212398897654678 71 | 
7987679743497697767878965422234567899999875421012698998765676789323456789876542298765435678998865789 72 | 6592578932398986656467897910123456979899986534123589239876797896454567997984321059876568789999976890 73 | 9421349890199875433356789891954569456798986546254678998987898989765679545975472347998678899897989921 74 | 8210445789987654321245898799895698969897698755378999467998949678976889439986567656999789998796597532 75 | 7421234894599943210134987688789987898987549875489312359879234567987899998797678797899899989689459843 76 | 6439345943239874321345798454698976767895434976678954598762123456899999897698989989989979876568998754 77 | 6598997894999765633466954343567895456795323987889895689953234567999898789439899879876569965457998765 78 | 7987889999889876545789875212398954346789412398998789998764348789898775678919789767996498754345689896 79 | 9895678987676987986791995101457893244898901999975698789765459898799654567997637657984349843234569987 80 | 8654567896565698997892983212346789123457899899894987699986567899654523456789521749873249654445678999 81 | 7643298789434569898939865423567891012346798798795699789297978999543312978898430123965398979966989645 82 | 8964345679323458789920977674578942199967989689589999892198989998662106899986521239877987897897895434 83 | 9876798789012345699891988795689543987899876567477899943019799987543245679199542345998976545789999321 84 | 2989899893123556798789599876897659896789765435356789942165678998765358989098993556799865434668998910 85 | 1094945943238767987699432987999798765679976321234567893234569679878767899987689698899987323457897892 86 | 9943237895445898958567941098998999654789897542348788954345689456989878999865567989989865412368956789 87 | 8832146789556999543479892129987678932396789656569899995456791349897989987654349878878954323779768999 88 | 7654258997697976432358789234987467890145698767678998789767890998756399899785498765469895965689979889 89 | 
8765767989989999751234689345986358943235699898989998689878999876543268799996789874399769898789898776 90 | 9979979878879987642956796569875467894346789969799987578989998987654156678998992986987656789899787545 91 | 9898998765968899799899898679989878976459894356678965467899887598721034567899321987896545678998676434 92 | 9767897654656789988778999789398989997568999234579874356789756459842125788965439999999767899876545323 93 | 8656789843234899876567899892127898987678988946698763244578942398763236999877898999899978975975432102 94 | 6545678952123678975457989942016567898789767897987653123569321987654349989989987998799989764986548726 95 | 2135678953234567894345678932123459989894356789796542012459432798789498978990196754678997653297659645 96 | 6545699975545678943234569643234569878943234598679765423498943679899987867891985653589987654398799856 97 | 7678789987656989434126678964365798569892125679569876536567894567998766756789873542357898765989923987 98 | 9989890199788999321018999765456987456789287894379987789879976898987654347898762101237789889875210198 99 | 0198931369899998732129678976569876345994399965456998897989197899599875456999854345345679998764321239 100 | 1987642456910987654334567897679985456895679876597899956791098965431986567899965456786789459875643467 101 | -------------------------------------------------------------------------------- /2021/9/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | """ 4 | https://adventofcode.com/2021/day/9 5 | """ 6 | 7 | import sys 8 | from typing import Tuple 9 | 10 | import tensorflow as tf 11 | 12 | 13 | class Finder(tf.Module): 14 | def __init__(self, dataset: tf.data.Dataset): 15 | 16 | super().__init__() 17 | 18 | self._count = tf.Variable(0) 19 | self._dataset = dataset 20 | self._image = tf.convert_to_tensor(list(self._dataset)) 21 | self._shape = tf.shape(self._image) 22 | self._max_value = tf.reduce_max(self._image) 23 | self._prev = tf.Variable(0) 24 | 
25 | if tf.not_equal(tf.math.mod(self._shape[0], 3), 0): 26 | pad_h = self._shape[0] - (self._shape[0] // 3 + 3) 27 | else: 28 | pad_h = 0 29 | if tf.not_equal(tf.math.mod(self._shape[1], 3), 0): 30 | pad_w = self._shape[1] - (self._shape[1] // 3 + 3) 31 | else: 32 | pad_w = 0 33 | 34 | self._padded_image = tf.pad( 35 | self._image, 36 | [[0, pad_w], [1, pad_h]], 37 | mode="CONSTANT", 38 | constant_values=self._max_value, 39 | ) 40 | 41 | self._padded_shape = tf.shape(self._padded_image) 42 | self._norm = tf.Variable(tf.zeros(self._padded_shape, dtype=tf.int32) - 1) 43 | self._stop = tf.Variable(False) 44 | self._neigh_mask = tf.constant([(-1, 0), (0, -1), (1, 0), (0, 1)]) 45 | self._queue = tf.queue.FIFOQueue(-1, [tf.int32]) 46 | 47 | @tf.function 48 | def _four_neigh( 49 | self, grid: tf.Tensor, center: tf.Tensor 50 | ) -> Tuple[tf.Tensor, tf.Tensor]: 51 | y, x = center[0], center[1] 52 | 53 | if tf.logical_and(tf.less(y, 1), tf.less(x, 1)): 54 | mask = self._neigh_mask[2:] 55 | elif tf.less(y, 1): 56 | mask = self._neigh_mask[1:] 57 | elif tf.less(x, 1): 58 | mask = tf.concat([[self._neigh_mask[0]], self._neigh_mask[2:]], axis=0) 59 | else: 60 | mask = self._neigh_mask 61 | 62 | coords = center + mask 63 | 64 | neighborhood = tf.gather_nd(grid, coords) 65 | return neighborhood, coords 66 | 67 | @tf.function 68 | def low_points(self) -> Tuple[tf.Tensor, tf.Tensor]: 69 | self._count.assign(0) 70 | ta = tf.TensorArray(tf.int32, size=0, dynamic_size=True) 71 | 72 | for y in tf.range(self._padded_shape[0] - 1): 73 | for x in tf.range(self._padded_shape[1] - 1): 74 | center = tf.convert_to_tensor([y, x]) 75 | neighborhood, _ = self._four_neigh(self._padded_image, center) 76 | extended_neighborhood = tf.concat( 77 | [tf.expand_dims(self._padded_image[y, x], axis=0), neighborhood], 78 | axis=0, 79 | ) 80 | 81 | minval = tf.reduce_min(extended_neighborhood) 82 | if tf.logical_and( 83 | tf.reduce_any(tf.not_equal(extended_neighborhood, minval)), 84 | tf.equal(minval, 
self._padded_image[y, x]), 85 | ): 86 | self._count.assign_add(1 + self._padded_image[y, x]) 87 | 88 | ta = ta.write(ta.size(), center) 89 | 90 | return ta.stack(), self._count 91 | 92 | @tf.function 93 | def basins(self) -> tf.Tensor: 94 | batch = tf.reshape( 95 | self._padded_image, (1, self._padded_shape[0], self._padded_shape[1], 1) 96 | ) 97 | gradients = tf.squeeze(tf.image.image_gradients(batch), axis=1) 98 | 99 | y_grad, x_grad = gradients[0], gradients[1] 100 | 101 | # Gradienti magnitude is constant where there are no changes 102 | # Increases or stray constants from the low point (seed) 103 | norm = tf.cast(tf.norm(tf.cast(y_grad + x_grad, tf.float32), axis=-1), tf.int32) 104 | # Set the basin thresholds to -1 (where the 9s are) 105 | norm = tf.where(tf.equal(self._padded_image, 9), -1, norm) 106 | self._norm.assign(norm) 107 | 108 | # For every se_posd, "propagate" in a flood fill-fashion. 109 | # The -1s are the thresholds 110 | seeds = self.low_points()[0] 111 | ta = tf.TensorArray(tf.int32, size=3) 112 | ta.unstack([0, 0, 0]) 113 | for idx in tf.range(2, tf.shape(seeds)[0] + 2): 114 | # Fill with idx (watershed like: different colors) 115 | seed = seeds[idx - 2] 116 | y = seed[0] 117 | x = seed[1] 118 | 119 | # Set the seed position to the label 120 | self._norm.scatter_nd_update([[y, x]], [-idx]) 121 | 122 | # Find the 4 neighborhood, and get the values != -1 123 | neighborhood, neigh_coords = self._four_neigh(self._norm, seed) 124 | update_coords = tf.gather_nd( 125 | neigh_coords, tf.where(tf.not_equal(neighborhood, -1)) 126 | ) 127 | if tf.greater(tf.size(update_coords), 0): 128 | self._queue.enqueue_many(update_coords) 129 | while tf.greater(self._queue.size(), 0): 130 | pixel = self._queue.dequeue() 131 | # Update this pixel to the label value 132 | py, px = pixel[0], pixel[1] 133 | self._norm.scatter_nd_update([[py, px]], [-idx]) 134 | px_neigh_vals, px_neigh_coords = self._four_neigh(self._norm, pixel) 135 | px_update_coords = tf.gather_nd( 
136 | px_neigh_coords, 137 | tf.where( 138 | tf.logical_and( 139 | tf.not_equal(px_neigh_vals, -1), 140 | tf.not_equal(px_neigh_vals, -idx), 141 | ) 142 | ), 143 | ) 144 | if tf.greater(tf.size(px_update_coords), 0): 145 | self._queue.enqueue_many(px_update_coords) 146 | 147 | basin_size = tf.reduce_sum(tf.cast(tf.equal(self._norm, -idx), 3)) 148 | if tf.greater(basin_size, ta.read(0)): 149 | first = basin_size 150 | second = ta.read(0) 151 | third = ta.read(1) 152 | ta = ta.unstack([first, second, third]) 153 | elif tf.greater(basin_size, ta.read(1)): 154 | first = ta.read(0) 155 | second = basin_size 156 | third = ta.read(1) 157 | ta = ta.unstack([first, second, third]) 158 | elif tf.greater(basin_size, ta.read(2)): 159 | ta = ta.write(2, basin_size) 160 | 161 | # tf.print(self._norm, summarize=-1) 162 | return tf.reduce_prod(ta.stack()) 163 | 164 | 165 | def main(): 166 | """Entrypoint. Suppose the "input" file is in the cwd.""" 167 | 168 | dataset = ( 169 | tf.data.TextLineDataset("input") 170 | .map(tf.strings.bytes_split) 171 | .map(lambda string: tf.strings.to_number(string, out_type=tf.int32)) 172 | ) 173 | 174 | finder = Finder(dataset) 175 | tf.print("Part one: ", finder.low_points()[1]) 176 | tf.print("Part two: ", finder.basins()) 177 | 178 | 179 | if __name__ == "__main__": 180 | sys.exit(main()) 181 | -------------------------------------------------------------------------------- /2021/requirements.in: -------------------------------------------------------------------------------- 1 | tensorflow 2 | -------------------------------------------------------------------------------- /2021/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile requirements.in 6 | # 7 | absl-py==1.0.0 8 | # via 9 | # tensorboard 10 | # tensorflow 11 | astunparse==1.6.3 12 | # via tensorflow 13 | cachetools==4.2.4 14 | # via 
google-auth 15 | certifi==2021.10.8 16 | # via requests 17 | charset-normalizer==2.0.9 18 | # via requests 19 | flatbuffers==2.0 20 | # via tensorflow 21 | gast==0.4.0 22 | # via tensorflow 23 | google-auth-oauthlib==0.4.6 24 | # via tensorboard 25 | google-auth==2.3.3 26 | # via 27 | # google-auth-oauthlib 28 | # tensorboard 29 | google-pasta==0.2.0 30 | # via tensorflow 31 | grpcio==1.42.0 32 | # via 33 | # tensorboard 34 | # tensorflow 35 | h5py==3.6.0 36 | # via tensorflow 37 | idna==3.3 38 | # via requests 39 | importlib-metadata==4.8.2 40 | # via markdown 41 | keras-preprocessing==1.1.2 42 | # via tensorflow 43 | keras==2.7.0 44 | # via tensorflow 45 | libclang==12.0.0 46 | # via tensorflow 47 | markdown==3.3.6 48 | # via tensorboard 49 | numpy==1.21.4 50 | # via 51 | # h5py 52 | # keras-preprocessing 53 | # opt-einsum 54 | # tensorboard 55 | # tensorflow 56 | oauthlib==3.1.1 57 | # via requests-oauthlib 58 | opt-einsum==3.3.0 59 | # via tensorflow 60 | protobuf==3.19.1 61 | # via 62 | # tensorboard 63 | # tensorflow 64 | pyasn1-modules==0.2.8 65 | # via google-auth 66 | pyasn1==0.4.8 67 | # via 68 | # pyasn1-modules 69 | # rsa 70 | requests-oauthlib==1.3.0 71 | # via google-auth-oauthlib 72 | requests==2.26.0 73 | # via 74 | # requests-oauthlib 75 | # tensorboard 76 | rsa==4.8 77 | # via google-auth 78 | six==1.16.0 79 | # via 80 | # absl-py 81 | # astunparse 82 | # google-auth 83 | # google-pasta 84 | # grpcio 85 | # keras-preprocessing 86 | # tensorflow 87 | tensorboard-data-server==0.6.1 88 | # via tensorboard 89 | tensorboard-plugin-wit==1.8.0 90 | # via tensorboard 91 | tensorboard==2.7.0 92 | # via tensorflow 93 | tensorflow-estimator==2.7.0 94 | # via tensorflow 95 | tensorflow-io-gcs-filesystem==0.22.0 96 | # via tensorflow 97 | tensorflow==2.7.0 98 | # via -r requirements.in 99 | termcolor==1.1.0 100 | # via tensorflow 101 | typing-extensions==4.0.1 102 | # via tensorflow 103 | urllib3==1.26.7 104 | # via requests 105 | werkzeug==2.0.2 106 | # via 
"""
Solution in pure TensorFlow to the puzzle

https://adventofcode.com/2022/day/1

of the Advent of Code 2022.
"""

import sys
from pathlib import Path

import tensorflow as tf


def main(input_path: Path) -> int:
    """entrypoint

    Reads the calorie list (groups of numbers separated by blank lines),
    prints the elf carrying the most calories and the top-3 elves.
    Returns 0 so the value can be used as the process exit code.
    """

    # One text line per element; a trailing blank line flushes the
    # running total of the last elf out of the scan below.
    lines = tf.data.TextLineDataset(input_path.as_posix())
    lines = lines.concatenate(tf.data.Dataset.from_tensors([""]))

    zero = tf.constant(0, dtype=tf.int64)

    @tf.function
    def accumulate(running, line):
        # Non-blank line: add its calories to the running total and emit
        # a -1 sentinel.  Blank line: emit the finished total and reset.
        if tf.strings.length(line) > 0:
            next_running = running + tf.strings.to_number(line, tf.int64)
            emitted = tf.constant(-1, tf.int64)
        else:
            next_running = tf.constant(0, tf.int64)
            emitted = running
        return next_running, emitted

    # Keep only the flushed per-elf totals (drop the -1 sentinels).
    per_elf = lines.scan(zero, accumulate).filter(lambda total: total > 0)
    totals = tf.convert_to_tensor(list(per_elf.as_numpy_iterator()))

    tf.print("## top elf ##")
    tf.print("max calories: ", tf.reduce_max(totals))
    tf.print("elf id: ", tf.argmax(totals) + 1)  # ids are 1-based

    tf.print("## top 3 elves ##")
    best3, best3_idx = tf.math.top_k(totals, k=3)
    tf.print("calories: ", best3)
    tf.print("indices: ", best3_idx + 1)
    tf.print("sum top calories: ", tf.reduce_sum(best3))
    return 0


if __name__ == "__main__":
    INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake")
    sys.exit(main(INPUT))
124 | addx 3 125 | noop 126 | addx 15 127 | addx -21 128 | addx 22 129 | addx -6 130 | addx 1 131 | noop 132 | addx 2 133 | addx 1 134 | noop 135 | addx -10 136 | noop 137 | noop 138 | addx 20 139 | addx 1 140 | addx 2 141 | addx 2 142 | addx -6 143 | addx -11 144 | noop 145 | noop 146 | noop 147 | -------------------------------------------------------------------------------- /2022/10/fake1: -------------------------------------------------------------------------------- 1 | noop 2 | addx 3 3 | addx -5 4 | -------------------------------------------------------------------------------- /2022/10/input: -------------------------------------------------------------------------------- 1 | noop 2 | noop 3 | noop 4 | addx 6 5 | addx -1 6 | noop 7 | addx 5 8 | noop 9 | noop 10 | addx -12 11 | addx 19 12 | addx -1 13 | noop 14 | addx 4 15 | addx -11 16 | addx 16 17 | noop 18 | noop 19 | addx 5 20 | addx 3 21 | addx -2 22 | addx 4 23 | noop 24 | noop 25 | noop 26 | addx -37 27 | noop 28 | addx 3 29 | addx 2 30 | addx 5 31 | addx 2 32 | addx 10 33 | addx -9 34 | noop 35 | addx 1 36 | addx 4 37 | addx 2 38 | noop 39 | addx 3 40 | addx 2 41 | addx 5 42 | addx 2 43 | addx 3 44 | addx -2 45 | addx 2 46 | addx 5 47 | addx -40 48 | addx 25 49 | addx -22 50 | addx 2 51 | addx 5 52 | addx 2 53 | addx 3 54 | addx -2 55 | noop 56 | addx 23 57 | addx -18 58 | addx 2 59 | noop 60 | noop 61 | addx 7 62 | noop 63 | noop 64 | addx 5 65 | noop 66 | noop 67 | noop 68 | addx 1 69 | addx 2 70 | addx 5 71 | addx -40 72 | addx 3 73 | addx 8 74 | addx -4 75 | addx 1 76 | addx 4 77 | noop 78 | noop 79 | noop 80 | addx -8 81 | noop 82 | addx 16 83 | addx 2 84 | addx 4 85 | addx 1 86 | noop 87 | addx -17 88 | addx 18 89 | addx 2 90 | addx 5 91 | addx 2 92 | addx 1 93 | addx -11 94 | addx -27 95 | addx 17 96 | addx -10 97 | addx 3 98 | addx -2 99 | addx 2 100 | addx 7 101 | noop 102 | addx -2 103 | noop 104 | addx 3 105 | addx 2 106 | noop 107 | addx 3 108 | addx 2 109 | noop 110 | addx 3 111 | 
"""
Solution in pure TensorFlow to the puzzle

https://adventofcode.com/2022/day/10

of the Advent of Code 2022.
"""

import sys
from pathlib import Path

import tensorflow as tf


def main(input_path: Path) -> int:
    """entrypoint

    Reads the "noop" / "addx <v>" program, then:
      - part 1: prints the sum of the six signal strengths sampled at
        cycles 20, 60, 100, 140, 180, 220;
      - part 2: renders and prints the 40x6 CRT.

    Returns 0 so the value can be used as the process exit code.
    """

    # CPU state: current clock cycle and the X register (starts at 1).
    cycle = tf.Variable(0, dtype=tf.int32)
    X = tf.Variable(1, dtype=tf.int32)

    # Textual opcode -> numeric id: noop -> 0, addx -> 1.
    lut = tf.lookup.StaticHashTable(
        tf.lookup.KeyValueTensorInitializer(
            tf.constant(["noop", "addx"]), tf.constant([0, 1])
        ),
        default_value=-1,
    )

    dataset = tf.data.TextLineDataset(input_path.as_posix())

    # "addx 3" -> ["addx", "3"]; "noop" -> ["noop"]
    dataset = dataset.map(lambda line: tf.strings.split(line, " "))

    @tf.function
    def opval(pair):
        # Normalize every instruction to an (opcode, value) pair;
        # noop has no operand so it gets a dummy 0.
        if tf.equal(tf.shape(pair)[0], 1):
            return pair[0], tf.constant(0, tf.int32)

        return pair[0], tf.strings.to_number(pair[1], tf.int32)

    dataset = dataset.map(opval)

    noop_id = lut.lookup(tf.constant(["noop"]))[0]
    noop = tf.stack((noop_id, 0), axis=0)
    # Placeholder pair used only to keep every element the same (2, 2)
    # shape; filtered out right after unbatching.
    invalid = tf.constant((-1, -1))

    @tf.function
    def prepend_noop(op, val):
        # addx takes two clock cycles, noop takes one.  Emitting
        # [noop, instruction] for addx and [noop, invalid] for noop turns
        # the stream into exactly one element per clock cycle.
        if tf.equal(op, "noop"):
            return tf.stack([noop, invalid], axis=0)

        return tf.stack(
            [
                noop,
                tf.stack((lut.lookup(tf.expand_dims(op, axis=0))[0], val), axis=0),
            ],
            axis=0,
        )

    dataset = (
        dataset.map(prepend_noop)
        .unbatch()
        .filter(lambda op_val: tf.not_equal(op_val[0], -1))  # remove invalid
        .map(lambda op_val: (op_val[0], op_val[1]))
    )
    # now every element in the dataset is a clock cycle

    # X value *during* the current cycle (addx lands at the end of it).
    prev_x = tf.Variable(X)

    def clock(op, val):
        # NOTE(review): relies on eager, in-order evaluation of
        # dataset.map for the side effects on cycle / X / prev_x.
        prev_x.assign(X)
        if tf.equal(op, noop_id):
            pass
        else:  # addx
            X.assign_add(val)

        cycle.assign_add(1)

        # Sample the signal strength at cycles 20, 60, ..., 220; prev_x is
        # the X value during the cycle, as the puzzle requires.
        if tf.reduce_any([tf.equal(cycle, value) for value in range(20, 221, 40)]):
            return [cycle, prev_x, prev_x * cycle]
        return [cycle, prev_x, -1]

    strenghts_dataset = dataset.map(clock).filter(
        lambda c, x, strenght: tf.not_equal(strenght, -1)
    )

    strenghts = tf.convert_to_tensor((list(strenghts_dataset.as_numpy_iterator())))

    # Column -1 holds the sampled prev_x * cycle products.
    sumsix = tf.reduce_sum(strenghts[:, -1])
    tf.print("Sum of six signal strenght: ", sumsix)

    # Part 2: 40x6 CRT, one single-character string per pixel.
    crt = tf.Variable(tf.zeros((6, 40, 1), tf.string))

    # Reset status
    cycle.assign(0)
    X.assign(1)

    row = tf.Variable(0, dtype=tf.int32)

    def clock2(op, val):
        # Same clocking as `clock`, but draws the CRT instead of sampling.
        prev_x.assign(X)
        if tf.equal(op, noop_id):
            pass
        else:  # addx
            X.assign_add(val)

        # Draw "#" when the 3-pixel sprite centered on prev_x overlaps the
        # pixel being drawn this cycle, "." otherwise.
        modcycle = tf.math.mod(cycle, 40)
        if tf.reduce_any(
            [
                tf.equal(modcycle, prev_x),
                tf.equal(modcycle, prev_x - 1),
                tf.equal(modcycle, prev_x + 1),
            ]
        ):
            crt.assign(
                tf.tensor_scatter_nd_update(
                    crt, [[row, tf.math.mod(cycle, 40)]], [["#"]]
                )
            )
        else:
            crt.assign(
                tf.tensor_scatter_nd_update(
                    crt, [[row, tf.math.mod(cycle, 40)]], [["."]]
                )
            )

        cycle.assign_add(1)

        # Move to the next CRT row every 40 pixels.
        if tf.equal(tf.math.mod(cycle, 40), 0):
            row.assign_add(1)
        return ""

    # Iterate purely for the side effects on crt.
    list(dataset.map(clock2).as_numpy_iterator())

    tf.print(tf.squeeze(crt), summarize=-1)

    return 0


if __name__ == "__main__":
    INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake")
    sys.exit(main(INPUT))
"""
Solution in pure TensorFlow to the puzzle

https://adventofcode.com/2022/day/11

of the Advent of Code 2022.
"""

import sys
from pathlib import Path

import tensorflow as tf


def main(input_path: Path) -> int:
    """entrypoint

    Parses the monkey descriptions, then simulates the keep-away game:
    part 1 plays 20 rounds dividing the worry level by 3, part 2 plays
    10000 rounds reducing the worry level modulo the product of all the
    monkeys' divisors.
    """

    dataset = tf.data.TextLineDataset(input_path.as_posix())
    # A trailing blank line flushes the last monkey block out of the scan.
    dataset = dataset.concatenate(tf.data.Dataset.from_tensors([""]))

    # The 6 raw fields of the monkey being parsed:
    # [id, starting items, operation, divisor, dest if true, dest if false]
    monkey = tf.Variable(["", "", "", "", "", ""], dtype=tf.string)
    monkey_id = tf.Variable(-1)  # NOTE(review): assigned here only, never read
    pos = tf.Variable(0)  # index of the field the next input line fills

    initial_state = tf.constant(["", "", "", "", "", ""])

    def init(old_state, line):
        # Scan step.  Returns (new_state, ready): ready is True exactly
        # when a blank line has just copied a complete block into `monkey`.

        if tf.equal(line, ""):
            monkey.assign(old_state, use_locking=True)
            pos.assign(0)
            return initial_state, True

        # Each branch extracts the useful token of the current line.
        if tf.strings.regex_full_match(line, r"^Monkey \d*:$"):
            # "Monkey 0:" -> "0"
            items = tf.strings.split(tf.strings.split([line], " ")[0][1], ":")[0]
            updates = [items]
        elif tf.equal(pos, 1):
            # "Starting items: 79, 98" -> "79, 98"
            items = tf.strings.strip(tf.strings.split([line], ":")[0][1])
            updates = [items]
        elif tf.equal(pos, 2):
            # "Operation: new = old * 19" -> "old * 19"
            op = tf.strings.strip(tf.strings.split([line], "="))[0][1]
            updates = [op]
        elif tf.equal(pos, 3):
            # "Test: divisible by 23" -> "23"
            divisible_by = tf.strings.strip(tf.strings.split([line], " "))[0][-1]
            updates = [divisible_by]
        else:  # if tf.reduce_any([tf.equal(pos, 4), tf.equal(pos, 5)]):
            # "If true/false: throw to monkey N" -> "N"
            monkey_dest = tf.strings.strip(tf.strings.split([line], " "))[0][-1]
            updates = [monkey_dest]

        indices = tf.reshape(pos, (1, 1))
        new_state = tf.tensor_scatter_nd_update(old_state, indices, updates)
        pos.assign_add(1)

        return new_state, False

    dataset = dataset.scan(initial_state, init)

    # First pass over the scanned dataset just counts the monkeys.
    monkey_count = tf.Variable(0)
    for monkey_ready in dataset:
        if monkey_ready:
            # tf.print(monkey)
            monkey.assign(tf.zeros_like(monkey))
            monkey_count.assign_add(1)

    # How many items each monkey inspected (the puzzle's answer metric).
    inspected_count = tf.Variable(tf.zeros((monkey_count), tf.int64))
    part = tf.Variable(1)  # selects the worry-reduction rule in monkey_play

    @tf.function
    def apply_operation(worry_level, op):
        # Evaluates "old (+|*) (<int>|old)" for the given worry level.
        op = tf.strings.split([op], " ")[0]  # lhs, op, rhs
        ret = tf.constant(0, tf.int64)
        # lhs always = "old"
        if tf.strings.regex_full_match(op[2], r"^\d*$"):
            val = tf.strings.to_number(op[2], tf.int64)
        else:
            val = worry_level
        if tf.equal(op[1], "+"):
            ret = worry_level + val
        if tf.equal(op[1], "*"):
            ret = worry_level * val

        return ret

    @tf.function
    def monkey_play(rounds):
        # Simulates `rounds` full rounds; updates `inspected_count` in place.
        # Per-monkey state, indexed by monkey id.
        items = tf.TensorArray(tf.int64, size=1, dynamic_size=True)
        operation = tf.TensorArray(tf.string, size=1, dynamic_size=True)
        divisible_test = tf.TensorArray(tf.int64, size=1, dynamic_size=True)
        throw_if_true = tf.TensorArray(tf.int32, size=1, dynamic_size=True)
        throw_if_false = tf.TensorArray(tf.int32, size=1, dynamic_size=True)

        # Re-iterate the parsing dataset to fill the per-monkey arrays.
        for monkey_ready in dataset:
            if monkey_ready:
                idx = tf.strings.to_number(monkey[0], tf.int32)
                items = items.write(
                    idx,
                    tf.strings.to_number(tf.strings.split(monkey[1], ","), tf.int64),
                )
                operation = operation.write(idx, monkey[2])
                divisible_test = divisible_test.write(
                    idx, tf.strings.to_number(monkey[3], tf.int64)
                )
                throw_if_true = throw_if_true.write(
                    idx, tf.strings.to_number(monkey[4], tf.int32)
                )
                throw_if_false = throw_if_false.write(
                    idx, tf.strings.to_number(monkey[5], tf.int32)
                )

        if tf.equal(part, 1):
            divisor = tf.constant(3, tf.int64)
        else:
            # Working modulo the product of all divisors preserves every
            # divisibility test while keeping the worry levels bounded.
            divisor = tf.reduce_prod(divisible_test.stack())

        for r in tf.range(rounds):
            # Now items contains all the starting items for every monkey
            # Let's play
            for m in tf.range(monkey_count):
                m_items = items.read(m)
                op = operation.read(m)
                test = divisible_test.read(m)

                # tf.print("Monkey ", m, ":")
                for i in tf.range(tf.shape(m_items)[0]):
                    # tf.print(
                    #     " Monkey inspects an item with a worry level of ", m_items[i]
                    # )
                    worry_level = apply_operation(m_items[i], op)
                    # tf.print(
                    #     " Worry level is processed according to: ",
                    #     op,
                    #     " becoming: ",
                    #     worry_level,
                    # )
                    if tf.equal(part, 1):
                        worry_level //= divisor
                        # tf.print(
                        #     " Monkey gets bored with item. Worry level is divided by 3 to ",
                        #     worry_level,
                        # )
                    else:
                        worry_level = tf.math.mod(worry_level, divisor)

                    # Route the item according to the divisibility test.
                    if tf.equal(tf.math.mod(worry_level, test), 0):
                        dest = throw_if_true.read(m)
                    else:
                        dest = throw_if_false.read(m)

                    # tf.print("dest items before: ", items.read(dest))

                    items = items.write(
                        dest,
                        tf.concat(
                            [items.read(dest), tf.expand_dims(worry_level, axis=0)],
                            axis=0,
                        ),
                    )
                    # tf.print("dest items: ", items.read(dest))

                    # One more inspection performed by monkey m.
                    update = tf.tensor_scatter_nd_add(
                        inspected_count,
                        [[tf.cast(m, tf.int64)]],
                        [tf.constant(1, tf.int64)],
                    )
                    inspected_count.assign(update)

                # Monkey m has thrown away everything it was holding.
                items = items.write(m, [])

            # tf.print("after: ", items.concat(), summarize=-1)

    monkey_play(20)
    # Monkey business = product of the two highest inspection counts.
    top_values, _ = tf.math.top_k(inspected_count, k=2)
    monkey_business = tf.reduce_prod(top_values)
    tf.print("Part 1: ", monkey_business)

    inspected_count.assign(tf.zeros_like(inspected_count))
    part.assign(2)
    monkey_play(10000)
    top_values, _ = tf.math.top_k(inspected_count, k=2)
    monkey_business = tf.reduce_prod(top_values)
    tf.print("Part 2: ", monkey_business)

    return 0


if __name__ == "__main__":
    INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake")
    sys.exit(main(INPUT))
abcccccccccccccaaaaaccccccccccccccccccccaaaaaaaaaaaaaccccccccccccccccccccccccccccccccccccccccccccccccccccccccaaaaa 3 | abccccccccccccccaaaaaccccccccccccccaaaaacccaaaaaacccccaaccccccccccccccccccccccccccccccccccccccccccccccccccccccaaaa 4 | abccccccccccccccaaaaacccccccccaacccaaaaacccaaaaaaaccccaaaacaacaaccccccccccccccccccccccccaaaccccaaaccccccccccccaaaa 5 | abcccccccccccccaaaaacccccccaaaaaccaaaaaacccaaaaaaaacaaaaaacaaaaaccccccccccccccccccccccccaaacccaaaaccccccccccccaaac 6 | abccccccaacaaaccccaaccccccccaaaaacaaaaaaccaaaacaaaacaaaaaccaaaaaaccccccccccccccccccccccccaaaaaaaacccccccccccccaacc 7 | abccccccaaaaaaccccccccccccccaaaaacaaaaaaccaaaaccaaaacaaaaacaaaaaacccccccccccccccccccccccaaaaaaaaaccccccccccccccccc 8 | abccccccaaaaaacccccccccccccaaaaaccccaaccccaacccccaaccaacaacaaaaaccccccccccccccccccccccccccaaakkkkllllcccaaaccccccc 9 | abccccccaaaaaaacccccccccccccccaaccccaacccccccccccccccccccccccaaaccccccaaaacccccccccjjjjkkkkkkkkkkllllccccaacaccccc 10 | abcccccaaaaaaaacccccaaccccccccccccccaaaaaaccccccccccccccccccaaccccccccaaaaccccccccjjjjjkkkkkkkkkppllllcccaaaaacccc 11 | abcccccaaaaaaaaccaaaacccccccccccccccaaaaaccccccccccccccccaacaaccccccccaaaacccccccjjjjjjjkkkkkppppppplllccaaaaacccc 12 | abccccccccaaaccccaaaaaacccccccccccaaaaaaaccccccccccccccccaaaaacccccccccaacccccccjjjjoooooooppppppppplllcccaaaccccc 13 | abccccccccaaccccccaaaaaccccaacccccaaaaaaaaccccaaacccccccccaaaaaaacccccccccccccccjjjooooooooppppuuppppllcccaaaccccc 14 | abccccccaacccccccaaaaacccccaaaccaaaaaaaaaaccaaaaaaccccccaaaaaaaaaacaaaccccccccccjjjoooouuuoopuuuuupppllcccaaaccccc 15 | abacccccaaccccccccccaacccccaaaaaaaccaaaaaaccaaaaaaccccccaaaaaccaaaaaaaccccaaccccjjoootuuuuuuuuuuuuvpqlllcccccccccc 16 | abaccaaaaaaaacccccccccccccccaaaaaaccaacccccccaaaaacccccccacaaaccaaaaaaccaaaacaccjjooottuuuuuuuxyuvvqqljjccddcccccc 17 | abcccaaaaaaaaccccccccccccaaaaaaaaacaacaaccccaaaaaccccccccccaaaaaaaaaacccaaaaaacciijootttxxxuuxyyyvvqqjjjjdddcccccc 18 | 
abcccccaaaaccccaaacccccccaaaaaaaaacaaaaaccccaaaaaccccccccccccaaaaaaaaacccaaaaccciiinntttxxxxxxyyvvqqqqjjjddddccccc 19 | abccccaaaaaccccaaaaacccccaaaaaaaaaaaaaaaaccccccccccccccccccccaaaaaaaaaaccaaaaccciiinntttxxxxxxyyvvvqqqqjjjdddccccc 20 | abccccaaaaaaccaaaaaccccccccaaaaaaaaaaaaaacccccccccccccccccccccccaaacaaacaacaaccciiinnnttxxxxxyyyvvvvqqqqjjjdddcccc 21 | SbccccaaccaaccaaaaacccccccccaaaaaaaaaaaaacccccccccccccccccccccccaaacccccccccccciiinnntttxxxEzzyyyyvvvqqqjjjdddcccc 22 | abcccccccccccccaaaaacccccccaaaaaaaaacaaaccccccccccccccccccccccccaaccccccccccccciiinnnttxxxxyyyyyyyyvvvqqqjjjdddccc 23 | abcccccccccccccaaccccccccccaaaaaaaaccccccccccccccccccccccccccccccccccccccccccciiinnntttxxyyyyyyyyyvvvvqqqjjjdddccc 24 | abccccccccccccccccccccccccaaaaaaaacccccccccccccccccccccccccccccccccccccccccccciiinntttxxxwwwyyywwvvvvrqqjjjjdddccc 25 | abcccccccccccccccccccccccccccaaaaaaccccccccccccccccccccccccccccccccccccccccccciinnntttxwwwwwyyywwvvvrrrqkkkeddcccc 26 | abcccccccccccccccccccccccccccaaaaaaccccccccccccccccccccccccccccccccccccccccccchhnnntttsswwswwyywwrrrrrrkkkkeeecccc 27 | abcccccccccccccccccccccccccccaaaaaacccccccccccccccccccaccccccccccccaaacccccccchhhnmmssssssswwwwwwrrrkkkkkeeeeecccc 28 | abcccccccccccccccccccccccccccccaaacccccccccccccccccccaaccccccccccaaaaaacccccaahhhmmmmmsssssswwwwrrrkkkkkeeeeeccccc 29 | abaacccccccccccccaccccccccccccccccccccccccccccccccaaaaacaacccccccaaaaaacaaaaaahhhhmmmmmmmmssswwwrrkkkkeeeeeacccccc 30 | abacccccccccccccaaaaaaaaccccccccaaacccccccaaccccccaaaaaaaacccccccaaaaaacaaaaaaahhhhmmmmmmmmsssrrrrkkkeeeeeaacccccc 31 | abaaaccccaaccccccaaaaaacccccccccaaacccaacaaaccccccccaaaacccccccccaaaaacccaaaaaaahhhhhhhmmmmlsssrrllkfeeeeaaaaacccc 32 | abaaaccaaaaccccccaaaaaacccccccccaaaaaaaaaaaaaacccccaaaaacccccccccaaaaacccaaaaaaachhhhhgggmllsssrrllkffeaaaaaaacccc 33 | abaacccaaaaaacccaaaaaaaacccccaaaaaaaaaaaaaaaaacccccaacaaacccccccccccccccaaaaaacccccchggggglllllllllfffaaaaaaaacccc 34 | 
"""
Solution in pure TensorFlow to the puzzle

https://adventofcode.com/2022/day/12

of the Advent of Code 2022.
"""

import string
import sys
from pathlib import Path

import tensorflow as tf


def main(input_path: Path) -> int:
    """entrypoint

    Hill-climbing puzzle: the input is a grid of elevations 'a'-'z' plus
    the markers 'S' (start) and 'E' (target).  Part 1 BFS-es from S moving
    uphill by at most one level per step; part 2 BFS-es backward from E
    looking for the closest low cell.
    """

    dataset = tf.data.TextLineDataset(input_path.as_posix())
    # One element per grid row, split into single characters.
    dataset = dataset.map(tf.strings.bytes_split)

    # Character -> elevation: 'a'..'z' -> 0..25, 'S' -> -1, 'E' -> 26.
    keys_tensor = tf.concat(
        [tf.strings.bytes_split(string.ascii_lowercase), tf.constant(["S", "E"])],
        axis=0,
    )
    values_tensor = tf.concat([tf.range(0, 26), tf.constant([-1, 26])], axis=0)
    lut = tf.lookup.StaticHashTable(
        tf.lookup.KeyValueTensorInitializer(keys_tensor, values_tensor),
        default_value=-1,
    )

    dataset = dataset.map(lut.lookup)

    grid = tf.convert_to_tensor(list(dataset))
    # Per-cell 0/1 flags, shared by both BFS passes (reset in between).
    visited = tf.Variable(tf.zeros_like(grid))

    @tf.function
    def _neighs(grid: tf.Tensor, center: tf.Tensor):
        # Values and coordinates of the 4-neighborhood of `center`,
        # clipped at the borders (corners get 2 neighbors, edges 3).
        y, x = center[0], center[1]

        shape = tf.shape(grid) - 1

        if tf.logical_and(tf.less(y, 1), tf.less(x, 1)):  # 0,0
            mask = tf.constant([(1, 0), (0, 1)])
        elif tf.logical_and(tf.equal(y, shape[0]), tf.equal(x, shape[1])):  # h,w
            mask = tf.constant([(-1, 0), (0, -1)])
        elif tf.logical_and(tf.less(y, 1), tf.equal(x, shape[1])):  # top right
            mask = tf.constant([(0, -1), (1, 0)])
        elif tf.logical_and(tf.less(x, 1), tf.equal(y, shape[0])):  # bottom left
            mask = tf.constant([(-1, 0), (0, 1)])
        elif tf.less(x, 1):  # left
            mask = tf.constant([(1, 0), (-1, 0), (0, 1)])
        elif tf.equal(x, shape[1]):  # right
            mask = tf.constant([(-1, 0), (1, 0), (0, -1)])
        elif tf.less(y, 1):  # top
            mask = tf.constant([(0, -1), (0, 1), (1, 0)])
        elif tf.equal(y, shape[0]):  # bottom
            mask = tf.constant([(0, -1), (0, 1), (-1, 0)])
        else:  # generic
            mask = tf.constant([(-1, 0), (0, -1), (1, 0), (0, 1)])

        coords = center + mask
        neighborhood = tf.gather_nd(grid, coords)
        return neighborhood, coords

    # Capacity: worst case every cell sits in the queue once.
    queue = tf.queue.FIFOQueue(
        tf.cast(tf.reduce_prod(tf.shape(grid)), tf.int32),
        tf.int32,
        (3,),  # x,y,distance
    )

    ends = tf.cast(tf.where(tf.greater_equal(grid, 25)), tf.int32)  # NOTE(review): unused
    start = tf.cast(tf.where(tf.equal(grid, -1))[0], tf.int32)

    def bfs(part2=tf.constant(False)):
        """BFS over the elevation grid; returns the step count, or -1.

        part 1: start from S, climb by at most +1 per step, stop at
        elevation 25.  part 2: start from E and walk the edges reversed,
        stopping at elevation 1.
        NOTE(review): the stop values (25/1 rather than 26/0) combined
        with the final `distance - 1` look tuned to this puzzle's grids —
        confirm before reusing this search elsewhere.
        """
        if tf.logical_not(part2):
            queue.enqueue(tf.concat([start, tf.constant([0])], axis=0))
            dest_val = 25

            def condition(n_vals, me_val):
                return tf.where(tf.less_equal(n_vals, me_val + 1))

        else:
            end = tf.cast(tf.where(tf.equal(grid, 26)), tf.int32)[0]
            queue.enqueue(tf.concat([end, tf.constant([0])], axis=0))
            dest_val = 1

            def condition(n_vals, me_val):
                return tf.where(tf.greater_equal(n_vals, me_val - 1))

        while tf.greater(queue.size(), 0):
            v = queue.dequeue()
            me, distance = v[:2], v[2]
            me_val = tf.gather_nd(grid, [me])
            already_visited = tf.squeeze(tf.cast(tf.gather_nd(visited, [me]), tf.bool))
            if tf.logical_not(already_visited):
                if tf.reduce_all(tf.equal(me_val, dest_val)):
                    return distance - 1
                visited.assign(tf.tensor_scatter_nd_add(visited, [me], [1]))

                # Neighbors reachable under the climbing rule...
                n_vals, n_coords = _neighs(grid, me)
                potential_dests = tf.gather_nd(
                    n_coords,
                    condition(n_vals, me_val),
                )

                # ...that have not been expanded yet.
                not_visited = tf.equal(tf.gather_nd(visited, potential_dests), 0)
                neigh_not_visited = tf.gather_nd(potential_dests, tf.where(not_visited))

                # Enqueue them together with their distance (distance + 1).
                to_visit = tf.concat(
                    [
                        neigh_not_visited,
                        tf.reshape(
                            tf.repeat(distance + 1, tf.shape(neigh_not_visited)[0]),
                            (-1, 1),
                        ),
                    ],
                    axis=1,
                )
                queue.enqueue_many(to_visit)

        return -1

    tf.print("Steps: ", bfs())
    # Drain the queue and reset the visited mask before the second search.
    queue.dequeue_many(queue.size())
    visited.assign(tf.zeros_like(visited))

    tf.print("Part 2: ", bfs(True))
    return 0


if __name__ == "__main__":
    INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake")
    sys.exit(main(INPUT))
"""
Solution in pure TensorFlow to the puzzle

https://adventofcode.com/2022/day/2

of the Advent of Code 2022.
"""

import sys
from pathlib import Path

import tensorflow as tf


def main(input_path: Path) -> int:
    """entrypoint

    Rock-paper-scissors tournament.  Every input line is
    "<opponent> <second column>": part 1 reads the second column as our
    move, part 2 reads it as the required outcome.
    """

    # ["A", "Y"]-style pairs, one per round.
    rounds = tf.data.TextLineDataset(input_path.as_posix()).map(
        lambda line: tf.strings.split(line, " ")
    )

    # Shape score: X/rock = 1, Y/paper = 2, Z/scissors = 3.
    action_to_score = tf.lookup.StaticHashTable(
        tf.lookup.KeyValueTensorInitializer(
            tf.constant(["X", "Y", "Z"]), tf.constant([1, 2, 3])
        ),
        default_value=-1,
    )

    def play(pair):
        # Part 1 scoring: outcome (0 lose / 3 draw / 6 win) + shape score.
        them = pair[0]
        me = pair[1]
        result = 3  # draw unless a branch below says otherwise
        shape_score = action_to_score.lookup(me)
        if tf.equal(them, "A"):  # rock
            if tf.equal(me, "Y"):
                result = 6
            if tf.equal(me, "Z"):
                result = 0
        if tf.equal(them, "B"):  # paper
            if tf.equal(me, "X"):
                result = 0
            if tf.equal(me, "Z"):
                result = 6
        if tf.equal(them, "C"):  # scissors
            if tf.equal(me, "X"):
                result = 6
            if tf.equal(me, "Y"):
                result = 0
        return result + shape_score

    tf.print(
        "sum of scores according to strategy: ",
        tf.reduce_sum(
            tf.convert_to_tensor(list(rounds.map(play).as_numpy_iterator()))
        ),
    )

    # Outcome score: X/lose = 0, Y/draw = 3, Z/win = 6.
    outcome_to_score = tf.lookup.StaticHashTable(
        tf.lookup.KeyValueTensorInitializer(
            tf.constant(["X", "Y", "Z"]), tf.constant([0, 3, 6])
        ),
        default_value=-1,
    )

    @tf.function
    def play_knowing_outcome(pair):
        # Part 2 scoring: derive our move from the required outcome.
        them = pair[0]
        wanted = pair[1]

        # Default covers the one case not handled below: draw against C.
        move = tf.constant("Z")
        if tf.equal(wanted, "Y"):  # draw -> mirror the opponent
            if tf.equal(them, "A"):
                move = tf.constant("X")
            if tf.equal(them, "B"):
                move = tf.constant("Y")
        if tf.equal(wanted, "X"):  # lose
            if tf.equal(them, "A"):
                move = tf.constant("Z")
            if tf.equal(them, "B"):
                move = tf.constant("X")
            if tf.equal(them, "C"):
                move = tf.constant("Y")
        if tf.equal(wanted, "Z"):  # win
            if tf.equal(them, "A"):
                move = tf.constant("Y")
            if tf.equal(them, "B"):
                move = tf.constant("Z")
            if tf.equal(them, "C"):
                move = tf.constant("X")

        return action_to_score.lookup(move) + outcome_to_score.lookup(wanted)

    tf.print(
        "sum of scores according to new strategy: ",
        tf.reduce_sum(
            tf.convert_to_tensor(
                list(rounds.map(play_knowing_outcome).as_numpy_iterator())
            )
        ),
    )

    return 0


if __name__ == "__main__":
    INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake")
    sys.exit(main(INPUT))
-------------------------------------------------------------------------------- /2022/3/input: -------------------------------------------------------------------------------- 1 | NLBLfrNNLvqwbMfDqSjSzzSJjjggcdVs 2 | lTRGPPZnRRHszcsZdSsccZ 3 | CFTTFtFHTtCtDDzrmBtrBD 4 | BJldgBWnRgWNWtllSlWShMcLcVSvVjbVVVvDVVVL 5 | HFGFwqQPQGwHrTFpwmThMbDDVcVmLvvshj 6 | HrpHrGPZZCQrfqlNdtMlzfMltlgn 7 | hQLhBtBtQNQjBjNLvtLjzLJpWbjJdppSwjpCCplllJdj 8 | FGFsmccSPTVPfVVHpJJgwlJwwWJWpCmR 9 | sFPfPFHZTHScnzBttqzvQzqZ 10 | MNTGMTnGWvTwwwnZhNZnWDPPdSjqsSPWjmBCSBWS 11 | RJrtVfRlLrfHgblHJVBjqqFmjCdBJjDmJdSD 12 | tgRftftRcRLftrpHpflHlctVwNNvZNcTwZnznQzwTzmhQwQh 13 | sQPpQpQhnlNsJpJSQphHcZffLfgLHSfHVHHFZZ 14 | zBCvrrWzTwqzcbtbqCbrCCwWLMfVVmmHVfqHFHFgGHLZGmVG 15 | rvvjBzTjrwQRcpjNsRss 16 | RrnNWJJNrplbLJBBWWZstVpmtZftptfmfsMM 17 | GHjnwndzGcqjGgqtfMsvfsMmMvZZ 18 | cQgwHTPjPwGwjdHHTjwccQBDLlWNrLJLNrnWrBRBlS 19 | BBBQJGQslJtcGqfgHpPnfftwqw 20 | RDMLDWLNTLTTNjNgvdqbqRnwqbfwPRzbVHHV 21 | mgdNgdTSMWmSQsQsBQcFSQJr 22 | RqQhRpsdqnvdlPBfzdVlVJPM 23 | SSZsDmSmssGZbJVwPBSzBBMfCf 24 | LFFNGLgLHFWrWHFmLWrLWLrsQshqQnspNcRTjnpTtjRRjh 25 | DshNcgmDVClpCfRs 26 | TnZjTWrtrqtWnGTrbqqTTZZwMpSVRSflRMflMjRCSfpJMSJl 27 | wHbGHrWHWrbnbFtTZcLzLgHzzgcmpNzzzz 28 | hfWQdhQHmPWhqdhQqpdQqWtzvwtCMCRvNCwNzMtNsHsz 29 | lBLnJZLlFBlZjGFbVjjlJRSMzzSszzpGpstSpvMNtN 30 | rVZVgZVgLnjFVlVQDDfhcfmWrQdTfp 31 | zqTrVZvDLGdMMLtcpR 32 | bClsCmQbjFtjljllntsGjGWPdcRWhMppPcpddR 33 | mbggmBtQtlVqVzgrzDzv 34 | LtpnGnGNFtbGntbbQPhTlRpRTzDlcClPCl 35 | mSZHgZMhZVmWPHccllzPzcCP 36 | sZhWvSsBqmBSqmgMqWZjQjfjrLbvGbtNFjvLtb 37 | TvMZMTTzWHNNFPsNbvDG 38 | dhVmwfhcnhRnRfdlGsDNNGqNLFNNTGdq 39 | JTcVVTlThmfmrrWQZHMrpZtJ 40 | zGMBMzPNDNcNZLBzcmLvbHltDbWjbthhqvvHtg 41 | rdJSQSTfQrRnsRfJJQHhWgbhtQblgHWgWH 42 | nTrlpVfSpswsrsTSdnRsfnJJPBZmMBcBZZGmZBmBMmcCNzpC 43 | nfzcnSlRJJScTZTzJZnsNjNrHQqrWBjsBRdWBr 44 | LgHwDLwmMDCphttsqDjNNssBGNsGQB 45 | hvwgwvghPbpggLtmmbCmSfzFfVSlZnncJTPZHSnF 46 | DbsnzDCsBPHDQHFD 47 | GGcWWnrGSjBMrMlhfr 48 | 
GNpqddqWLqdScWqcVnCswmzJRVzVVbJp 49 | NzPpPBppzjbpCrrQhggqvwwqRwrwQl 50 | SDddnLcDLncghQBWvvgR 51 | tfSLLBmmmDJGFDLJmMMsZZssZzPTzjTpzZzP 52 | RRCrJbSfNrRQjvvHppmpbZvv 53 | llhVGGGMPVTMlTdVzcPVHZmvqpvqZFhHFqmjFrHF 54 | ccGlzPMVwBGfBrLCDJrDLf 55 | VcVGZZVMlncjTqcjsWWf 56 | hzJRtRphQJtBRhzFpdrfrqrFsqswWrmsTmFr 57 | LJHzBQJRhPHpzQWBRzphHRQSMZlnbGMVMVnLMGbDvvbMVl 58 | sVdHFFmhPGVTdFmVFsgPdBBtBZjSpGSvtpBztpGjzt 59 | HCHwlncHfpnjSSpBzz 60 | wWQwlWWlfWcQMfCrfwTRDrHsDmPDgFVTRVsV 61 | qllqNlmglNNdzLDddGGNSHScMHMWPcPSqptQSSHJ 62 | bhhbChVsRjwGRCbZCcSZPpPMMWJSPMtPpW 63 | BhTVBsbrhCTrhfbrCTTTRRfngzrnnLvdzgGvNzdzLNvrLm 64 | nNwNPnjzPsNRHpFDHLLsLVHF 65 | MSBMgMZmWqScCFGWWDFGVvwW 66 | JBghBwTrgchrTbQRjztQPQbfhQ 67 | PPBpBHGfBHGpRRPDLMmnscRLdnzmdw 68 | bMFVTNVTVjbbrCWCsndsDwjDzwmwsnms 69 | QQbJrCCMWCVCVMShHGPQlHhghGlt 70 | dBQMdJQHbWMWHZLRRsmPVJmppJqG 71 | FSrzFnPnGNrlsGps 72 | FvwTnCzDznTwzhtHjZvbdbjQfZgPMv 73 | gJjVQzLgLvPJdMrsDsQtdQrw 74 | hBpmWfSfHCWNfmSppMrDDMwwMbDMlMcbcB 75 | fhphGpfNCpNSNRhGhqPVvjvjjjTzVRPzLr 76 | TsnznnrZsNwGNrbWbSvVgWzVSbgv 77 | mBBFBFQFBhSHggVnmvfW 78 | BJFcRLFFBhLpMNcdNCscZNnqld 79 | vqwQGZNSwNQHQQZNSwvpwMdlnMfBClZBTzBnTfTJCB 80 | sbcrjscccmPmrtFRrtcsPssmVJBfTCldnJJdVzMlBnBJTBlR 81 | tbDmhtdDrPjbDcrDWSHGqQqvHpWSgNHh 82 | VVWSwCpWTVWWwVbbvPJDwvDtwtMttLtH 83 | nfNLcNsfZNnGggZNNqGlMPPDDrlvGHHrtPJMHP 84 | fhgqfznczcjpVRjFLSLz 85 | pvcBCrPrcPBpTccGjrQhQdwMsqdGQddswqhS 86 | FggLnnFzzNFNmstlShMVwQtsgq 87 | RnbzHmNfRHmmnLzRnLDRZHRrCPJBvCWWpcjvJpwWfjwvrc 88 | HfdzzrGfRrQqrGVnznQvgjcjhhlMTlFjchFMVL 89 | swwWWBPNwPwZbvPMFTLjTlgP 90 | BJBJJDZtSrJqnFFfFJ 91 | lqqMSMBMttLMjtHjqjrdBnSfcpfwCTGbCffwCcwbSfTcJf 92 | gVFhVRZgVzJshFZVTbbFfvpcwCTCfbcG 93 | hRWZzRVVZmsWJVRQsQmqqndQrnqnQLqnQqtBlr 94 | SgPhCGGzczlCDVDWrlTL 95 | jvdvFvjqwfdrNfNDlzLzRW 96 | jzjFHnvdtdnmHZttqmbFdFqFsSBJspcgcSPQpsQPBPgpgmSG 97 | qqmQFmrbbWWrtqTVVrgLJTzzNzrJ 98 | nCjMGncHMJvzmmHmVV 99 | DpjPDGwnmDhbwQqZtqqW 100 | JlTTLLMRqlMlJMJgBLLnnCZCFrrrdTGrjPjGFr 101 | vwVpHVHVwvHmQVsFFPZQrjrrrZPNdn 102 | 
wtvmtwvpmbwVvssPflSBlRBqLMlLJBzSLb 103 | rtrTtBwTsfjZrnqJQplNTcqqlvQT 104 | sHzdWFzSzmGDDRVGVDGHWVhvcLLpNpqJCQqLhClhlcvqpC 105 | VRbmRmRHGdsnggbPMMftZB 106 | LMhtCSSftfTzdCdMhSCdMsQGQbGnbGQQMQggDNgR 107 | FjFHWJwJjRNvQggwnDsm 108 | plBVRRqWRHVHWFTdTthTLCfzflzh 109 | VjVdrHFWPmTjRGSRGq 110 | DMWMZDncQDcfpQzmTQTSQRGTGqNz 111 | WMnsCZJCffDnfCfvnZCPhwVrHBVrBlVHrhswLh 112 | TCZltglCZWQsMhqRHhsrHC 113 | vbbNBbGBmNLzczNmNjrRVbhqHMsVqwHVRwqH 114 | mzBSmzDLvPDPzcLPvGzWWSnsJstWlSsSlddWZJ 115 | nlFJZTlBbFBVZldFnlZlCQvQrsMQzzsCdCLszvLD 116 | hPwgVqSwmRcgSRmWgSwmsfrLPssLvQQfDPDvfMfD 117 | htSwtWHWVRNtWmwgtnJplnbFpBbNTnBTFN 118 | vnhBfSSvRttPJnlctl 119 | frHVDHFwfDLVzVlJMNTHllJHMNlZ 120 | bGGFFbqVLVVbzrFwGfdgFdwvhpCqmBpRqWpBpQpSSpSQSm 121 | RMBMMZBBmmmhZmPjTZhZRPnNQvwWfcSvDfQWBSfdQSNdDc 122 | LHzlVGHqVGzHGzsbCbqglbJddCcvJNDDvdDCJSQvWfwf 123 | HlzrHHgsqbHsVGHqbsGsbbsqFmmjnTTFmnjmRQRPFTFPZtrj 124 | LSLWRMLrLHqqwCBJqCstsG 125 | vbQfPjndQnbcQfmndRwttBNZRsGdsCBJ 126 | bmcnTfbvvPRRRFcmfhjHgzMrSrSMSLzSWgVhML 127 | cqWNtsdsWdlsnBsDJwZJSzFFBZ 128 | RhfvggPfffbVbfPmpMvRRFrZDFFbzDDZZrHwJDbwzb 129 | QhRgvpTVpPgJVGTWWNcTtqNLtG 130 | nppPsSPtPZtFdSWdvFvSnnPscRjjHRTLLjCmRLTmCCscrRcc 131 | wwGqDqfMrGqlhllqhhNwzGNTjCRTmRLTHzJjzBmmRmjCLc 132 | qrblfrVwGwbhwqghfqVhNMhtWSvFdPdQtQdgtWpvWPQWQv 133 | cLJvcccHNcLDwCdRDvjdDR 134 | ttPChbqhZmtWGCtZQwBdsQPQdwwsddQF 135 | WnqbbgGVZCZnnlWhCVtbtVgMMrJLLJNrNcHMJNJTJNMp 136 | vLvWghFhBWqGsVTV 137 | JdpdmbrBmsQGGlVqdw 138 | CJZMHPMZJHmzCnZHHrMjSvcDLDccNSBCDDFDjj 139 | mDgnmRVmqgCSScsVllCj 140 | HLTTMTHZQjZzTzprTGPwtcdlLcllWllWtCSwld 141 | QMHHPzNrQBQGNHzQqbjnBbBbmbfjbqjb 142 | tgPNgzzsSPhjSgbPztSbpDJZRJDTRLTTpRHpNRHZ 143 | crlfGGFlBGBrBcrnFlrFFFCrLpHHJTcLRJJVJvDHtHZRDDRR 144 | tFFtrdmGffnndmzhbWPgzPdsWQPW 145 | JHhvgvzJhBGSLHhgBBSBHzdBflDfllTqLlwLqflfMcctCcfl 146 | RjWQWrnjpjjdNQmmNNWZWpCZtqtDtMwwwcwtcDqcTDqC 147 | PpNPjQspmWpPWRWnVQQpQsWVvvggJBvBSGGdJVhJSJBFdb 148 | FrPTcrCGbcTCChrwNMRDMRvWRdHvzVRVTR 149 | LJmQSmQfJnssmjsHSRFHSHzdVzSFHV 150 | 
nQtgssgfstjLnmplttgFLLPPpGBrcrchBhCbhhqwPPCC 151 | qFtZtFzstvvPvqttNrCJFWJRFCJFRRWR 152 | ffBBfjQdmdQBfQfmLVQRPRpNNCgPNNRThdWPrr 153 | fQVQlHnBQjBLjlvDqsvPqHMsctSb 154 | rqhJnTTJqTchnTdhncmmgMVqtSBsBspgBtHLLWsBBWpWBHSH 155 | bPldNljGZjNCbFCbwwGDWtBDDtsDtLwt 156 | NjvlvvzQFFQhQqdQnMTM 157 | DJHGghhFhHgsGgThrtrQWBPPJWWCzzzP 158 | lTpffNTdZfrcwlCwCrWz 159 | dmvdvffSSpjTLjFhFMRRbnRbjj 160 | LfSqfmvfWPBPdljNNFVFzVJLNjJz 161 | QZQnQcpMhwhZchQnwbvCCDNDCNpzpFsJpsRsRj 162 | rchgQnvHHhQgvnwHGTffdmdTddTGfWHW 163 | SzZGtmTjgzQCpJwpVqrVzz 164 | NWddPllPDvdbccgcHJLCpClFLLVpFLLVLV 165 | bbdRRWDNdPfgfWPWhdccNddRmBQTSGTTTZnmBQZjmsmnhGst 166 | LgvFffmfVFczCWWmWCSh 167 | MbwbTBDwbZtwBDMhSCGhscWSwVCsSw 168 | QMtdQbqtbZTjVbMtZDMgffnFnJpFvrvFprgvgq 169 | pztdqqzCrpvFqpJQwCvWBRGRWLWcWNBsNNQcNR 170 | HdbjSbVhfhcRscRmNm 171 | MDPffbjbjgFgzCZdFdgt 172 | BmDQZbmmfbmbvhvhbgCsCl 173 | GqVqMHwpGTLHLzwqJlCgsgShhvGvJgGS 174 | LTpzpLFprpfmNrBBlfQP 175 | RjRhBqZbwBbjcwgjPmRtZjZfWFfFznWQNVzQFQQnFzWmMN 176 | vpTPDCdpPSpTSSMzNHzMvFQNNWNM 177 | PDCpLGlGPdrlqRqbqbBhRLqR 178 | PmHZWmJzzzppHfHdHfddDMDLhRbMRgRMNNnPgNMM 179 | TCwBCSSjwqwVqQldTSQTtjVhtbbhbgLLbLLbMggMbDRttc 180 | QBrwFlqCfdzHdvzF 181 | GvgGvgfvlzlHGQWRjGMpjZLjZpGW 182 | DVsqJtnDsJTsTqjpLTdcmWWLpTMp 183 | NNqVhsPrrhqnJNnJNzgBvvjHCCPSjCvQQQ 184 | pqnswpqrrtqrnMsMPMqzVfgGzHBVGVftfBGzGG 185 | QWFQhhmDhJDmJJhhJLcTcfHVvTlTFTfVvgzG 186 | ZDZLddWWSgDCggChRSMPspMjpnqjMPjj 187 | MGwMFLFfssfffcGcDrnCllZtnHQCnDCZWD 188 | dbTvTThtvVVVNWVHClWQzzlQ 189 | TjbgBqTBvBvjRvbqvRmPGMcwSPJPfstSsfMBMf 190 | VtCjjqgwvhCCQdSPJJdGnwwLTT 191 | brrBsmNWlzBpSDcpSWLcWD 192 | SSFsrrrBrCqHVVQFjj 193 | LQQNLgvNDnNPHPDQjtGjnmjttBjVhSmJ 194 | sbWfsMFwdCpdCdwWJVVSltVJlLSlLSft 195 | TFcdMTbpdbwdwgTDQLgDNNrTNz 196 | gfgSsnmnWnhhctcJ 197 | ljjMfwwRTNbRqNlzVzjbtDvPvchvPCccChtJtPVW 198 | GjwpwMpbjMbRMNwqzwpQgQQBfdHfSFrBmQBg 199 | FmcmmTTMdPTGHjtGGnctcN 200 | DgqzTqCgDgpZTrqhSbSpzZfpnHjHlnbtbHBGnGjtQHnlNGWt 201 | zppLhfZTfDqsLMPdMVRwwM 202 | RtsMZJSFRWbRsJbFnFzVBpBqgdRdGzGBpDDj 203 | 
cTmvrlMQLHLllrhwlmfdQqBpdVpDqGdVpjVzBq 204 | wvTfcHhhmHlhTNLFCnFnNnFnFnMFZJ 205 | grjsjJhhNscgJFgPBnbHwLsRHzHfRLbH 206 | ldMMSSvqtSMGmSSMqLRnlRwbrLlRLRRWwL 207 | VtvDdTGGGCvMDMDTvdjhQjZppPNrJVpZPVFg 208 | wctlscwwBTDnJcLNLHDN 209 | bhhMnhqjzFRjjjPdNDDSvLdJ 210 | MWzMzbrZZZmWQzhWbMhwlspstmnswswllBCgpG 211 | rzmddBcmgFjRzSHHDR 212 | vqpgbnGpqwgbpHtbtRjHTjTfFH 213 | WWqCwvCqCJvCJvwpqvMvnvJMdPgZQQdZcWhBBBrPlLlmdQdm 214 | ZdHTtNPNPSRBbFjjTTsr 215 | WmDhGggmgWWJcZmMhVllzjJCrbjFzbsFFRCj 216 | MMGDmMGGgDGgnWGWpNnvSHStLnwffZtHnw 217 | ddZqRdqjvjZdndlfjwZQQCzmqcHLzzTTHTHzchHTmT 218 | BPVPBBWVLbFFrWgJLpNHcPSHCPSCSCChcCPHTH 219 | VFNbBJrGGJVZGGLwQGnjQL 220 | NllFnzNNnNnNzmrHmDFGLGcccRGjGwHChGwwGh 221 | StMZgPdBgbbBLLvCwCvgGwwj 222 | PsfPtBJMtPZMJPbZVVPPMMDnjDlNlmrnmWnmqzpqmVFm 223 | mGGCppgGWWgmGBzMVzBBBbBS 224 | HnrRdvZvTMtSBtbZ 225 | rHwRrjlrRwrnJrCsCDlLWCqcmCMM 226 | zHhDNmDMNNJHfMNJzjsdvvsvbvjGdCGW 227 | tVwttwwVVFBSFSZqSLjsqLdLCWCvGWcdLs 228 | ZwZgwgpBFGlHgNQmGM 229 | TNqhqvqFNWFrlqFqtDTrhTSTbLfjmjzbwMmMbjzLPDwGLPPP 230 | scVRRQHVQVVHcRHpVgJJCRHMMZGMzCwwLZPZGMMCzLGwZw 231 | dHsnQdHHdnBHspJRsVppFlNTSGGNBWBtTShNTFvG 232 | hdZthMghfbbHCgQgBp 233 | mLjTTjWrTrSCbZsLSbCS 234 | VVPJrjqcWVmrjcmWRWTZTPcWldMNqvhnhMFdvdMhfNdldGNM 235 | sFlsgtZFLFZzSZzpnQrJ 236 | DjRbcjRdBrpRQpMJMJ 237 | jNcfDqqfcDBbmqDFggpFCTpgCNhWWG 238 | LMGGbbpLcpVVbfcpcpdvPVQPmZzJZjqSjSjgZgzqZgzTmm 239 | BrRnBWrtRlhBjmqZCnqJgCSM 240 | FDWWrBHHBBDHhFHttrWFttNpfLppbfcGGsfcGsFfccpcMd 241 | jzHqjHLVqQQlHfzqlbbzqHQscvNsVrvnNZTtvNvvvcrGtv 242 | gJCSRwRpJRtNNSTstnTT 243 | wCMnFgnpCMPnJgpDQbqdQdQQLbzqDHfH 244 | MpqJWmqlNNHmmwwBLLvL 245 | QzFDFfdfQTtSGzTDVMdSFQDwHLBhHLjHjbTbHvLggccwHb 246 | VQfsSDfGftfsdGSDSSQSFssZJCCMlMWWZPWPJMZlRp 247 | lcqqhSsgTMgcqBBZnqZTBJJpdGpGVdRNMJHNGjRJdd 248 | VbfCmPbtttfwwWHdGGrjHPdrRrHN 249 | CffFFmwmDWmtCtvQbSVnTlBSDsqZhVBBSc 250 | gPZTgmwvcnqPzhnW 251 | GJVbDhpjsbWzjfNNNNMj 252 | DFCbrBJsFJpBhbVFJCtvTgmtRTtQQltmwm 253 | BLZgTJPqZzFgCGgCFlFF 254 | ljfcDvNDtHcftNdMCQnCRnhnGjCChG 255 | 
mVvSdDNDHlmHfNVlSWcSDmtpbpTzppwLPPLPPJLwTwBLPS 256 | FHRzMqvQHvndJnFlNdhZ 257 | fcjWWsjsSmmrgsGgjGcGWsPsnhZddffRdTtNDnZlnDnDThhT 258 | WSPcPsGPSRGCmLcGgpHCzBVqzbQBVpqwwQ 259 | PJzwjrVHzLPrZJHgSsNWbNbmNQtnLSSs 260 | hGhqpTBRRGFFpMpBqGpSNlQQmWlntDbmTQSsml 261 | MpcvMqBRhpFRNCcjwZwPZwJfwjHz 262 | QWJsVCQDbVWbprrWSZWFcmrS 263 | wMwvjRftMLhHfjhdMhRhjtMZrmrmZqBSpBSprvSpTzBTSF 264 | dLNNjhhhVDlNDspN 265 | MNmmtzlQPQmlttlQlHBGFFsHsPnGnFGWgs 266 | CwhhwVZcRVRcCRDWLDFHWWFGss 267 | hwdwdCwCZVSwZcrvhVwCJtbtQtpzmzQHvtpzQmmmpp 268 | CccMdVLJcnCVhCfmjGjlfwwwMwWG 269 | HDSbggDTNbRDHtTgrDpwmnGFfpGgfWfBFmlm 270 | HbDzvQNzHbQLnQddZCcn 271 | jWlqRjWwsqjHHqRDDPMPgpMLpgSMnggC 272 | VQvFfFbdTcfhbcvCpvPrnZgLLpSgLp 273 | PNQVbNTTcbdbfQdbmdVVGfbhBJlHWqGljJqBlqJJlsJJwqqR 274 | WFGnWBTrvtgnjBWsFWggTPlhSfmRSRhZMcSfhZZpRmtZ 275 | CdswHJHNsCbHLVVcZclphwcchfphZZ 276 | LdDCLHsHzbNNNQDsJLNGgPPBvFzjggPrPTrrFB 277 | pGFwwLTPjDcSCPpSdsqtMRMDdVQdVVQz 278 | JBJjZgWgJHvHJgJJbBhNJvgZzsQVRqzdfQQQMMBszRzRzRfV 279 | nlNZWZlJngbvNjgZhNvHhJvprcTclFCcTPSlTCcSpFcLrG 280 | PdHJVCbSJmSVHdLdHbsbsqRwnlDWhZnZccWqDwqDVw 281 | NvMFlGrQTvgpggFNwZhwWWhhqRWRhTqz 282 | gMjvtMpNMrfFrvlffgmdjLLjCmmLHBddLJBS 283 | zNrlzhJGdlHGHplCJQQVbLhRFRbccDSbVDLqRb 284 | WwmwnWjvjmjZPPFFFRDZqVbqqJBS 285 | tmjMJstnWnjvnsTnQMfrQMldrGlCrGfl 286 | MqWfZlpjMPBgffgPNNQnVnnqRsNVLVmR 287 | TcwGCTSvthpzCCTNVnsQVSnRnRQnNn 288 | TbrpDvvCvCwTGDzvzhpzDzljHBZbHWZgHPZJZjJJHfPf 289 | DWNNQQHRpsRWDQPQqHqqgJBCsjjsFFFngBzgjJzl 290 | tMhMwTrTDLMdmMLtMMrbmVbZhJJnnFCCjnlJjjjjBzFBgZ 291 | ttTtDmbfqWcWfqPp 292 | QhvTQqggFsmvjsFTmqZrzzwZrHnwpnplpZ 293 | WCJVGCSLtDPPtHDbHDbdpnrMnMrrpwlZrwpznLpl 294 | VVJbbVfStVHJJVtGmvsfjvssFFTvvsQj 295 | pBCqCqhWjpnWCnffJDjfWzJBZdcvwcPdvJvJcgcrdGdvggrv 296 | tlhbHbmNTbQgbGRvbZGrcg 297 | tVFLQNVlmTmQLQhpzMCBzCpzjjFMnz 298 | qhWHwNqLHrLJjqgHddFchMdnnGnRhMcR 299 | pTzTPVfZQPffNVtVVZfptRGsRbbbbcDsMMZsMZMdRn 300 | CfzPVzCfPBzPBqvWqgBwjNLjjS 301 | -------------------------------------------------------------------------------- 
/2022/3/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/3 5 | 6 | of the Advent of Code 2022. 7 | """ 8 | 9 | import string 10 | import sys 11 | from pathlib import Path 12 | 13 | import tensorflow as tf 14 | 15 | 16 | def main(input_path: Path) -> int: 17 | """entrypoint""" 18 | 19 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 20 | 21 | keys_tensor = tf.concat( 22 | [ 23 | tf.strings.bytes_split(string.ascii_lowercase), 24 | tf.strings.bytes_split(string.ascii_uppercase), 25 | ], 26 | 0, 27 | ) 28 | vals_tensor = tf.concat([tf.range(1, 27), tf.range(27, 53)], 0) 29 | 30 | item_priority_lut = tf.lookup.StaticHashTable( 31 | tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), default_value=-1 32 | ) 33 | 34 | @tf.function 35 | def split(line): 36 | length = tf.strings.length(line) // 2 37 | position = length 38 | 39 | return tf.strings.substr(line, pos=0, len=length), tf.strings.substr( 40 | line, pos=position, len=length 41 | ) 42 | 43 | splitted_dataset = dataset.map(split) 44 | 45 | @tf.function 46 | def to_priority(first, second): 47 | first = tf.strings.bytes_split(first) 48 | second = tf.strings.bytes_split(second) 49 | return item_priority_lut.lookup(first), item_priority_lut.lookup(second) 50 | 51 | splitted_priority_dataset = splitted_dataset.map(to_priority) 52 | 53 | @tf.function 54 | def to_common(first, second): 55 | first = tf.expand_dims(first, 0) 56 | second = tf.expand_dims(second, 0) 57 | intersection = tf.sets.intersection(first, second) 58 | return tf.squeeze(tf.sparse.to_dense(intersection)) 59 | 60 | common_elements = splitted_priority_dataset.map(to_common) 61 | tensor = tf.convert_to_tensor(list(common_elements.as_numpy_iterator())) 62 | tf.print("sum of priorities of common elements: ", tf.reduce_sum(tensor)) 63 | 64 | grouped_dataset = dataset.batch(3) 65 | grouped_priority_dataset 
= grouped_dataset.map( 66 | lambda line: item_priority_lut.lookup(tf.strings.bytes_split(line)) 67 | ) 68 | 69 | @tf.function 70 | def to_common_in_batch(batch): 71 | intersection = tf.sets.intersection( 72 | tf.sets.intersection( 73 | tf.expand_dims(batch[0], 0), tf.expand_dims(batch[1], 0) 74 | ), 75 | tf.expand_dims(batch[2], 0), 76 | ) 77 | return tf.squeeze(tf.sparse.to_dense(intersection)) 78 | 79 | grouped_common_elements = grouped_priority_dataset.map(to_common_in_batch) 80 | tensor = tf.convert_to_tensor(list(grouped_common_elements.as_numpy_iterator())) 81 | tf.print("sum of priorities of grouped by 3 elements: ", tf.reduce_sum(tensor)) 82 | 83 | return 0 84 | 85 | 86 | if __name__ == "__main__": 87 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 88 | sys.exit(main(INPUT)) 89 | -------------------------------------------------------------------------------- /2022/4/fake: -------------------------------------------------------------------------------- 1 | 2-4,6-8 2 | 2-3,4-5 3 | 5-7,7-9 4 | 2-8,3-7 5 | 6-6,4-6 6 | 2-6,4-8 7 | -------------------------------------------------------------------------------- /2022/4/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/4 5 | 6 | of the Advent of Code 2022. 
7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def main(input_path: Path) -> int: 16 | """entrypoint""" 17 | 18 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 19 | pairs = dataset.map(lambda line: tf.strings.split(line, ",")) 20 | ranges = pairs.map( 21 | lambda pair: tf.strings.to_number(tf.strings.split(pair, "-"), tf.int64) 22 | ) 23 | 24 | contained = ranges.filter( 25 | lambda pair: tf.logical_or( 26 | tf.logical_and( 27 | tf.math.less_equal(pair[0][0], pair[1][0]), 28 | tf.math.greater_equal(pair[0][1], pair[1][1]), 29 | ), 30 | tf.logical_and( 31 | tf.math.less_equal(pair[1][0], pair[0][0]), 32 | tf.math.greater_equal(pair[1][1], pair[0][1]), 33 | ), 34 | ) 35 | ) 36 | contained_tensor = tf.convert_to_tensor( 37 | list(iter(contained.map(lambda ragged: tf.sparse.to_dense(ragged.to_sparse())))) 38 | ) 39 | tf.print("Fully contained ranges: ", tf.shape(contained_tensor)[0]) 40 | 41 | overlapping = ranges.filter( 42 | lambda pair: tf.logical_not( 43 | tf.logical_or( 44 | tf.math.less(pair[0][1], pair[1][0]), 45 | tf.math.less(pair[1][1], pair[0][0]), 46 | ) 47 | ) 48 | ) 49 | 50 | overlapping_tensor = tf.convert_to_tensor( 51 | list( 52 | iter(overlapping.map(lambda ragged: tf.sparse.to_dense(ragged.to_sparse()))) 53 | ) 54 | ) 55 | 56 | tf.print("Overlapping ranges: ", tf.shape(overlapping_tensor)[0]) 57 | return 0 58 | 59 | 60 | if __name__ == "__main__": 61 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 62 | sys.exit(main(INPUT)) 63 | -------------------------------------------------------------------------------- /2022/5/fake: -------------------------------------------------------------------------------- 1 | [D] 2 | [N] [C] 3 | [Z] [M] [P] 4 | 1 2 3 5 | 6 | move 1 from 2 to 1 7 | move 3 from 1 to 3 8 | move 2 from 2 to 1 9 | move 1 from 1 to 2 10 | -------------------------------------------------------------------------------- /2022/5/main.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/5 5 | 6 | of the Advent of Code 2022. 7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def main(input_path: Path) -> int: 16 | """entrypoint""" 17 | 18 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 19 | 20 | @tf.function 21 | def to_array(line): 22 | length = tf.strings.length(line) + 1 23 | stacks = length // 4 24 | ta = tf.TensorArray(tf.string, size=0, dynamic_size=True) 25 | for i in tf.range(stacks): 26 | substr = tf.strings.strip(tf.strings.substr(line, i * 4, 4)) 27 | stripped = tf.strings.regex_replace(substr, r"\[|\]", "") 28 | ta = ta.write(i, stripped) 29 | 30 | return ta.stack() 31 | 32 | stacks_dataset = dataset.filter( 33 | lambda line: tf.strings.regex_full_match(line, r".*\[.*") 34 | ).map(to_array) 35 | 36 | stacks_tensor = tf.convert_to_tensor(list(stacks_dataset)) 37 | num_stacks = tf.shape(stacks_tensor, tf.int64)[1] + 1 38 | 39 | moves_dataset = dataset.skip(tf.shape(stacks_tensor, tf.int64)[0] + 2) 40 | 41 | # stacks = tf.Variable( 42 | # stacks_tensor, validate_shape=False, dtype=tf.string, shape=tf.TensorShape(None) 43 | # ) 44 | 45 | max_stack_size = 200 46 | stacks = tf.Variable(tf.zeros((max_stack_size, num_stacks - 1, 1), dtype=tf.string)) 47 | 48 | def initialize_stacks(): 49 | stacks.assign(tf.zeros_like(stacks)) 50 | 51 | # shape = tf.shape(stacks) 52 | # stacks.assign( 53 | # tf.reshape( 54 | # stacks, 55 | # [ 56 | # shape[0], 57 | # shape[1], 58 | # 1, 59 | # ], 60 | # ) 61 | # ) 62 | 63 | indices_x, indices_y = tf.meshgrid( 64 | tf.range(max_stack_size - tf.shape(stacks_tensor)[0], max_stack_size), 65 | tf.range(tf.shape(stacks_tensor)[1]), 66 | ) 67 | indices = tf.stack([indices_x, indices_y], axis=-1) 68 | 69 | updates = tf.expand_dims(tf.transpose(stacks_tensor), axis=2) 70 | 
stacks.assign(tf.tensor_scatter_nd_update(stacks, indices, updates)) 71 | 72 | initialize_stacks() 73 | 74 | num_elements = tf.lookup.experimental.MutableHashTable( 75 | tf.int64, tf.int64, default_value=-1 76 | ) 77 | 78 | def update_num_elements(): 79 | num_elements.insert( 80 | tf.range(num_stacks - 1), 81 | tf.squeeze( 82 | tf.reduce_sum(tf.cast(tf.not_equal(stacks, ""), tf.int64), axis=[0]) 83 | ), 84 | ) 85 | 86 | update_num_elements() 87 | 88 | one_at_a_time = tf.Variable(True) 89 | 90 | # move 1 from 2 to 1 91 | def move(line): 92 | amount = tf.strings.to_number( 93 | tf.strings.regex_replace( 94 | tf.strings.regex_replace(line, "move ", ""), r" from \d* to \d*$", "" 95 | ), 96 | tf.int64, 97 | ) 98 | 99 | source_dest = tf.strings.regex_replace(line, r"move \d* from ", "") 100 | source = ( 101 | tf.strings.to_number( 102 | tf.strings.regex_replace(source_dest, r" to \d*$", ""), tf.int64 103 | ) 104 | - 1 105 | ) 106 | 107 | dest = ( 108 | tf.strings.to_number( 109 | tf.strings.regex_replace(source_dest, r"\d* to ", ""), tf.int64 110 | ) 111 | - 1 112 | ) 113 | 114 | num_element_source = num_elements.lookup([source])[0] 115 | top = max_stack_size - num_element_source 116 | 117 | read = stacks[top : top + amount, source] 118 | 119 | # remove from source 120 | indices_x, indices_y = tf.meshgrid(tf.range(top, top + amount), [source]) 121 | indices = tf.reshape(tf.stack([indices_x, indices_y], axis=-1), (-1, 2)) 122 | updates = tf.reshape(tf.repeat("", amount), (-1, 1)) 123 | 124 | stacks.assign( 125 | tf.tensor_scatter_nd_update(stacks, indices, updates), use_locking=True 126 | ) 127 | 128 | num_element_dest = num_elements.lookup([dest])[0] 129 | top = max_stack_size - num_element_dest - 1 130 | 131 | # one a at a time -> reverse 132 | if one_at_a_time: 133 | insert = tf.reverse(read, axis=[0]) 134 | insert = tf.reshape(insert, (-1, 1)) 135 | else: 136 | insert = tf.reshape(read, (-1, 1)) 137 | 138 | indices_x, indices_y = tf.meshgrid(tf.range(top - amount + 1, 
top + 1), [dest]) 139 | indices = tf.reshape(tf.stack([indices_x, indices_y], axis=-1), (-1, 2)) 140 | 141 | stacks.assign( 142 | tf.tensor_scatter_nd_update(stacks, indices, insert), use_locking=True 143 | ) 144 | 145 | update_num_elements() 146 | return stacks 147 | 148 | tf.print("part 1") 149 | play = moves_dataset.map(move) 150 | 151 | list(play) 152 | 153 | indices_x = tf.range(num_stacks - 1) 154 | indices_y = max_stack_size - tf.reverse(num_elements.export()[1], axis=[0]) 155 | 156 | indices = tf.reshape(tf.stack([indices_y, indices_x], axis=-1), (-1, 2)) 157 | 158 | tf.print(tf.strings.join(tf.squeeze(tf.gather_nd(stacks, indices)), "")) 159 | 160 | tf.print("part 2") 161 | initialize_stacks() 162 | update_num_elements() 163 | one_at_a_time.assign(False) 164 | play = moves_dataset.map(move) 165 | list(play) 166 | 167 | indices_x = tf.range(num_stacks - 1) 168 | indices_y = max_stack_size - tf.reverse(num_elements.export()[1], axis=[0]) 169 | 170 | indices = tf.reshape(tf.stack([indices_y, indices_x], axis=-1), (-1, 2)) 171 | 172 | tf.print(tf.strings.join(tf.squeeze(tf.gather_nd(stacks, indices)), "")) 173 | 174 | return 0 175 | 176 | 177 | if __name__ == "__main__": 178 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 179 | sys.exit(main(INPUT)) 180 | -------------------------------------------------------------------------------- /2022/6/fake: -------------------------------------------------------------------------------- 1 | mjqjpqmgbljsphdztnvjfqwrcgsmlb 2 | bvwbjplbgvbhsrlpgdmjqwftvncz 3 | nppdvjthqldpwncqszvftbrmjlhg 4 | nznrnfrfntjfmvfwmzdfjlvtqnbhcprsg 5 | zcfzfwzzqfrljwzlrfnpqdbhtmscgvjw 6 | -------------------------------------------------------------------------------- /2022/6/input: -------------------------------------------------------------------------------- 1 | 
llqnqffqsqttfffbcfcbcbdcczccfssvwswrwddzlddpdhdwwlvlffjllnjjwjqwjjttwbwcwfccdmmnddgvvpwvvgsshnshsgglljfjzjpjfpfjpplddjcchdhvhlhvllvflfbllsdllgppwjjprjpjrrdwrdrggjvjppgbgttdppwhhcshsvvgpvggsllstsggdjdmjjrvjjszjsjbbsffjwjnwwzjjjvqvfftbffbpffndfdzfdfvdfdggmpmbbwgbgnnbtnnnhggdmdffrqrlrhrzzrmzzmbzzcdcwwzffsrrnfnvfnnvppwjjndjnndtdppgcppsmppljlpjjmlldlsltlglwgwcwnwvwddzrrllwjjnvjvwvppjssncnfcnfcfcczfccpjphjphjjjsgszzhthghjhrjrbrtrjrhrsrfftfzftfmmwmpmgghbggjrrsdswddtjjvnnrwrzrpzzlglwggrnrgrfftnffwwgllrqqzbqbbtltbbgdgpgphggspggplggmcmscsffzcfzzbggdrgrqgrrnlrnrbnnzsnnzcctvvnvwvnwnhhwpwtptllpflfcfttwtjjhwjhhbwhbbtppwhwvhvghvhphpwwcgwwhbbfvbffzpzlllrzlrrbnnrngrnrpnnsszbbqffpsffhfshfhzzqhhcgcgfggzmmdllthhrhnrrwggdqdsstccqllflmflfddjwjzjffvjjfgjgdgbdgdngnpgpnpffsnsjnnbbjdbjbtbmmbrrlbbqmqpqrprjjrbbvnbbzvvcwwlfwfggmhhdhsdhsdhshhqfhfrhhqlqttffpmmjzjqjggqzzdfzflfsllshhvjvfvbfvbbjljhhzrzqqszqzsqqswswbsbzszgzdgzzhjzhhvffhthvtthltthghzhvvjttczttlssvvgjjmsjstjjrfjjhbjbnjbjddqrddnbdnbnwbnbqbmqqgtgqtttcmmqbqrrgrrsrszssvpsvvjqjttjpjwwmwfwttczttgccwhcwwrzwzbwwqbqmqnmqqnfnmmmzdmzmpmssdpsslbbmgmbmlmnlldlccvzzlrzzqbqfqlflwlvlhhtrtcttgnnqhnqqtjjphjhwjhwhpwwvdvfddmndncnppcffhllfvfdfllhgslvtsqhtlfdflcjfmqbnctnfnwqrlqbzrcbvldrffcptsgslqcszqcfdvtpggvdqblwcgmdjqrpjdhtrmvrfrzznspqlfhnjsppbpjdggcwjwprpnlnntgfgmflctqphdmzfvpzzmbzmvrqdgchzmdvjdzmfsslpqvhpgznmpspjpdmlfwwjbbwqbfthghclldpmnsbcwlzswrsnfzbdzpcnrrpspdpfqhvmtfjlppqtphvzzqrwhzccnrgrtgfbfgtwvlwsmcvzmqmhsvztmmvpjzfwzgfwntbrsfthdgrcmgtdsvzcllmcshrlqldrvrnmdgbwttmhczvscrdvfgdvrhfvlghhsfbmrptbwmpnvtsrjlpjlbmmjzwwzbdtjlqqdczqgpzfjslccrcrblhplndblghchczbjjfzlsvvrqhvgdsncgpjhjlprhfhswwbmrnszqzhhlrbqpphvgtfsgmdpjwgcmqnvfdhrqmbspjpdrtdbqnbmbpgqwgmltqwrjprvsfjsmpldcqqbvmfhgzltzfvhlnfdqrphzzjrbdvnnjspvnlnnsdzvgqsqztndjpmnbqtwnpzmmfhsswwnnwwlbnpgbrhzchbnsrwwpprhntngsjzvssttqwfvjrdddtfpgtqqzcwljzmdjtgzdqjjvbqgdttdgvqvlfdsgcjhsmdmwrwdcqdflpfjbfzsvjrzrhhcnvcjblwcdvtbgfhfgcwrcjsrzcdrfwtvdqrghdtrjgdmhrfcsnwwwdpvjtpzdqfgrlmrqscjbfgdbgvflhvdjmnmslvsbcbgwplgqljmlzpg
rfjwmvqfwmwrhnmdjhdwgjrngvccrbzmhcqthvvtdtmfqvfczhqbfgzgrmdtprznfzjtrcwqgztchtdmzmnwbfbnbttbvzsflcpsjshgphfdlvhdrcpsqnhjjggbnsqrfpwsdznzcwjbcswwndzbpdnfcbdrfgrmqzvtjttltbntznmqfsmqlgqvlqnrvgrnggslqhbplmgpzwlfzbvwdvrchsnhrnvgmzjdprvvspltcdzmdnlgtmrwnwpdndpdqjltcnmsggrvbprslqhfgmzqtppdpsjcmmbvfgmbpdnwdcgnssfgjhzhrjljdwhrzznscndgbscdmbbtbrnzbqzvcjgjgljbjlrrvdhjdllsnjzhwlmjslghrqplwjwssbzzpdzdfhhsqctlcddnfnnvbcwpdvzdcsgcqpctsjtdtnzpggpzsrrhfjtthqcqhtvwzltbdvdnbgwlppblwzjsqqbcpcrthhrhdnzhdnflqlvbzmcjfcrbmgdgqptfqfbmlfbblqdfmnwgvbdhmcmtmvtggqstjpwhvzjhbgpblmdrnggvrvphbglqgfcphmrgfmrwcdchtwfllqwsnbqttwdcvrwgzjfztmcffppqtmnwpgcrgwtjbdtjlmnpmvlzndljglzblwdrggqvbbfvqcbcbpqttrmqlcqnqvrfqsnlpmwlcgfwfcqpgmszfccbqtcqfwlwqrjjhrdbjqvdmfzjgncjqgqbthpgjgbfdvltbhpnbjqqwrsczrthfhmlzjjjgsjtsvgmwfsjngzfqdqzfhvwjrswvnqvsvvsjdbwlwdcsszdngmmhnnqsgvsrvpnndghrwgzztqczvhcrzdpqtrmrnfsfrlpdnbbtshfhplzqvdvzdvwhwsbpnbzlvcbgptdszjlcgfdzchjcsvhzdljvgpwstzwnssvhztcptnhslggnrschvfnmhcnjvldthtfpqzdvltfgnmtgvlrljhwqdzqfmfblstvfnpfcdsqslrqbztrbfzmsfjtjwhlzfnhrvpfqfqvtdllrvchmqphgljwcspgpwsdwqfdhsqhsflpbcbjjmjrfjrqrqfqcqzqsqcnqhfgsclfnfzblfdhphrvqdpvcqmllrcdnrlwqbrgqsbfqqllcvmglntjwcsjljgntmmldscndfdjcqpwbqpbmfjsgwfwcqbqbbhhgprlbzmvdfjcsmsqvhfhmgrhnwpslztmwbhdgrfzfcmwjswpbpzwstfbfmgwtprmptzjwtrqthrqwgslnmtlfgnvgpwvsfwthtrgwfbnnnwmdcfrpqqztplscvfnfpfwwdnfnzjccnhswwlcrrdqfhvsrnvcdrwmjswzggscplggbwgndsbntqvtrjbmbzrnbbmdjvwrmmtrmfjjhnvrcjcbqlhlthbvtjjczddblbbttmmzgdqmtdqswjdwbjhsrjbvdtqzqdbhhgbttgmgwfgfpczpqpfsddgslltwsvngwbwfbfcdzlqghwdbfzzldjpwpmpjmslwnwbrjjvwcsjgdzjwrrwnvgvrqlgjhwvrgnczspfplhfbtdpbpfqmhbvmcqdgrrjfslzgsqfpwrrrmjdtgbslddwvddrbmrdsdhhnlwsncrmnglrrpvtbrfvjbdmcpgphcdfwnfcglvmlbslttpmjnspqhnmbcqgmncfjjpdfjqhggnswbgppjhllscrvtmtmmbwbpgddtzblscntrmccdpzdnllqpvfdpfpwwvnnbjlzphvqwffwsjmbtllctrjmllwscmldcdrpfrzrqlpwbjwfgmnshzqzgdjqhcwtsqlsjffvzcpnrzmvtlzlgwvrrjtdbcnddbhjgqqzrvhplrbsrwgscjnfmhbcnpdcjqrltgdzzzzbqtsspbcdssbjrzfqdgvhmgdzsjdsqcfwbgrnhrlzgpjmhctqdccmvqzddmcptsjgtfshprqmslvtmtrprfsngrnnpnrc
crvnrvcwzrbbnbghlwvcncgzglnqthchhsnzlfrcggdptvwlrbnfwgjpflgrcfzhhgffwcbhwlsdmvmsvvzvdcrlvlnstgz 2 | -------------------------------------------------------------------------------- /2022/6/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/6 5 | 6 | of the Advent of Code 2022. 7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def main(input_path: Path) -> int: 16 | """entrypoint""" 17 | 18 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 19 | 20 | chars = tf.convert_to_tensor( 21 | next( 22 | dataset.map(lambda line: tf.strings.bytes_split(line)) 23 | .take(1) 24 | .as_numpy_iterator() 25 | ) 26 | ) 27 | 28 | tf.print(chars) 29 | 30 | dataset = tf.data.Dataset.from_tensors(tf.reshape(chars, [-1, 1])).unbatch() 31 | 32 | interleaved = tf.data.Dataset.range(4).interleave( 33 | lambda offset: dataset.skip(offset).batch(4), 34 | cycle_length=4, 35 | block_length=1, 36 | num_parallel_calls=4, 37 | deterministic=True, 38 | ) 39 | 40 | for count, b in enumerate(interleaved): 41 | y, _ = tf.unique(tf.reshape(b, -1)) 42 | if tf.equal(tf.shape(y)[0], 4): 43 | tf.print(y) 44 | # 1: starts from 0 45 | # 3: the remaining chars in the sequence 46 | tf.print("unique found at char: ", count + 4) 47 | break 48 | 49 | # identical, just range over 14 interleaved datasets with 50 | # a batch of 14 51 | interleaved = tf.data.Dataset.range(14).interleave( 52 | lambda offset: dataset.skip(offset).batch(14), 53 | cycle_length=14, 54 | block_length=1, 55 | num_parallel_calls=14, 56 | deterministic=True, 57 | ) 58 | 59 | for count, b in enumerate(interleaved): 60 | y, _ = tf.unique(tf.reshape(b, -1)) 61 | if tf.equal(tf.shape(y)[0], 14): 62 | tf.print(y) 63 | # 1: starts from 0 64 | # 13: the remaining chars in the sequence 65 | tf.print("unique 14 chars found after reading : ", count + 14, " 
chars") 66 | break 67 | 68 | return 0 69 | 70 | 71 | if __name__ == "__main__": 72 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 73 | sys.exit(main(INPUT)) 74 | -------------------------------------------------------------------------------- /2022/7/fake: -------------------------------------------------------------------------------- 1 | $ cd / 2 | $ ls 3 | dir a 4 | 14848514 b.txt 5 | 8504156 c.dat 6 | dir d 7 | $ cd a 8 | $ ls 9 | dir e 10 | 29116 f 11 | 2557 g 12 | 62596 h.lst 13 | $ cd e 14 | $ ls 15 | 584 i 16 | $ cd .. 17 | $ cd .. 18 | $ cd d 19 | $ ls 20 | 4060174 j 21 | 8033020 d.log 22 | 5626152 d.ext 23 | 7214296 k 24 | -------------------------------------------------------------------------------- /2022/7/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/7 5 | 6 | of the Advent of Code 2022. 7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def main(input_path: Path) -> int: 16 | """entrypoint""" 17 | 18 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 19 | dataset = dataset.concatenate(tf.data.Dataset.from_tensors(tf.constant("$ cd /"))) 20 | 21 | def func(old_state, line): 22 | # tf.print("line: ", line) 23 | is_command = tf.strings.regex_full_match(line, r"^\$.*") 24 | new_state = old_state 25 | if is_command: 26 | if tf.strings.regex_full_match(line, r"\$ cd .*"): 27 | dest = tf.strings.split([line], " ")[0][-1] 28 | if tf.equal(dest, "/"): 29 | new_state = tf.constant("/ 0") 30 | else: 31 | old_path = tf.strings.split([old_state], " ")[0][0] 32 | new_state = tf.strings.join( 33 | [tf.strings.join([old_path, dest], "/"), "0"], " " 34 | ) 35 | else: 36 | split = tf.strings.split([line], " ")[0] 37 | if tf.not_equal(split[0], "dir"): 38 | size = tf.strings.to_number(split[0], tf.int64) 39 | state_size = 
tf.strings.split([old_state], " ")[0] 40 | if tf.equal(tf.shape(state_size, tf.int64)[0], 1): 41 | old_size = tf.constant(0, tf.int64) 42 | else: 43 | old_size = tf.strings.to_number(state_size[1], tf.int64) 44 | 45 | partial_size = size + old_size 46 | new_state = tf.strings.join( 47 | [ 48 | tf.strings.split(old_state, " ")[0], 49 | tf.strings.as_string(partial_size), 50 | ], 51 | " ", 52 | ) 53 | 54 | if tf.not_equal(new_state, old_state): 55 | # output_value = tf.strings.to_number( 56 | # tf.strings.split([new_state], " ")[0][1], tf.int64 57 | # ) 58 | output_value = new_state 59 | else: 60 | # output_value = tf.constant(-1, tf.int64) 61 | output_value = tf.constant("") 62 | 63 | return new_state, output_value 64 | 65 | initial_state = tf.constant("/ 0") 66 | # ['', '/ 14848514', '/ 23352670', ...., '//a/e/../../d 17719346', '//a/e/../../d 24933642', '/ 0'] 67 | intermediate_dataset = dataset.scan(initial_state, func) 68 | 69 | filtered_dataset = intermediate_dataset.filter( 70 | lambda line: tf.strings.regex_full_match(line, "^.* \d*$") 71 | ).map(lambda line: tf.strings.regex_replace(line, r"\/\/", "/")) 72 | 73 | def gen(ds): 74 | def resolve(): 75 | for pair in ds: 76 | path, count = tf.strings.split([pair], " ")[0] 77 | path = Path(path.numpy().decode("utf-8")).resolve().as_posix() 78 | yield path, count.numpy().decode("utf-8") 79 | 80 | return resolve 81 | 82 | filtered_dataset = tf.data.Dataset.from_generator( 83 | gen(filtered_dataset), tf.string, output_shapes=[2] 84 | ) 85 | 86 | def mapper(old_state, pair): 87 | old_path = old_state[0] 88 | new_path = pair[0] 89 | output_value = tf.constant(["", ""]) 90 | if tf.logical_or( 91 | tf.equal(old_path, "fake_path"), tf.equal(new_path, "fake_path") 92 | ): 93 | output_value = tf.constant(["", ""]) 94 | elif tf.not_equal(old_path, new_path): 95 | output_value = old_state 96 | 97 | return pair, output_value 98 | 99 | initial_state = tf.constant(["fake_path", "-1"]) 100 | filtered_dataset = ( 101 | 
filtered_dataset.concatenate(tf.data.Dataset.from_tensors(initial_state)) 102 | .scan(initial_state, mapper) 103 | .filter( 104 | lambda pair: tf.logical_and( 105 | tf.greater(tf.strings.length(pair[0]), 0), tf.not_equal(pair[1], "0") 106 | ) 107 | ) 108 | ) 109 | x = list(filtered_dataset.as_numpy_iterator()) 110 | print(x) 111 | 112 | lut = tf.lookup.experimental.MutableHashTable(tf.string, tf.int64, default_value=0) 113 | for pair in filtered_dataset: 114 | path, value = pair[0], tf.strings.to_number(pair[1], tf.int64) 115 | parts = tf.strings.split(path, "/") 116 | tf.print(parts) 117 | if tf.logical_and(tf.equal(parts[0], parts[1]), tf.equal(parts[0], "")): 118 | keys = ["/"] 119 | old = lut.lookup(keys)[0] 120 | new = old + value 121 | lut.insert(keys, [new]) 122 | else: 123 | for idx, part in enumerate(parts): 124 | if tf.equal(part, ""): 125 | keys = ["/"] 126 | else: 127 | tf.print("parts: ", parts[1 : idx + 1]) 128 | l = [tf.constant("")] + parts[1 : idx + 1] 129 | tf.print(l) 130 | j = tf.strings.join(l, "/") 131 | tf.print("j:", j) 132 | keys = [j] 133 | # tf.print("k: ", keys) 134 | old = lut.lookup(keys)[0] 135 | # tf.print("old: ", old) 136 | new = old + value 137 | # tf.print("new: ", new) 138 | lut.insert(keys, [new]) 139 | 140 | paths, sizes = lut.export() 141 | print(paths, sizes) 142 | tf.print( 143 | "part 1: ", 144 | tf.reduce_sum(tf.gather(sizes, tf.where(tf.math.less_equal(sizes, 100000)))), 145 | ) 146 | 147 | update_size = 30000000 148 | free_space = 70000000 - lut.lookup("/") 149 | required_space = update_size - free_space 150 | tf.print(required_space) 151 | 152 | big_enough = tf.gather( 153 | sizes, tf.where(tf.math.greater_equal(sizes - required_space, 0)) 154 | ) 155 | tf.print("part 2: ", tf.gather(big_enough, tf.math.argmin(big_enough, axis=0))) 156 | return 0 157 | 158 | 159 | if __name__ == "__main__": 160 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 161 | sys.exit(main(INPUT)) 162 | 
-------------------------------------------------------------------------------- /2022/8/fake: -------------------------------------------------------------------------------- 1 | 30373 2 | 25512 3 | 65332 4 | 33549 5 | 35390 6 | -------------------------------------------------------------------------------- /2022/8/input: -------------------------------------------------------------------------------- 1 | 003112220410413101104044022234320204233341435252223642044225451531421012104343030211442433410302111 2 | 301233004003313130222434121135033231250505241131342032404032560542233000343455552123100410402211201 3 | 111301041221333142533352050250154136146324550411565615444115604102531135302000320033233340431313123 4 | 011312210420442043155233305201305643445224334303310253225205601265233454400214114322131420224022313 5 | 130102441231200141254202121022423405224443210463250415204410624313613034320040015223211432442333110 6 | 133121132322223054104323242043651144066341346000104210124535555236324451132555525220523220023202433 7 | 110011123024113203143145243605143331512223606564503661350336662505131254503242354031400131012444222 8 | 221400422202335053520044325014041432662161415523526711633662600635304000112322014001533351130303321 9 | 030222020044115331004065013253364503664435753416641653716424535324654054023321025154331103034342414 10 | 030331241153233040140314112524504535172167445223426653152774166352145410064425434012002122431142343 11 | 224111232453145423354550035141103644127571711431336236226321752314754510214316215104550522141301020 12 | 424321022432003551434012134531644165753146143232242275633762323631713541330463531053004424012234010 13 | 221233440041435131321565604060121637542135243721227576264551457171313165211546132314103242442012133 14 | 301320232222024200651041405631257577625236256225367443317773421262762172454463051224654015452230401 15 | 124314451244222611016300517414362722551374735555353871173242751564427674344212032442221554524035432 16 | 
000210412345203005364660077252742673773341654558568632445755573574757433452636410261362004114032013 17 | 333130514223252440150314572132443233363783556666363566733363262631651361523573335361463232143300044 18 | 400403124212626240015541437753711632635435857234865883574647337436376563235264524230225211320425534 19 | 430312520241033613150127256341412352564548243842255622454762832274857163157242366022041262252034150 20 | 125211521340246341461723355443362487532437274863758283488567453538667642367654751321503343220351304 21 | 015045453331430626273664314125778234645658386774837783464366334264476243327176571544650404453253132 22 | 354034323663213666746174754737374552772744668787244468236435632534753452366177147233020313054042345 23 | 305241054630156250416544221136444247327848337737584777837244467756426322644474332262330430251551212 24 | 100144030632532041231474166555527386344645548655948476835366757558682728623657547471260405014131510 25 | 155501551351164622444476154658383338677369856795967346765977585753438378656352723127554031451225340 26 | 213252046404053134327512554784653544697439375473948735593644477348334755453317243676612665133404310 27 | 022440140420166534174335264388542568833984445757393864997986883894574577853347367415453352646352541 28 | 303425632364032563633467847288735478557676734393879573848466463466672455482853676764477400452414425 29 | 421140260266576264621526686828328833799894373434775335763857585383998562862724435337211515426106224 30 | 251234063153345361761483764445395779384568938899876568577359785776678952473247274437226445141011143 31 | 124165362100637756715727365876646574835535677777467785479666667584536895483528343623556220650044411 32 | 523533116461715454347526758528694839737349689796485884675949699354664766465728468324363766204426020 33 | 011655232111314171647378426863359767797699886798595874554677869353593448485338287251255363024600501 34 | 201454444103773472628888556987787587788889498545597569965587748496753787964648276727241677205552530 35 | 
022050015673621733475438774338498584975649879876857968775687577968447843939528275674512475556245535 36 | 415145246426622536566872225778388854879897849855475687687595454675468637795674534385355242230005213 37 | 515410462156777244275234668693874665788796767699675757959578987749863457937753862877526274731341533 38 | 532441044352764132482273798897585496755464759898898996955476545867443535986538526852746625533463346 39 | 042116466642255478273735578633656564997786577695957789988869999965866866547392358543326526744105124 40 | 305463642721567264236826357995536786844968968785757997557699474697669835738584868845834527260231552 41 | 066231251534151145564875937345768799895756579656987695996797768779596848379994862646436522463360266 42 | 506513057363414584637639439854956668854878865987777598756576697549657897644947484484534264535522136 43 | 131666244551736787837659743739478747677995596779697659755568679946768553647438363837764527562663652 44 | 424404413335566473356634745356959967996577965786978695878778775749964899347635788855331715645156443 45 | 366410447316422733452686338845457787759555776998978979667795556665499755678449775626855353212651324 46 | 302136251253517685366593955978666557656668675898896669776695986549696689467499828826265743355612425 47 | 546506521534743584874443398869667685788797956777788696997955989646664668879679457843885333126334344 48 | 362262154313747445367249673496655464765879658679769897687679766789496785648597346877862642316202103 49 | 026226125354532563527338467655476495659657769687899897897899856576869585587683926347427571714351012 50 | 324246031677535478765836973778855888768885696676889798687785885666785975868394627754287365751563132 51 | 031351623136767474878299634934847976977866799698978866969865695657475657893553946224747277645245120 52 | 026056143144572747422538333594694756756655696798979976879958687957995676939686658475475623336500612 53 | 662533125413523284737634669357979456995895856799797767679899585888899799675553357648455413641663031 54 | 
014206153225663545455694774965848955686699676678777689996997596965668976646983688667633463335404141 55 | 413012617325174456878669973969459678955956597997687976869576778966464554568469886777721516427616513 56 | 502662203642612255238447486939957458967857897679766997696655868558575999589335922456844736112131654 57 | 212661502516734223324889447354689969855787756889968698877896757975877697383989627364337421772343024 58 | 062105566125733258245733457473775547779958796868868895787767755559647545379345865238624356123242000 59 | 402335614754636628236824679555597465548989986755969565969678898559987586856955673832271674166105362 60 | 451334645753376162243335579543677879955598768556565759756869877876795947966569855573232735534601241 61 | 500262052615557247375888358635954578969966856685689678868986899965887995757987425824514566556143415 62 | 025221464647151347477323745438738647477798995699788989796688598687779795846437577837865212462025305 63 | 506261641542645445623578634938859695857548567987658895685668887664467397754563764284864222243613255 64 | 544001251632514343434242438697698767785468785559797877757559476976744589553445545348132556324315401 65 | 312662335054751647576346847944998349986995969646958568675577475645539379875732364426714676752004534 66 | 442336642122316123757636234859863735657746598877646757975758669549947438746824526434264347133600103 67 | 110222605363451462773826376864836593598856459788567868745977445869448964569655348846536363204056350 68 | 000101232124176476234372634789857987399659768658796566947966644948468844584756623732364133065443412 69 | 134465433226374477324823357757785535937975665949586889647565456766846389552458234767667646466124353 70 | 421406224300535763513347544755878674693949665984559799965894778876384773264625738631365460212615513 71 | 452351220036426717772576544574486575445936589965659469684553983864958887588836456352652752044465624 72 | 513131310021516677236857346343788436737376979889979498674877368855367664436225742117611411543534241 73 | 
025341550553445563557374338552585545787433393754685485676457956894674542662544752536711135543323540 74 | 535152616216423251726174753447757785566344854848638858756838495369785886445488572266324260520452211 75 | 040010242452167571763413223445374499838658479769797468459895353937866635767567612446231533425561512 76 | 540233435611063354343623584674548775576735445736954663985773597646587532834332126771664204465150223 77 | 115125460640513624264577427672484376565394997956949478368583668555354324253116433334364541633501141 78 | 325235035031330065764773672233563447278366899833655673943396323666327273534372213517324021336301134 79 | 210455052102126060367721574443448283734742249765698995536842573866568587565571317173455040136014133 80 | 130303041444343616551636147765524538863657233824485633222587838375574238775244261440413305250410213 81 | 215411311040450233266672334763832288583822422248882386376826865647256855122326112462046245434214110 82 | 230231555011111141243461222762213386584878775753548542777758687642766773174237314624160241043301041 83 | 323544455001621231632772663352437368846236258422868628887825466644733647626344222306513541033345051 84 | 103143140115330535101443674523313247636278477357763543634765673375252223123145511514215131041302240 85 | 232425305344242122243634141575125456216884578682284677823883878251534364736772005201664151453532422 86 | 102015414143426060302001452111353522415644252842283555326642135161755673127454553246636405351210230 87 | 434225550123100466121402111553624464277266354226553673463535217337233242261322143516321221202244233 88 | 241043514014314144343662453056766274716234347315115145214653247673432611160552306341450310405413111 89 | 242112143335044304622645260260424573275613637141734623226773527441742321265241643455134145215344111 90 | 111301110035433343532532505066367716542154757534155161173124736561116131435620565213531350550432142 91 | 242323242143224035106205224411500635634124323342513747342475634262646442514656422055112331122023144 92 | 
002021344233114150010341663531513334522142253265466242366366534525551142525131661102314124401314042 93 | 024004104024242151413420354625126365606642452553232242411563750634302214411623615524332515123123121 94 | 322320201030004104054100060453523451362415465573527326430616603635233444523531105134335533102202414 95 | 013231044323002420215041311645066036204353535300445225661352044506065120312154410321400034312200320 96 | 031030413344440552442114452066553136026033443442226333515665300213012326102141034413121232242040400 97 | 133131302014414220215344004521556454235620502443013233464014064103264525100404050140223431001414000 98 | 221120341111243124154321401255010441026151433422443520262166251632146441120442553301443334201231200 99 | 021222222103332103301112520131442023663055232552042262505152050120510455441325453404132240001402111 100 | -------------------------------------------------------------------------------- /2022/8/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/8 5 | 6 | of the Advent of Code 2022. 
7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def main(input_path: Path) -> int: 16 | """entrypoint""" 17 | 18 | dataset = tf.data.TextLineDataset(input_path.as_posix()) 19 | dataset = dataset.map(lambda line: tf.strings.bytes_split(line)) 20 | dataset = dataset.map(lambda x: tf.strings.to_number(x, tf.int64)) 21 | 22 | grid = tf.Variable(list(dataset.as_numpy_iterator())) 23 | 24 | visibles = tf.Variable(0, dtype=tf.int64) 25 | # edges 26 | grid_shape = tf.shape(grid, tf.int64) 27 | visibles.assign_add(tf.reduce_sum(grid_shape * 2) - 4) 28 | 29 | # inner 30 | for col in tf.range(1, grid_shape[0] - 1): 31 | for row in tf.range(1, grid_shape[1] - 1): 32 | x = grid[col, row] 33 | 34 | visible_right = tf.reduce_all(x > grid[col, row + 1 :]) 35 | if visible_right: 36 | visibles.assign_add(1) 37 | continue 38 | visible_left = tf.reduce_all(x > grid[col, :row]) 39 | if visible_left: 40 | visibles.assign_add(1) 41 | continue 42 | 43 | visible_bottom = tf.reduce_all(x > grid[col + 1 :, row]) 44 | if visible_bottom: 45 | visibles.assign_add(1) 46 | continue 47 | visible_top = tf.reduce_all(x > grid[:col, row]) 48 | if visible_top: 49 | visibles.assign_add(1) 50 | continue 51 | 52 | tf.print("part 1: ", visibles) 53 | 54 | scenic_score = tf.Variable(0, tf.int64) # t * l * b * r 55 | t = tf.Variable(0, tf.int64) 56 | l = tf.Variable(0, tf.int64) 57 | b = tf.Variable(0, tf.int64) 58 | r = tf.Variable(0, tf.int64) 59 | for col in tf.range(1, grid_shape[0] - 1): 60 | for row in tf.range(1, grid_shape[1] - 1): 61 | x = grid[col, row] 62 | views = grid - x 63 | 64 | right = views[col, row + 1 :] 65 | # the loop is left to right 66 | left = tf.reverse(views[col, :row], axis=[0]) 67 | # the loop is bottom to top 68 | top = tf.reverse(views[:col, row], axis=[0]) 69 | bottom = views[col + 1 :, row] 70 | 71 | for tree in right: 72 | r.assign_add(1) 73 | if tf.greater_equal(tree, 0): 74 | break 75 | for tree in left: 76 | 
l.assign_add(1) 77 | if tf.greater_equal(tree, 0): 78 | break 79 | for tree in bottom: 80 | b.assign_add(1) 81 | if tf.greater_equal(tree, 0): 82 | break 83 | for tree in top: 84 | t.assign_add(1) 85 | if tf.greater_equal(tree, 0): 86 | break 87 | scenic_node = t * l * b * r 88 | if tf.greater(scenic_node, scenic_score): 89 | scenic_score.assign(scenic_node) 90 | r.assign(0) 91 | l.assign(0) 92 | t.assign(0) 93 | b.assign(0) 94 | 95 | tf.print("part 2: ", scenic_score) 96 | return 0 97 | 98 | 99 | if __name__ == "__main__": 100 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 101 | sys.exit(main(INPUT)) 102 | -------------------------------------------------------------------------------- /2022/9/fake: -------------------------------------------------------------------------------- 1 | R 4 2 | U 4 3 | L 3 4 | D 1 5 | R 4 6 | D 1 7 | L 5 8 | R 2 9 | -------------------------------------------------------------------------------- /2022/9/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Solution in pure TensorFlow to the puzzle 3 | 4 | https://adventofcode.com/2022/day/9 5 | 6 | of the Advent of Code 2022. 
7 | """ 8 | 9 | import sys 10 | from pathlib import Path 11 | 12 | import tensorflow as tf 13 | 14 | 15 | def are_neigh(a, b): 16 | return tf.math.less_equal(tf.norm(a - b, ord=tf.experimental.numpy.inf), 1) 17 | 18 | 19 | # integers to naturals 20 | def to_natural(z): 21 | if tf.greater_equal(z, 0): 22 | return tf.cast(2 * z, tf.int64) 23 | return tf.cast(-2 * z - 1, tf.int64) 24 | 25 | 26 | def pairing_fn(i, j): 27 | # https://en.wikipedia.org/wiki/Pairing_function#Hopcroft_and_Ullman_pairing_function 28 | 29 | i, j = to_natural(i), to_natural(j) 30 | return (i + j) * (i + j + 1) // 2 + j 31 | 32 | 33 | def main(input_path: Path) -> int: 34 | """entrypoint""" 35 | 36 | dataset = ( 37 | tf.data.TextLineDataset(input_path.as_posix()) 38 | .map(lambda line: tf.strings.split(line, " ")) 39 | .map(lambda pair: (pair[0], tf.strings.to_number(pair[1], tf.int64))) 40 | ) 41 | pos = tf.lookup.experimental.MutableHashTable(tf.int64, tf.int64, (-1, 0, 0)) 42 | 43 | def get_play(nodes): 44 | rope = tf.Variable(tf.zeros((nodes, 2), tf.int64)) 45 | 46 | def play(direction, amount): 47 | 48 | sign = tf.constant(-1, tf.int64) 49 | if tf.logical_or(tf.equal(direction, "U"), tf.equal(direction, "R")): 50 | sign = tf.constant(1, tf.int64) 51 | 52 | axis = tf.constant((0, 1), tf.int64) 53 | if tf.logical_or(tf.equal(direction, "R"), tf.equal(direction, "L")): 54 | axis = tf.constant((1, 0), tf.int64) 55 | 56 | for _ in tf.range(amount): 57 | rope.assign(tf.tensor_scatter_nd_add(rope, [[0]], [sign * axis])) 58 | for i in tf.range(1, nodes): 59 | if tf.logical_not(are_neigh(rope[i - 1], rope[i])): 60 | distance = rope[i - 1] - rope[i] 61 | 62 | rope.assign( 63 | tf.tensor_scatter_nd_add( 64 | rope, [[i]], [tf.math.sign(distance)] 65 | ) 66 | ) 67 | 68 | if tf.equal(i, nodes - 1): 69 | mapped = pairing_fn(rope[i][0], rope[i][1]) 70 | info = pos.lookup([mapped])[0] 71 | visited, first_coord, second_coord = ( 72 | info[0], 73 | info[1], 74 | info[2], 75 | ) 76 | if tf.equal(visited, -1): 
77 | # first time visited 78 | pos.insert( 79 | [mapped], 80 | [ 81 | tf.stack( 82 | [ 83 | tf.constant(1, tf.int64), 84 | rope[i][0], 85 | rope[i][1], 86 | ] 87 | ) 88 | ], 89 | ) 90 | 91 | return 0 92 | 93 | return play 94 | 95 | tf.print("Part 1: ") 96 | pos.insert([pairing_fn(0, 0)], [(1, 0, 0)]) 97 | list(dataset.map(get_play(2))) 98 | tail_positions = pos.export()[1] 99 | visited_count = tf.reduce_sum(tail_positions[:, 0]) 100 | tf.print(visited_count) 101 | 102 | tf.print("Part 2: ") 103 | pos.remove(pos.export()[0]) 104 | pos.insert([pairing_fn(0, 0)], [(1, 0, 0)]) 105 | list(dataset.map(get_play(10))) 106 | tail_positions = pos.export()[1] 107 | visited_count = tf.reduce_sum(tail_positions[:, 0]) 108 | tf.print(visited_count) 109 | 110 | return 0 111 | 112 | 113 | if __name__ == "__main__": 114 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 115 | sys.exit(main(INPUT)) 116 | -------------------------------------------------------------------------------- /2022/9/main_keras.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from pathlib import Path 3 | 4 | import tensorflow as tf 5 | from tensorflow.keras.layers import Conv2D 6 | 7 | 8 | def main(input_path: Path): 9 | # N is the size of the board for simulation and L is number of knots in the rope 10 | # Change 2 to 10 for solving part 2 11 | N, L = 512, 2 12 | # Common setting of the layers. 13 | common_kw = dict(padding="same", kernel_initializer=tf.keras.initializers.Zeros()) 14 | # Define the architecture of the model. Will fill the weight later. 15 | model = tf.keras.models.Sequential() 16 | # The input has 1+L channels of size NxN. The first channel is used for storing visited/unvisited information. 17 | # The rest L channels are used for representing the position of each knot. 
    # The first convolution layer simulates head movement
    model.add(
        Conv2D(
            filters=1 + L,
            kernel_size=3,
            input_shape=(N, N, 1 + L),
            use_bias=False,
            **common_kw
        )
    )
    # For each non-head knot, use the following two layers to simulate the movement of the knot.
    for i in range(1, L):
        # the additional 9 channels correspond to 9 different patterns for non-head knot movement.
        model.add(
            Conv2D(
                filters=1 + L + 9,
                kernel_size=3,
                activation="relu",
                bias_initializer=tf.keras.initializers.Constant(-1),
                **common_kw
            )
        )
        # This convolution layer collects the 9 different situations and updates the knot into the new position.
        model.add(
            Conv2D(
                filters=1 + L,
                kernel_size=1,
                activation="relu",
                use_bias=False,
                **common_kw
            )
        )

    # %% Fill weights in the layers
    # For the head knot, the construction is straightforward. For example,
    # a 3x3 convolution kernel `[[0 0 0] [1 0 0] [0 0 0]]` can move the knot one step right.
    # We will rotate the kernel on the fly when we want to move the head to another direction.
    # This looks a bit like cheating. We choose this approach so that we can focus on explaining the main idea.
    # It is not difficult to construct a conditional network to avoid switching the kernel for different directions.
    head_move = model.layers[0]
    (head_W,) = head_move.get_weights()
    # Identity on every channel: each cell copies itself forward.
    for i in range(1 + L):
        head_W[1, 1, i, i] = 1  # copy all
    # ...except channel 1 (the head), which is shifted one cell instead.
    head_W[:, :, 1, 1] = 0
    head_W[1, 0, 1, 1] = 1
    head_WT = head_W.transpose((3, 0, 1, 2))  # prepare for rotation

    # The following layers are for non-head knots movement
    for i in range(1, L):
        # there are a 'move layer' and a 'collect layer' for each knot.
        move, collect = model.layers[i * 2 - 1 : i * 2 + 1]
        W, b = move.get_weights()
        # First 1+L channels are unmodified.
        for t in range(1 + L):
            W[1, 1, t, t] = 2  # copy all, note that b=-1, so 1=2*1-1 unchanged.
        # The new position of knot j=i+1 depends on the current position of knot i, and knot j.
        j = i + 1  # knot j follows knot i
        # If knot i is adjacent to knot j (taxicab distance<=1), then knot j stays in the same position.
        W[:, :, i, 1 + L] = W[1, 1, j, 1 + L] = 1
        # the following kernels will match patterns like
        # X X X
        # _ _ _
        # _ i _
        # where knot j is at one of the X positions and knot j is expected to move to the center position.
        for n, k in enumerate([0, 2]):
            W[:, k, j, 1 + L + 1 + n] = W[1, 2 - k, i, 1 + L + 1 + n] = 1
            W[k, :, j, 1 + L + 3 + n] = W[2 - k, 1, i, 1 + L + 3 + n] = 1
        # the following kernels match patterns like
        # j _ _
        # _ _ _
        # _ _ i
        # knot j is expected to move to the center position (diagonal step).
        for n, (y, x) in enumerate(zip([0, 0, 2, 2], [0, 2, 0, 2])):
            W[y, x, j, 1 + L + 5 + n] = W[2 - y, 2 - x, i, 1 + L + 5 + n] = 1
        move.set_weights([W, b])
        # The collect layer collects the results matched by the above patterns.
        (W,) = collect.get_weights()
        # Copy the first 1+L channels, except channel j for knot j.
        for t in range(1 + L):
            W[..., t, t] = 1  # copy
        W[..., j, j] = 0
        # For channel j, sum up the last 9 channels. There will be exactly one position with value 1, and the rest of the positions are all 0.
        W[..., 1 + L :, j] = 1  # collect moves
        collect.set_weights([W])
    # For the last layer, also collect the position of the tail. 0 represents visited and 1 represents unvisited.
    # Because the non-linear function is relu, it will clip the negative values into 0.
104 | W[..., 1 + L :, 0] = -1 # collect unvisited 105 | collect.set_weights([W]) 106 | 107 | # %% run the simulation 108 | state = tf.zeros((1, N, N, 1 + L), dtype=tf.float32).numpy() 109 | # Starts with every knot at the center position 110 | state[0, N // 2, N // 2, :] = 1 111 | # Every position is marked as unvisited. 112 | state[..., 0] = 1 - state[..., 0] 113 | 114 | for n, line in enumerate(open(input_path.as_posix()).read().splitlines()): 115 | tf.print(n, line) 116 | direction, num = line.split(" ") 117 | # Rotate the kernel of the first layer according to the direction. 118 | angle = {"R": 0, "U": 1, "L": 2, "D": 3}[direction] 119 | head_move.set_weights( 120 | [tf.transpose(tf.image.rot90(head_WT, angle), (1, 2, 3, 0))] 121 | ) 122 | # Simulate the movement num times 123 | for i in range(int(num)): 124 | state = model(state) 125 | # Count visited positions. 126 | print(state[..., 0]) 127 | print("Ans:", int(tf.reduce_sum(1 - state[..., 0]))) 128 | return 0 129 | 130 | 131 | if __name__ == "__main__": 132 | INPUT: Path = Path(sys.argv[1] if len(sys.argv) > 1 else "fake") 133 | sys.exit(main(INPUT)) 134 | -------------------------------------------------------------------------------- /2022/requirements.in: -------------------------------------------------------------------------------- 1 | tensorflow 2 | -------------------------------------------------------------------------------- /2022/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with python 3.10 3 | # To update, run: 4 | # 5 | # pip-compile requirements.in 6 | # 7 | absl-py==1.3.0 8 | # via 9 | # tensorboard 10 | # tensorflow 11 | astunparse==1.6.3 12 | # via tensorflow 13 | cachetools==5.2.0 14 | # via google-auth 15 | certifi==2022.9.24 16 | # via requests 17 | charset-normalizer==2.1.1 18 | # via requests 19 | flatbuffers==22.11.23 20 | # via tensorflow 21 | gast==0.4.0 22 | # via tensorflow 
23 | google-auth==2.15.0 24 | # via 25 | # google-auth-oauthlib 26 | # tensorboard 27 | google-auth-oauthlib==0.4.6 28 | # via tensorboard 29 | google-pasta==0.2.0 30 | # via tensorflow 31 | grpcio==1.51.1 32 | # via 33 | # tensorboard 34 | # tensorflow 35 | h5py==3.7.0 36 | # via tensorflow 37 | idna==3.4 38 | # via requests 39 | keras==2.11.0 40 | # via tensorflow 41 | libclang==14.0.6 42 | # via tensorflow 43 | markdown==3.4.1 44 | # via tensorboard 45 | markupsafe==2.1.1 46 | # via werkzeug 47 | numpy==1.23.5 48 | # via 49 | # h5py 50 | # opt-einsum 51 | # tensorboard 52 | # tensorflow 53 | oauthlib==3.2.2 54 | # via requests-oauthlib 55 | opt-einsum==3.3.0 56 | # via tensorflow 57 | packaging==21.3 58 | # via tensorflow 59 | protobuf==3.19.6 60 | # via 61 | # tensorboard 62 | # tensorflow 63 | pyasn1==0.4.8 64 | # via 65 | # pyasn1-modules 66 | # rsa 67 | pyasn1-modules==0.2.8 68 | # via google-auth 69 | pyparsing==3.0.9 70 | # via packaging 71 | requests==2.28.1 72 | # via 73 | # requests-oauthlib 74 | # tensorboard 75 | requests-oauthlib==1.3.1 76 | # via google-auth-oauthlib 77 | rsa==4.9 78 | # via google-auth 79 | six==1.16.0 80 | # via 81 | # astunparse 82 | # google-auth 83 | # google-pasta 84 | # tensorflow 85 | tensorboard==2.11.0 86 | # via tensorflow 87 | tensorboard-data-server==0.6.1 88 | # via tensorboard 89 | tensorboard-plugin-wit==1.8.1 90 | # via tensorboard 91 | tensorflow==2.11.0 92 | # via -r requirements.in 93 | tensorflow-estimator==2.11.0 94 | # via tensorflow 95 | tensorflow-io-gcs-filesystem==0.28.0 96 | # via tensorflow 97 | termcolor==2.1.1 98 | # via tensorflow 99 | typing-extensions==4.4.0 100 | # via tensorflow 101 | urllib3==1.26.13 102 | # via requests 103 | werkzeug==2.2.2 104 | # via tensorboard 105 | wheel==0.38.4 106 | # via 107 | # astunparse 108 | # tensorboard 109 | wrapt==1.14.1 110 | # via tensorflow 111 | 112 | # The following packages are considered to be unsafe in a requirements file: 113 | # setuptools 114 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TensorFlow Advent of Code 2 | --------------------------------------------------------------------------------