feat: days 1, 2 and 3 done with DuckDB

This commit is contained in:
Xavier Morel
2025-12-03 17:00:56 +01:00
commit 687b84d9a7
7 changed files with 461 additions and 0 deletions

5
.envrc Normal file
View File

@@ -0,0 +1,5 @@
export DIRENV_WARN_TIMEOUT=20s
eval "$(devenv direnvrc)"
use flake --no-pure-eval

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
.devenv/
.direnv/
inputs/

225
flake.lock generated Normal file
View File

@@ -0,0 +1,225 @@
{
"nodes": {
"cachix": {
"inputs": {
"devenv": [
"devenv"
],
"flake-compat": [
"devenv",
"flake-compat"
],
"git-hooks": [
"devenv",
"git-hooks"
],
"nixpkgs": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760971495,
"narHash": "sha256-IwnNtbNVrlZIHh7h4Wz6VP0Furxg9Hh0ycighvL5cZc=",
"owner": "cachix",
"repo": "cachix",
"rev": "c5bfd933d1033672f51a863c47303fc0e093c2d2",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "latest",
"repo": "cachix",
"type": "github"
}
},
"devenv": {
"inputs": {
"cachix": "cachix",
"flake-compat": "flake-compat",
"flake-parts": "flake-parts",
"git-hooks": "git-hooks",
"nix": "nix",
"nixpkgs": "nixpkgs"
},
"locked": {
"lastModified": 1764449550,
"narHash": "sha256-7ReZCvkQYKHX6gaQaNioROrpk6rPmIBwlRwWZKlfGvs=",
"owner": "cachix",
"repo": "devenv",
"rev": "dfb58ac03bed07b93f629df55034bc50394d3971",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "devenv",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1761588595,
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760948891,
"narHash": "sha256-TmWcdiUUaWk8J4lpjzu4gCGxWY6/Ok7mOK4fIFfBuU4=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "864599284fc7c0ba6357ed89ed5e2cd5040f0c04",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"git-hooks": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"gitignore": "gitignore",
"nixpkgs": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760663237,
"narHash": "sha256-BflA6U4AM1bzuRMR8QqzPXqh8sWVCNDzOdsxXEguJIc=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "git-hooks.nix",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"devenv",
"git-hooks",
"nixpkgs"
]
},
"locked": {
"lastModified": 1709087332,
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nix": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"flake-parts": [
"devenv",
"flake-parts"
],
"git-hooks-nix": [
"devenv",
"git-hooks"
],
"nixpkgs": [
"devenv",
"nixpkgs"
],
"nixpkgs-23-11": [
"devenv"
],
"nixpkgs-regression": [
"devenv"
]
},
"locked": {
"lastModified": 1761648602,
"narHash": "sha256-H97KSB/luq/aGobKRuHahOvT1r7C03BgB6D5HBZsbN8=",
"owner": "cachix",
"repo": "nix",
"rev": "3e5644da6830ef65f0a2f7ec22830c46285bfff6",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "devenv-2.30.6",
"repo": "nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1761313199,
"narHash": "sha256-wCIACXbNtXAlwvQUo1Ed++loFALPjYUA3dpcUJiXO44=",
"owner": "cachix",
"repo": "devenv-nixpkgs",
"rev": "d1c30452ebecfc55185ae6d1c983c09da0c274ff",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "rolling",
"repo": "devenv-nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1764517877,
"narHash": "sha256-pp3uT4hHijIC8JUK5MEqeAWmParJrgBVzHLNfJDZxg4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2d293cbfa5a793b4c50d17c05ef9e385b90edf6c",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"devenv": "devenv",
"nixpkgs": "nixpkgs_2"
}
}
},
"root": "root",
"version": 7
}

41
flake.nix Normal file
View File

@@ -0,0 +1,41 @@
{
description = "Advent of Code 2025";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
devenv.url = "github:cachix/devenv";
};
outputs =
{
self,
nixpkgs,
devenv,
...
}@inputs:
let
system = "x86_64-linux";
pkgs = nixpkgs.legacyPackages.${system};
lib = pkgs.lib;
in
{
devShells.${system}.default = devenv.lib.mkShell {
inherit inputs pkgs;
modules = [
(
{ pkgs, config, ... }:
{
# languages.enable = true;
packages = with pkgs; [
duckdb
];
enterShell = ''
echo "Thanks Topaz!"
'';
}
)
];
};
};
}

69
solutions/01.sql Normal file
View File

@@ -0,0 +1,69 @@
set preserve_insertion_order = true;
create or replace table day01_data as
select operation,
if(operation[1] == 'L', -1, 1) as multiplier,
operation[2:]::int as add
from read_csv(
'inputs/01/input.txt',
header = false,
columns = { 'operation': 'VARCHAR' }
);
create or replace table day01_test as
select operation,
if(operation[1] == 'L', -1, 1) as multiplier,
operation[2:]::int as add
from read_csv(
'inputs/01/test.txt',
header = false,
columns = { 'operation': 'VARCHAR' }
);
create or replace table day01_data_states as
select
rowid,
operation,
multiplier,
add as raw_add,
floor(add / 100)::int as turns,
add - (turns * 100) as clean_add,
50 + SUM(clean_add * multiplier) OVER (
ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW
) as state
from day01_data
;
with
states as (
select
lag(abs(state + 100) % 100, 1, 50) over () as previous,
operation,
turns,
abs(state + 100) % 100 as state
from day01_data_states
)
select count(1) as part1
from states
where state == 0;
--------------
create or replace table day01_part2 as
select
lag(state, 1, 50) over () as raw_previous,
state as raw_current,
operation,
raw_previous % 100 == 0 as went_from_zero,
floor(raw_previous / 100)::int as prev_hundred,
floor(raw_current / 100)::int as current_hundred,
if(not went_from_zero and prev_hundred != current_hundred, 1, NULL) as hundred_change,
if((state + 100) % 100 == 0, 1, NULL) as exact_0,
turns,
turns + coalesce(exact_0, hundred_change, 0) as total_zero_clicks
from day01_data_states;
select
sum(total_zero_clicks) as part2
from day01_part2;

49
solutions/02.sql Normal file
View File

@@ -0,0 +1,49 @@
create or replace table day02_data as
with input1 as (
select unnest(split(column0, ',')) as ranges
from read_csv('inputs/02/input.txt', header=false, delim='\n'))
select
split(ranges, '-')[1]::long as range_start,
split(ranges, '-')[2]::long as range_end
from input1;
create or replace table day02_test as
with input1 as (
select unnest(split(column0, ',')) as ranges
from read_csv('inputs/02/test.txt', header=false, delim='\n'))
select
split(ranges, '-')[1]::long as range_start,
split(ranges, '-')[2]::long as range_end
from input1;
create or replace table day02_product_list as
select unnest(generate_series(range_start, range_end))::string as product_id
from day02_data;
select sum(product_id::long) as part1
from day02_product_list
where LENGTH(product_id) % 2 == 0
and product_id[:(LENGTH(product_id)/2)] == product_id[(LENGTH(product_id)/2)+1:];
with split_lengths as (
select product_id,
length(product_id) as len,
(
select array_agg(l)
from generate_series(1, length(product_id)-1) as t(l)
where length(product_id) % l == 0
) as lengths
from day02_product_list
),
with_chunks as (
select product_id, chunk_size,
(
select array_agg(substring(product_id, (j-1)*chunk_size+1, chunk_size))
from generate_series(1, ceil(length(product_id)/chunk_size)::int) as u(j)
) as chunks
FROM split_lengths, unnest(lengths) as t(chunk_size)
)
select sum(distinct product_id::long) as part2
from with_chunks
where length(array_distinct(chunks)) = 1
;

69
solutions/03.sql Normal file
View File

@@ -0,0 +1,69 @@
create or replace table day03_data as
select bank
from read_csv(
'inputs/03/input.txt',
header = false,
columns = { 'bank': 'VARCHAR' }
);
create or replace table day03_test as
select bank
from read_csv(
'inputs/03/test.txt',
header = false,
columns = { 'bank': 'VARCHAR' }
);
with with_best as (
select bank,
coalesce((
select max(digit),
from generate_series(2, 9) as t(digit)
where contains(bank[:-2], digit::string) is true
), 1) as best
from day03_test
),
with_best_pos as (
select *,
instr(bank, best::string) as best_pos,
bank[best_pos+1:],
coalesce((
select max(digit)
from generate_series(2, 9) as t(digit)
where contains(bank[best_pos+1:], digit::string) is true
), 1) as second_best
from with_best
)
select sum(best * 10 + second_best) as part1
from with_best_pos
;
with recursive seq as (
select
bank,
12 as step,
'' as digits,
0 as pos,
bank[0:-12] as sub,
from day03_data
union all
select
s.bank,
s.step - 1 as new_step,
s.digits || max_digit::string as digits,
s.pos+instr(s.sub, max_digit::string) as new_pos,
s.bank[new_pos+1:-new_step] as sub,
from seq s
join lateral (
select max(digit) as max_digit
from generate_series(1,9) as t(digit)
where contains(s.sub, digit::string)
) as m
on s.step > 0
)
select sum(digits::hugeint) as total
from seq
where step = 0
;