Compare commits

...

11 Commits

Author SHA1 Message Date
Dylan Thies
16161f6660 doing some clippy and testing gittea 2025-10-04 10:33:33 -04:00
Dylan Thies
951cab12f3 2024 day-6 part one submitted no clippy 2024-12-06 10:26:48 -05:00
Dylan Thies
fb33e62a56 setting up for 2024 day 6 and fixing some of the comments 2024-12-05 16:23:47 -05:00
Dylan Thies
25dedc74fa 2024 day-5 clippy and done 2024-12-05 16:13:13 -05:00
Dylan Thies
d320036973 clippy for 2024 day-4 2024-12-04 21:00:58 -05:00
Dylan Thies
fdb74e6439 2024 day-4 as completed 2024-12-04 08:45:57 -05:00
Dylan Thies
17e5f32800 2024 day 3 clippy of original solution 2024-12-03 13:33:40 -05:00
Dylan Thies
2e7cda7aa3 2024 day-3 as answered 2024-12-03 13:00:11 -05:00
Dylan Thies
cd22838062 decided to clippy 2023 in 2024 2024-12-02 11:49:26 -05:00
Dylan Thies
0e93372b96 why where these changed no one knows but past me 2024-12-02 11:29:22 -05:00
Dylan Thies
0ceb59d424 adding Cargo lock 2024-12-02 11:27:59 -05:00
36 changed files with 2112 additions and 67 deletions

View File

@@ -82,7 +82,6 @@ fn main() -> std::io::Result<()> {
} else { } else {
0 0
}; };
// println!("Right: {}, {:?}", right_score, right_part);
let left_part = board[y].iter().rev().skip(x_inv).collect::<Vec<_>>(); let left_part = board[y].iter().rev().skip(x_inv).collect::<Vec<_>>();
let left_score = if left_part.len() > 1 { let left_score = if left_part.len() > 1 {
let score = left_part let score = left_part
@@ -99,7 +98,6 @@ fn main() -> std::io::Result<()> {
} else { } else {
0 0
}; };
// println!("Left: {}, {:?}", left_score, left_part);
let down_part = board.iter().map(|row| row[x]).skip(y).collect::<Vec<_>>(); let down_part = board.iter().map(|row| row[x]).skip(y).collect::<Vec<_>>();
let down_score = if down_part.len() > 1 { let down_score = if down_part.len() > 1 {
let score = down_part let score = down_part
@@ -116,7 +114,6 @@ fn main() -> std::io::Result<()> {
} else { } else {
0 0
}; };
// println!("Down: {}, {:?}", down_score, down_part);
let up_part = board let up_part = board
.iter() .iter()
.map(|row| row[x]) .map(|row| row[x])
@@ -138,12 +135,7 @@ fn main() -> std::io::Result<()> {
} else { } else {
0 0
}; };
// println!("Up: {}. {:?}", up_score, up_part);
let tree_score = right_score * left_score * down_score * up_score; let tree_score = right_score * left_score * down_score * up_score;
// println!(
// "({}, {})({}) = {} = {} * {} * {} * {}",
// x, y, tree_from_top_left, tree_score, up_score, left_score, down_score, right_score
// );
scores[y][x] = tree_score; scores[y][x] = tree_score;
} }
} }

2
2023/Cargo.lock generated
View File

@@ -274,6 +274,7 @@ dependencies = [
name = "day-21" name = "day-21"
version = "2023.0.0" version = "2023.0.0"
dependencies = [ dependencies = [
"dhat",
"glam", "glam",
"itertools 0.12.0", "itertools 0.12.0",
"nom", "nom",
@@ -285,6 +286,7 @@ dependencies = [
name = "day-22" name = "day-22"
version = "2023.0.0" version = "2023.0.0"
dependencies = [ dependencies = [
"dhat",
"glam", "glam",
"itertools 0.12.0", "itertools 0.12.0",
"nom", "nom",

View File

@@ -23,13 +23,13 @@ impl Drawing {
.iter() .iter()
.filter(|mound| mound.x + span >= reflect_col && mound.x < reflect_col) .filter(|mound| mound.x + span >= reflect_col && mound.x < reflect_col)
.map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into()) .map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into())
.all(|mound_reflect| self.mounds.get(&mound_reflect).is_some()) .all(|mound_reflect| self.mounds.contains(&mound_reflect))
&& self && self
.mounds .mounds
.iter() .iter()
.filter(|mound| mound.x < reflect_col + span && mound.x >= reflect_col) .filter(|mound| mound.x < reflect_col + span && mound.x >= reflect_col)
.map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into()) .map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into())
.all(|mound_reflect| self.mounds.get(&mound_reflect).is_some()) .all(|mound_reflect| self.mounds.contains(&mound_reflect))
}) })
.sum::<u32>(); .sum::<u32>();
let row_score = (1..max_row) let row_score = (1..max_row)
@@ -40,13 +40,13 @@ impl Drawing {
.iter() .iter()
.filter(|mound| mound.y + span >= reflect_row && mound.y < reflect_row) .filter(|mound| mound.y + span >= reflect_row && mound.y < reflect_row)
.map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into()) .map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into())
.all(|mound_reflect| self.mounds.get(&mound_reflect).is_some()) .all(|mound_reflect| self.mounds.contains(&mound_reflect))
&& self && self
.mounds .mounds
.iter() .iter()
.filter(|mound| mound.y < reflect_row + span && mound.y >= reflect_row) .filter(|mound| mound.y < reflect_row + span && mound.y >= reflect_row)
.map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into()) .map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into())
.all(|mound_reflect| self.mounds.get(&mound_reflect).is_some()) .all(|mound_reflect| self.mounds.contains(&mound_reflect))
}) })
.sum::<u32>() .sum::<u32>()
* 100; * 100;

View File

@@ -24,14 +24,14 @@ impl Drawing {
.iter() .iter()
.filter(|mound| mound.x + span >= reflect_col && mound.x < reflect_col) .filter(|mound| mound.x + span >= reflect_col && mound.x < reflect_col)
.map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into()) .map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into())
.filter(|mound_reflect| self.mounds.get(mound_reflect).is_none()) .filter(|mound_reflect| !self.mounds.contains(mound_reflect))
.count() .count()
+ self + self
.mounds .mounds
.iter() .iter()
.filter(|mound| mound.x < reflect_col + span && mound.x >= reflect_col) .filter(|mound| mound.x < reflect_col + span && mound.x >= reflect_col)
.map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into()) .map(|mound| (2 * reflect_col - mound.x - 1, mound.y).into())
.filter(|mound_reflect| self.mounds.get(mound_reflect).is_none()) .filter(|mound_reflect| !self.mounds.contains(mound_reflect))
.count()) .count())
== 1 == 1
}) })
@@ -45,14 +45,14 @@ impl Drawing {
.iter() .iter()
.filter(|mound| mound.y + span >= reflect_row && mound.y < reflect_row) .filter(|mound| mound.y + span >= reflect_row && mound.y < reflect_row)
.map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into()) .map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into())
.filter(|mound_reflect| self.mounds.get(mound_reflect).is_none()) .filter(|mound_reflect| !self.mounds.contains(mound_reflect))
.count() .count()
+ self + self
.mounds .mounds
.iter() .iter()
.filter(|mound| mound.y < reflect_row + span && mound.y >= reflect_row) .filter(|mound| mound.y < reflect_row + span && mound.y >= reflect_row)
.map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into()) .map(|mound| (mound.x, 2 * reflect_row - mound.y - 1).into())
.filter(|mound_reflect| self.mounds.get(mound_reflect).is_none()) .filter(|mound_reflect| !self.mounds.contains(mound_reflect))
.count()) .count())
== 1 == 1
}) })

View File

@@ -57,11 +57,12 @@ pub fn part2(input: &str) -> String {
let pos_in_cycle = start_of_cycle + (cycles - end_of_cycle) % len_of_cyle; let pos_in_cycle = start_of_cycle + (cycles - end_of_cycle) % len_of_cyle;
map = cache map.clone_from(
cache
.values() .values()
.find_map(|(look_at_map, pos)| (*pos == pos_in_cycle).then_some(look_at_map)) .find_map(|(look_at_map, pos)| (*pos == pos_in_cycle).then_some(look_at_map))
.unwrap() .unwrap(),
.clone(); );
let mut total = 0_usize; let mut total = 0_usize;
for col in 0..maxes.x { for col in 0..maxes.x {

View File

@@ -18,9 +18,9 @@ pub enum Day2Part1Error {
#[derive(Debug)] #[derive(Debug)]
struct Round { struct Round {
pub red_n: u32, pub red: u32,
pub green_n: u32, pub green: u32,
pub blue_n: u32, pub blue: u32,
} }
#[derive(Debug)] #[derive(Debug)]
@@ -35,7 +35,7 @@ impl Game {
.iter() .iter()
.find_map(|r| { .find_map(|r| {
//TODO if inverted use find_map //TODO if inverted use find_map
if r.red_n > 12 || r.green_n > 13 || r.blue_n > 14 { if r.red > 12 || r.green > 13 || r.blue > 14 {
Some(self.id) Some(self.id)
} else { } else {
None None
@@ -74,15 +74,15 @@ fn process_block(input: &str) -> nom::IResult<&str, (u32, String)> {
fn process_round(input: &str) -> nom::IResult<&str, Round> { fn process_round(input: &str) -> nom::IResult<&str, Round> {
let (i, blocks) = separated_list1(tag(", "), process_block)(input)?; let (i, blocks) = separated_list1(tag(", "), process_block)(input)?;
let mut round = Round { let mut round = Round {
red_n: 0, red: 0,
green_n: 0, green: 0,
blue_n: 0, blue: 0,
}; };
for (cnt, color) in blocks { for (cnt, color) in blocks {
match color.as_str() { match color.as_str() {
"red" => round.red_n = cnt, "red" => round.red = cnt,
"green" => round.green_n = cnt, "green" => round.green = cnt,
"blue" => round.blue_n = cnt, "blue" => round.blue = cnt,
_ => panic!("this should be a color name"), _ => panic!("this should be a color name"),
}; };
} }

View File

@@ -17,9 +17,9 @@ pub enum Day2Part2Error {
#[derive(Debug)] #[derive(Debug)]
struct Round { struct Round {
pub red_n: u32, pub red: u32,
pub green_n: u32, pub green: u32,
pub blue_n: u32, pub blue: u32,
} }
#[derive(Debug)] #[derive(Debug)]
@@ -32,14 +32,14 @@ impl Game {
fn to_power(&self) -> u64 { fn to_power(&self) -> u64 {
let (r, g, b) = self.rounds.iter().fold((0_u64, 0_u64, 0_u64), |acc, x| { let (r, g, b) = self.rounds.iter().fold((0_u64, 0_u64, 0_u64), |acc, x| {
let (mut val_r, mut val_g, mut val_b) = acc; let (mut val_r, mut val_g, mut val_b) = acc;
if u64::from(x.red_n) > acc.0 { if u64::from(x.red) > acc.0 {
val_r = x.red_n.into(); val_r = x.red.into();
} }
if u64::from(x.green_n) > acc.1 { if u64::from(x.green) > acc.1 {
val_g = x.green_n.into(); val_g = x.green.into();
} }
if u64::from(x.blue_n) > acc.2 { if u64::from(x.blue) > acc.2 {
val_b = x.blue_n.into(); val_b = x.blue.into();
} }
(val_r, val_g, val_b) (val_r, val_g, val_b)
}); });
@@ -70,15 +70,15 @@ fn process_block(input: &str) -> nom::IResult<&str, (u32, String)> {
fn process_round(input: &str) -> nom::IResult<&str, Round> { fn process_round(input: &str) -> nom::IResult<&str, Round> {
let (i, blocks) = separated_list1(tag(", "), process_block)(input)?; let (i, blocks) = separated_list1(tag(", "), process_block)(input)?;
let mut round = Round { let mut round = Round {
red_n: 0, red: 0,
green_n: 0, green: 0,
blue_n: 0, blue: 0,
}; };
for (cnt, color) in blocks { for (cnt, color) in blocks {
match color.as_str() { match color.as_str() {
"red" => round.red_n = cnt, "red" => round.red = cnt,
"green" => round.green_n = cnt, "green" => round.green = cnt,
"blue" => round.blue_n = cnt, "blue" => round.blue = cnt,
_ => panic!("this should be a color name"), _ => panic!("this should be a color name"),
}; };
} }

View File

@@ -12,6 +12,10 @@ nom = { workspace = true }
itertools = {workspace = true } itertools = {workspace = true }
nom_locate.workspace = true nom_locate.workspace = true
glam.workspace = true glam.workspace = true
dhat = { workspace = true }
[dev-dependencies] [dev-dependencies]
rstest.workspace = true rstest.workspace = true
[features]
dhat-heap = []

View File

@@ -3,7 +3,14 @@
use day_21::part1; use day_21::part1;
use day_21::part2; use day_21::part2;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
fn main() { fn main() {
#[cfg(feature = "dhat-heap")]
let _profiler = dhat::Profiler::new_heap();
let input = include_str!("./input.txt"); let input = include_str!("./input.txt");
let part1_result = part1(input, 64); let part1_result = part1(input, 64);
println!("part 1: {part1_result}"); println!("part 1: {part1_result}");

View File

@@ -11,3 +11,7 @@ repository.workspace = true
nom = { workspace = true } nom = { workspace = true }
itertools = {workspace = true } itertools = {workspace = true }
glam.workspace = true glam.workspace = true
dhat = { workspace = true }
[features]
dhat-heap = []

View File

@@ -3,7 +3,14 @@
use day_22::part1; use day_22::part1;
use day_22::part2; use day_22::part2;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
fn main() { fn main() {
#[cfg(feature = "dhat-heap")]
let _profiler = dhat::Profiler::new_heap();
let input = include_str!("./input.txt"); let input = include_str!("./input.txt");
let part1_result = part1(input); let part1_result = part1(input);
println!("part 1: {part1_result}"); println!("part 1: {part1_result}");

View File

@@ -60,8 +60,7 @@ pub fn part1(input: &str) -> String {
.filter_map(|dir| { .filter_map(|dir| {
let next_pos = dir + *pos; let next_pos = dir + *pos;
node_map node_map
.get(&next_pos) .contains_key(&next_pos)
.is_some()
.then(|| (node_map[pos], node_map[&next_pos], 1)) .then(|| (node_map[pos], node_map[&next_pos], 1))
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()

View File

@@ -47,8 +47,7 @@ pub fn part2(input: &str) -> String {
.filter_map(|dir| { .filter_map(|dir| {
let next_pos = dir + *pos; let next_pos = dir + *pos;
node_map node_map
.get(&next_pos) .contains_key(&next_pos)
.is_some()
.then(|| (node_map[pos], node_map[&next_pos], 1)) .then(|| (node_map[pos], node_map[&next_pos], 1))
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()

View File

@@ -34,7 +34,7 @@ pub fn part1(input: &str) -> String {
.filter(|x| { .filter(|x| {
x.generate_adjacent() x.generate_adjacent()
.iter() .iter()
.any(|t| symbols.get(t).is_some()) .any(|t| symbols.contains_key(t))
}) })
.map(|x| x.no) .map(|x| x.no)
.sum::<u64>() .sum::<u64>()

View File

@@ -74,6 +74,7 @@ impl ItemMap {
.expect("always") .expect("always")
} }
} }
/// part1 of day 5 of AOC 2023 /// part1 of day 5 of AOC 2023
/// ///
/// # Arguments /// # Arguments
@@ -81,7 +82,6 @@ impl ItemMap {
/// ///
/// # Panics /// # Panics
/// panics whenever the input isn't parsable /// panics whenever the input isn't parsable
#[must_use] #[must_use]
pub fn part1(input: &str) -> String { pub fn part1(input: &str) -> String {
let (_input, (mut to_process, maps)) = parse_input(input).expect("aoc always has input"); let (_input, (mut to_process, maps)) = parse_input(input).expect("aoc always has input");

1019
2024/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -26,7 +26,7 @@ rustworkx-core = "0.15.1"
pathfinding = "4.11.0" pathfinding = "4.11.0"
test-log = {version="0.2.16", features=["default", "unstable"]} test-log = {version="0.2.16", features=["default", "unstable"]}
thiserror = "2.0.3" thiserror = "2.0.3"
regex = "1.11.1"
[profile.dhat] [profile.dhat]
inherits = "release" inherits = "release"

View File

@@ -41,8 +41,8 @@ pub enum Day2Part1Error {
ParseError, ParseError,
} }
/// Day-2 Part 1 for 2024 advent of code /// Day-3 Part 1 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/2> /// Problem can be found here: <https://adventofcode.com/2024/day/3>
/// ///
/// # Errors /// # Errors
/// - `ParseError` there was an issue with the parser /// - `ParseError` there was an issue with the parser

View File

@@ -15,6 +15,7 @@ log.workspace = true
error-stack.workspace = true error-stack.workspace = true
thiserror.workspace = true thiserror.workspace = true
dhat.workspace = true dhat.workspace = true
regex.workspace = true
[dev-dependencies] [dev-dependencies]
test-log.workspace = true test-log.workspace = true

View File

@@ -1,6 +1,7 @@
#![warn(clippy::all, clippy::pedantic)] #![warn(clippy::all, clippy::pedantic)]
use error_stack::Result; use error_stack::{Report, Result, ResultExt};
use regex::Regex;
use thiserror::Error; use thiserror::Error;
// day-3 // day-3
@@ -10,21 +11,33 @@ pub enum Day3Part1Error{
ParseError, ParseError,
} }
pub fn part1 (_input: &str) -> Result<String, Day3Part1Error> { /// Day-2 Part 1 for 2024 advent of code
Ok("Not Finished".to_string()) /// Problem can be found here: <https://adventofcode.com/2024/day/2>
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part1(input: &str) -> Result<String, Day3Part1Error> {
let re = Regex::new(r"mul\((\d{1,3}),(\d{1,3})\)")
.map_err( Report::from)
.change_context(Day3Part1Error::ParseError)?;
Ok(re
.captures_iter(input)
.map(|x| x[1].parse::<i64>().unwrap_or(0) * x[2].parse::<i64>().unwrap_or(0))
.sum::<i64>()
.to_string())
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
const INPUT: &str = ""; const INPUT: &str = "xmul(2,4)%&mul[3,7]!@^do_not_mul(5,5)+mul(32,64]then(mul(11,8)mul(8,5))";
#[test_log::test] #[test_log::test]
#[test_log(default_log_filter = "trace")] #[test_log(default_log_filter = "trace")]
fn part1_works() { fn part1_works() {
let result = part1(INPUT).unwrap(); let result = part1(INPUT).unwrap();
assert_eq!(result, "Not Finished".to_string()); assert_eq!(result, "161".to_string());
} }
} }

View File

@@ -1,6 +1,7 @@
#![warn(clippy::all, clippy::pedantic)] #![warn(clippy::all, clippy::pedantic)]
use error_stack::Result; use error_stack::{Report, Result, ResultExt};
use regex::Regex;
use thiserror::Error; use thiserror::Error;
// day-3 // day-3
@@ -10,21 +11,84 @@ pub enum Day3Part2Error{
ParseError, ParseError,
} }
pub fn part2 (_input: &str) -> Result<String, Day3Part2Error> { /// Day-3 Part 2 for 2024 advent of code
Ok("Not Finished".to_string()) /// Problem can be found here: <https://adventofcode.com/2024/day/3#part2>
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part2(input: &str) -> Result<String, Day3Part2Error> {
let do_re = Regex::new(r"do\(\)")
.map_err( Report::from)
.change_context(Day3Part2Error::ParseError)?;
let dos = do_re
.find_iter(input)
.map(|x| (x.start(), x.end()))
.collect::<Vec<_>>();
let dont_re = Regex::new(r"don't\(\)")
.map_err( Report::from)
.change_context(Day3Part2Error::ParseError)?;
let donts = dont_re
.find_iter(input)
.map(|x| (x.start(), x.end()))
.collect::<Vec<_>>();
let mut dos_index = 0;
let mut donts_index = 0;
let mut white_list = true;
let mut blackout_ranges = Vec::new();
let mut blacklist_start = 0;
while dos_index < dos.len() && donts_index < donts.len() {
if white_list {
if dos[dos_index].1 < donts[donts_index].0 {
//currently whitelisted so dos are no-ops
dos_index += 1;
} else {
blacklist_start = donts[donts_index].0;
white_list = false;
}
} else if donts[donts_index].1 < dos[dos_index].0 {
//in a black list so donts are no-ops
donts_index += 1;
} else {
blackout_ranges.push(blacklist_start..dos[dos_index].1);
blacklist_start = 0;
white_list = true;
}
}
if donts_index < donts.len() {
blackout_ranges.push(donts[donts_index].0..input.len());
} else if dos_index < dos.len() && blacklist_start != 0 {
blackout_ranges.push(blacklist_start..dos[dos_index].1);
}
let re = Regex::new(r"mul\((\d{1,3}),(\d{1,3})\)")
.map_err( Report::from)
.change_context(Day3Part2Error::ParseError)?;
let mut sum = 0;
for mult_match in re.find_iter(input) {
if blackout_ranges
.iter()
.any(|x| x.contains(&mult_match.start()))
{
continue;
}
let values = re.captures(mult_match.as_str())
.ok_or(Report::new( Day3Part2Error::ParseError))?;
sum += values[1].parse::<i64>().unwrap_or(0) * values[2].parse::<i64>().unwrap_or(0);
}
Ok(sum.to_string())
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
const INPUT: &str = ""; const INPUT: &str = "xmul(2,4)&mul[3,7]!^don't()_mul(5,5)+mul(32,64](mul(11,8)undo()?mul(8,5))";
#[test_log::test] #[test_log::test]
#[test_log(default_log_filter = "trace")] #[test_log(default_log_filter = "trace")]
fn part2_works() { fn part2_works() {
let result = part2(INPUT).unwrap(); let result = part2(INPUT).unwrap();
assert_eq!(result, "Not Finished".to_string()); assert_eq!(result, "48".to_string());
} }
} }

24
2024/day-4/Cargo.toml Normal file
View File

@@ -0,0 +1,24 @@
# Cargo manifest for the AOC 2024 day-4 solution crate.
[package]
name = "day-4"
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nom.workspace = true
itertools.workspace = true
log.workspace = true
error-stack.workspace = true
thiserror.workspace = true
dhat.workspace = true
glam.workspace = true

[dev-dependencies]
test-log.workspace = true

[features]
# Enables the dhat heap profiler in main.rs when built with `--features dhat-heap`.
dhat-heap = []

4
2024/day-4/src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
//! Library crate for the AOC 2024 day-4 solution: re-exports the
//! `part1` and `part2` solvers for use by the binary and tests.
pub mod part1;
pub use crate::part1::*;
pub mod part2;
pub use crate::part2::*;

31
2024/day-4/src/main.rs Normal file
View File

@@ -0,0 +1,31 @@
#![warn(clippy::all, clippy::pedantic)]
//! Binary entry point for AOC 2024 day-4: runs both parts against the bundled input.
use day_4::{part1, part2};
use error_stack::{Result, ResultExt};
use thiserror::Error;

#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

/// Top-level failure modes for the day-4 binary.
#[derive(Debug, Error)]
enum Day4Error {
    #[error("Part 1 failed")]
    Part1Error,
    #[error("Part 2 failed")]
    Part2Error,
}

fn main() -> Result<(), Day4Error> {
    // Heap profiling is only compiled in with `--features dhat-heap`.
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();
    let input = include_str!("./input.txt");
    println!(
        "part 1: {}",
        part1(input).change_context(Day4Error::Part1Error)?
    );
    println!(
        "part 2: {}",
        part2(input).change_context(Day4Error::Part2Error)?
    );
    Ok(())
}

111
2024/day-4/src/part1.rs Normal file
View File

@@ -0,0 +1,111 @@
#![warn(clippy::all, clippy::pedantic)]
use error_stack::Result;
use glam::IVec2;
use thiserror::Error;

// day-4
#[derive(Debug, Error)]
pub enum Day4Part1Error {
    #[error("Problem parsing Day 4")]
    ParseError,
}

/// Day-4 Part 1 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/4>
///
/// Counts every occurrence of "XMAS" in the input grid by probing all eight
/// compass directions from each 'X' cell and returns the count as a string.
///
/// # Errors
/// - `ParseError` there was an issue with the parser
///
/// # Panics
/// - Only if a grid dimension cannot be represented as an `i32`, which would
///   require a catastrophically large input.
#[allow(clippy::cast_sign_loss)]
pub fn part1(input: &str) -> Result<String, Day4Part1Error> {
    // Read the input into a rectangular byte grid, one row per line.
    let grid = input
        .lines()
        .map(|line| Vec::from(line.as_bytes()))
        .collect::<Vec<_>>();
    let num_of_rows = grid
        .len()
        .try_into()
        .expect("length cannot be negative ever");
    let num_of_cols = grid[0].len().try_into().unwrap(); // the grid is rectangular
    // For every 'X', count directions in which the next three cells spell "MAS".
    let total: usize = grid
        .iter()
        .enumerate()
        .map(|(row_num, row)| {
            row.iter()
                .enumerate()
                .map(|(col_num, col)| {
                    if *col == b'X' {
                        let point =
                            IVec2::new(row_num.try_into().unwrap(), col_num.try_into().unwrap());
                        [
                            IVec2::NEG_X,
                            IVec2::NEG_ONE,
                            IVec2::NEG_Y,
                            IVec2::new(1, -1),
                            IVec2::X,
                            IVec2::ONE,
                            IVec2::Y,
                            IVec2::new(-1, 1),
                        ]
                        .iter()
                        .filter(|dir| {
                            // Reject directions whose farthest letter would fall off the grid;
                            // this guarantees the three indexings below are in bounds.
                            let extent = point + (*dir * 3);
                            if extent.x < 0
                                || extent.x >= num_of_rows
                                || extent.y < 0
                                || extent.y >= num_of_cols
                            {
                                return false;
                            }
                            let m = point + *dir;
                            let a = point + 2 * *dir;
                            let s = point + 3 * *dir;
                            grid[m.x as u32 as usize][m.y as u32 as usize] == b'M'
                                && grid[a.x as u32 as usize][a.y as u32 as usize] == b'A'
                                && grid[s.x as u32 as usize][s.y as u32 as usize] == b'S'
                        })
                        .count()
                    } else {
                        0_usize
                    }
                })
                .sum::<usize>()
        })
        .sum();
    Ok(total.to_string())
}

#[cfg(test)]
mod test {
    use super::*;
    const INPUT: &str = "MMMSXXMASM
MSAMXMSMSA
AMXSXMAAMM
MSAMASMSMX
XMASAMXAMM
XXAMMXXAMA
SMSMSASXSS
SAXAMASAAA
MAMMMXMMMM
MXMXAXMASX";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part1_works() {
        let result = part1(INPUT).unwrap();
        assert_eq!(result, "18".to_string());
    }
}

102
2024/day-4/src/part2.rs Normal file
View File

@@ -0,0 +1,102 @@
#![warn(clippy::all, clippy::pedantic)]
use error_stack::Result;
use glam::IVec2;
use thiserror::Error;

// day-4
#[derive(Debug, Error)]
pub enum Day4Part2Error {
    #[error("Problem parsing Day 4")]
    ParseError,
}

/// Day-4 Part 2 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/4#part2>
///
/// Counts X-shaped "MAS" crossings: each 'A' (away from the border) whose two
/// diagonals each read "MAS" or "SAM".
///
/// # Errors
/// - `ParseError` there was an issue with the parser
///
/// # Panics
/// - Only if a grid dimension cannot be represented as an `i32`, which would
///   require a catastrophically large input.
pub fn part2(input: &str) -> Result<String, Day4Part2Error> {
    // Read the input into a rectangular byte grid, one row per line.
    let grid = input
        .lines()
        .map(|line| Vec::from(line.as_bytes()))
        .collect::<Vec<_>>();
    let num_of_rows = grid.len();
    let num_of_cols = grid[0].len();
    // Scan every interior cell (skip/take trims the border, where an 'A'
    // cannot be the centre of an X); count cells centred on an 'A'.
    let total: usize = grid
        .iter()
        .enumerate()
        .skip(1)
        .take(num_of_rows - 2)
        .map(|(row_num, row)| {
            row.iter()
                .enumerate()
                .skip(1)
                .take(num_of_cols - 2)
                .map(|(col_num, col)| {
                    if *col == b'A' {
                        // The four diagonal neighbours of the centre 'A'.
                        // Note: IVec2.x is the row index, .y the column index here.
                        let point =
                            IVec2::new(row_num.try_into().unwrap(), col_num.try_into().unwrap());
                        let up_forward = point + IVec2::new(-1, 1);
                        let up_back = point + IVec2::NEG_ONE;
                        let down_forward = point + IVec2::ONE;
                        let down_back = point + IVec2::new(1, -1);
                        // One diagonal must be M..S or S..M, and so must the other;
                        // the bool converts to 1 (both hold) or 0 via `.into()`.
                        #[allow(clippy::cast_sign_loss)]
                        (((grid[up_back.x as u32 as usize][up_back.y as u32 as usize] == b'M'
                            && grid[down_forward.x as u32 as usize]
                                [down_forward.y as u32 as usize]
                                == b'S')
                            || (grid[up_back.x as u32 as usize][up_back.y as u32 as usize] == b'S'
                                && grid[down_forward.x as u32 as usize]
                                    [down_forward.y as u32 as usize]
                                    == b'M'))
                            && ((grid[down_back.x as u32 as usize][down_back.y as u32 as usize]
                                == b'M'
                                && grid[up_forward.x as u32 as usize]
                                    [up_forward.y as u32 as usize]
                                    == b'S')
                                || (grid[down_back.x as u32 as usize][down_back.y as u32 as usize]
                                    == b'S'
                                    && grid[up_forward.x as u32 as usize]
                                        [up_forward.y as u32 as usize]
                                        == b'M'))).into()
                    } else {
                        0_usize
                    }
                })
                .sum::<usize>()
        })
        .sum();
    Ok(total.to_string())
}

#[cfg(test)]
mod test {
    use super::*;
    const INPUT: &str = "MMMSXXMASM
MSAMXMSMSA
AMXSXMAAMM
MSAMASMSMX
XMASAMXAMM
XXAMMXXAMA
SMSMSASXSS
SAXAMASAAA
MAMMMXMMMM
MXMXAXMASX";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part2_works() {
        let result = part2(INPUT).unwrap();
        assert_eq!(result, "9".to_string());
    }
}

23
2024/day-5/Cargo.toml Normal file
View File

@@ -0,0 +1,23 @@
# Cargo manifest for the AOC 2024 day-5 solution crate.
[package]
name = "day-5"
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nom.workspace = true
itertools.workspace = true
log.workspace = true
error-stack.workspace = true
thiserror.workspace = true
dhat.workspace = true

[dev-dependencies]
test-log.workspace = true

[features]
# Enables the dhat heap profiler in main.rs when built with `--features dhat-heap`.
dhat-heap = []

4
2024/day-5/src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
//! Library crate for the AOC 2024 day-5 solution: re-exports the
//! `part1` and `part2` solvers for use by the binary and tests.
pub mod part1;
pub use crate::part1::*;
pub mod part2;
pub use crate::part2::*;

31
2024/day-5/src/main.rs Normal file
View File

@@ -0,0 +1,31 @@
#![warn(clippy::all, clippy::pedantic)]
//! Binary entry point for AOC 2024 day-5: runs both parts against the bundled input.
use day_5::{part1, part2};
use error_stack::{Result, ResultExt};
use thiserror::Error;

#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

/// Top-level failure modes for the day-5 binary.
#[derive(Debug, Error)]
enum Day5Error {
    #[error("Part 1 failed")]
    Part1Error,
    #[error("Part 2 failed")]
    Part2Error,
}

fn main() -> Result<(), Day5Error> {
    // Heap profiling is only compiled in with `--features dhat-heap`.
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();
    let input = include_str!("./input.txt");
    println!(
        "part 1: {}",
        part1(input).change_context(Day5Error::Part1Error)?
    );
    println!(
        "part 2: {}",
        part2(input).change_context(Day5Error::Part2Error)?
    );
    Ok(())
}

119
2024/day-5/src/part1.rs Normal file
View File

@@ -0,0 +1,119 @@
#![warn(clippy::all, clippy::pedantic)]
use std::collections::HashMap;

use error_stack::{Report, Result, ResultExt};
use nom::{
    bytes::complete::tag, character::complete, multi::separated_list1, sequence::separated_pair,
    IResult,
};
use thiserror::Error;

// day-5
#[derive(Debug, Error)]
pub enum Day5Part1Error {
    #[error("Problem parsing Day 5")]
    ParseError,
}

// Maps a page number X to every page Y that rules `X|Y` say must come after it.
type Orderings = HashMap<u32, Vec<u32>>;

/// Day-5 Part 1 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/5>
///
/// Sums the middle page of every update whose pages already satisfy all
/// `X|Y` ordering rules.
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part1(input: &str) -> Result<String, Day5Part1Error> {
    // Parse the `X|Y` rule block and the comma-separated update lists.
    let (_, (ordering, updates)) = parse_input(input)
        .map_err(|x| Report::from(x.to_owned()))
        .change_context(Day5Part1Error::ParseError)?;
    let middles: u32 = updates
        .iter()
        .filter_map(|update| {
            let update_len = update.len();
            // An update is invalid if any page that must come *after* page `a`
            // appears anywhere *before* it.
            for i in 0..update_len {
                let before = &update[..i];
                if let Some(a) = update.get(i) {
                    if let Some(rules) = ordering.get(a) {
                        if rules.iter().any(|b| before.contains(b)) {
                            return None;
                        }
                    }
                }
            }
            // Valid update: contribute its middle page.
            Some(update[update_len / 2])
        })
        .sum();
    Ok(middles.to_string())
}

// Parses the rule block: one `X|Y` pair per line, collected into an
// after-lists map (X -> [Y, ...]).
fn parse_ordering(input: &str) -> IResult<&str, Orderings> {
    let (input, rules) = separated_list1(
        complete::line_ending,
        separated_pair(complete::u32, tag("|"), complete::u32),
    )(input)?;
    let ordering = rules.iter().fold(HashMap::new(), |mut acc: Orderings, (a, b)| {
        acc.entry(*a).or_default().push(*b);
        acc
    });
    Ok((input, ordering))
}

// Parses a single comma-separated update line.
fn parse_update(input: &str) -> IResult<&str, Vec<u32>> {
    separated_list1(tag(","), complete::u32)(input)
}

// Parses all update lines, one per line.
fn parse_updates(input: &str) -> IResult<&str, Vec<Vec<u32>>> {
    separated_list1(complete::line_ending, parse_update)(input)
}

// Parses the whole puzzle input: rules, a blank line, then the updates.
fn parse_input(input: &str) -> IResult<&str, (Orderings, Vec<Vec<u32>>)> {
    let (input, ordering) = parse_ordering(input)?;
    let (input, _) = complete::line_ending(input)?;
    let (input, _) = complete::line_ending(input)?;
    let (input, updates) = parse_updates(input)?;
    Ok((input, (ordering, updates)))
}

#[cfg(test)]
mod test {
    use super::*;
    const INPUT: &str = "47|53
97|13
97|61
97|47
75|29
61|13
75|53
29|13
97|29
53|29
61|53
97|53
61|29
47|13
75|47
97|75
47|61
75|61
47|29
75|13
53|13

75,47,61,53,29
97,61,53,29,13
75,29,13
75,97,47,61,53
61,13,29
97,13,75,29,47";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part1_works() {
        let result = part1(INPUT).unwrap();
        assert_eq!(result, "143".to_string());
    }
}

127
2024/day-5/src/part2.rs Normal file
View File

@@ -0,0 +1,127 @@
#![warn(clippy::all, clippy::pedantic)]
use std::{cmp::Ordering, collections::HashMap};
use error_stack::{Report, Result, ResultExt};
use nom::{bytes::complete::tag, character::complete, multi::separated_list1, sequence::separated_pair, IResult};
use thiserror::Error;
// day-5
#[derive(Debug, Error)]
pub enum Day5Part2Error{
#[error("Problem parsing Day 5")]
ParseError,
}
type Orderings = HashMap<u32, Vec<u32>>;
/// Day-5 Part 2 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/5#part2>
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part2 (input: &str) -> Result<String, Day5Part2Error> {
let (_, (ordering, mut updates)) = parse_input(input)
.map_err(|x| Report::from(x.to_owned()))
.change_context(Day5Part2Error::ParseError)?;
let middles: u32 = updates
.iter_mut()
.filter_map(|update| {
let update_len = update.len();
for i in 0..update_len {
let before = &update[..i];
if let Some(a) = update.get(i) {
if let Some(rules) = ordering.get(a) {
if rules.iter().any(|b| before.contains(b)) {
return Some(update);
}
}
}
}
None
})
.map(|update| {
update.sort_by(|a,b| {
let Some(rule_a) = ordering.get(a) else { return Ordering::Equal;} ;
//let Some(rule_b) = ordering.get(b) else { return Ordering::Equal;} ;
if rule_a.contains(b) {
return Ordering::Less;
}
Ordering::Equal
});
update[update.len()/2]
})
.sum();
Ok(middles.to_string())
}
fn parse_ordering(input: &str) -> IResult<&str, Orderings> {
let (input, rules) = separated_list1(
complete::line_ending,
separated_pair(complete::u32, tag("|"), complete::u32),
)(input)?;
let ordering = rules.iter().fold(HashMap::new(), |mut acc: Orderings, (a, b)| {
acc.entry(*a).or_default().push(*b);
acc
});
Ok((input, ordering))
}
fn parse_update(input: &str) -> IResult<&str, Vec<u32>> {
separated_list1(tag(","), complete::u32)(input)
}
fn parse_updates(input: &str) -> IResult<&str, Vec<Vec<u32>>> {
separated_list1(complete::line_ending, parse_update)(input)
}
/// Splits the puzzle input into its two sections — ordering rules, a blank
/// line, then the updates — and parses both.
fn parse_input(input: &str) -> IResult<&str, (Orderings, Vec<Vec<u32>>)> {
    let (input, ordering) = parse_ordering(input)?;
    // The blank separator line shows up as two consecutive line endings.
    let (input, _) = complete::line_ending(input)?;
    let (input, _) = complete::line_ending(input)?;
    parse_updates(input).map(|(rest, updates)| (rest, (ordering, updates)))
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example from the day-5 problem statement: 21 ordering rules
    // followed by a blank line and 6 updates.
    const INPUT: &str = "47|53
97|13
97|61
97|47
75|29
61|13
75|53
29|13
97|29
53|29
61|53
97|53
61|29
47|13
75|47
97|75
47|61
75|61
47|29
75|13
53|13

75,47,61,53,29
97,61,53,29,13
75,29,13
75,97,47,61,53
61,13,29
97,13,75,29,47";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part2_works() {
        // The statement gives 123 as the sum of the corrected updates'
        // middle pages.
        let result = part2(INPUT).unwrap();
        assert_eq!(result, "123".to_string());
    }
}

24
2024/day-6/Cargo.toml Normal file
View File

@@ -0,0 +1,24 @@
[package]
name = "day-6"
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nom.workspace = true
itertools.workspace = true
log.workspace = true
error-stack.workspace = true
thiserror.workspace = true
dhat.workspace = true
glam.workspace = true
[dev-dependencies]
test-log.workspace = true
[features]
dhat-heap = []

4
2024/day-6/src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
pub mod part1;
pub use crate::part1::*;
pub mod part2;
pub use crate::part2::*;

31
2024/day-6/src/main.rs Normal file
View File

@@ -0,0 +1,31 @@
#![warn(clippy::all, clippy::pedantic)]
use day_6::part1;
use day_6::part2;
use error_stack::{Result, ResultExt};
use thiserror::Error;
#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
/// Top-level failure modes for the day-6 binary.
#[derive(Debug, Error)]
enum Day6Error {
    /// `part1` returned an error.
    #[error("Part 1 failed")]
    Part1Error,
    /// `part2` returned an error.
    #[error("Part 2 failed")]
    Part2Error,
}
/// Runs both day-6 parts against the bundled puzzle input and prints each
/// answer as soon as it is available (so part 1's answer still prints even
/// if part 2 fails). Heap profiling is enabled via the `dhat-heap` feature.
fn main() -> Result<(), Day6Error> {
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();
    // Input is compiled into the binary; the file must exist at build time.
    let input = include_str!("./input.txt");
    let part1_result = part1(input).change_context(Day6Error::Part1Error)?;
    println!("part 1: {part1_result}");
    let part2_result = part2(input).change_context(Day6Error::Part2Error)?;
    println!("part 2: {part2_result}");
    Ok(())
}

263
2024/day-6/src/part1.rs Normal file
View File

@@ -0,0 +1,263 @@
#![warn(clippy::all, clippy::pedantic)]
use std::{collections::HashSet, ops::Sub};
use glam::IVec2;
use thiserror::Error;
// day-6
/// Errors that can occur while solving 2024 day 6 part 1.
#[derive(Debug, Error)]
pub enum Day6Part1Error {
    /// The puzzle input could not be parsed.
    #[error("Problem parsing Day 6")]
    ParseError,
}
/// The four directions the guard can face.
///
/// Positions in this file use `x` as the row index and `y` as the column
/// index (see `parse_input`), so North/South move along `x` and East/West
/// move along `y`.
#[derive(Debug)]
enum Direction {
    North,
    East,
    South,
    West,
}
/// Converts a direction into its unit step vector.
///
/// Because `x` is the row and `y` the column, North (up one row) is `-x`
/// and East (right one column) is `+y`.
impl From<&Direction> for IVec2 {
    fn from(value: &Direction) -> Self {
        match value {
            Direction::North => IVec2::NEG_X,
            Direction::East => IVec2::Y,
            Direction::South => IVec2::X,
            Direction::West => IVec2::NEG_Y,
        }
    }
}
/// Steps a position one cell opposite to `rhs` — used to back the guard off
/// an obstacle it just walked into.
impl Sub<&Direction> for &IVec2 {
    type Output = IVec2;
    fn sub(self, rhs: &Direction) -> Self::Output {
        // Reuse the `From<&Direction> for IVec2` conversion instead of
        // duplicating the direction-to-vector match a second time.
        self - IVec2::from(rhs)
    }
}
impl Direction {
    /// Returns the direction after a 90° clockwise turn — the guard's rule
    /// when it runs into an obstacle.
    #[must_use]
    pub const fn next(&self) -> Self {
        match self {
            Direction::North => Direction::East,
            Direction::East => Direction::South,
            Direction::South => Direction::West,
            Direction::West => Direction::North,
        }
    }
}
/// Sparse representation of the lab map: only obstacle positions are stored,
/// together with the grid bounds.
#[derive(Debug)]
struct MyMap {
    // Position of every `#` cell, as (row = x, col = y).
    pub obstacles: HashSet<IVec2>,
    // Number of rows in the grid.
    pub height: u32,
    // Number of columns in the grid.
    pub width: u32,
}
impl MyMap {
    /// Returns the first obstacle the guard would hit walking from
    /// `start_pos` in `direction`, or `None` if it walks off the map.
    pub fn next_obstacle(&self, start_pos: IVec2, direction: &Direction) -> Option<&IVec2> {
        // Restrict to obstacles on the guard's row/column that lie ahead of
        // it in the travel direction.
        let ahead = self.obstacles.iter().filter(|obstacle| match direction {
            Direction::North => obstacle.y == start_pos.y && obstacle.x < start_pos.x,
            Direction::East => obstacle.x == start_pos.x && obstacle.y > start_pos.y,
            Direction::South => obstacle.y == start_pos.y && obstacle.x > start_pos.x,
            Direction::West => obstacle.x == start_pos.x && obstacle.y < start_pos.y,
        });
        // The nearest one is the extreme coordinate toward the guard; using
        // min/max_by_key replaces the original unwrap-heavy fold.
        match direction {
            // Moving North decreases x, so the nearest obstacle has max x.
            Direction::North => ahead.max_by_key(|obstacle| obstacle.x),
            Direction::East => ahead.min_by_key(|obstacle| obstacle.y),
            Direction::South => ahead.min_by_key(|obstacle| obstacle.x),
            Direction::West => ahead.max_by_key(|obstacle| obstacle.y),
        }
    }
}
/// Day-6 Part 1 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/6>
///
/// Simulates the guard's patrol: walk straight until hitting an obstacle,
/// turn 90° clockwise, repeat until the guard leaves the map. Returns the
/// number of distinct cells visited. Also prints the final map with visited
/// cells marked `X` (left-over debug output).
///
/// # Errors
/// - `ParseError` there was an issue with the parser
///
/// # Panics
/// - it just does
pub fn part1(input: &str) -> Result<String, Day6Part1Error> {
    //let input = Span::new(input);
    //TODO figure out how to real error
    let (mut guard_pos, map) = parse_input(input);
    // The guard always starts facing North (`^` in the input).
    let mut guard_dir = Direction::North;
    let mut visited = HashSet::new();
    // Loop while the guard is still inside the grid (x = row, y = col).
    while guard_pos.x >= 0
        && guard_pos.y >= 0
        && (guard_pos.x as u32) < map.height
        && (guard_pos.y as u32) < map.width
    {
        let _ = visited.insert(guard_pos);
        if let Some(next_obstacle) = map.next_obstacle(guard_pos, &guard_dir) {
            // println!("Hit row {}, col {} going {:?}", next_obstacle.x, next_obstacle.y, guard_dir);
            // Mark every cell between the guard and the obstacle as visited.
            // NOTE(review): the South/East arms stop one cell short of the
            // obstacle (`..coord-1` instead of `..coord`); the cell where
            // the guard actually stops is still recorded by the
            // `visited.insert(guard_pos)` at the top of the next iteration.
            match guard_dir {
                Direction::North => {
                    ((next_obstacle.x + 1)..guard_pos.x)
                        .map(|x| IVec2::new(x, guard_pos.y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                },
                Direction::South => {
                    (guard_pos.x..(next_obstacle.x - 1))
                        .map(|x| IVec2::new(x, guard_pos.y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                },
                Direction::East => {
                    (guard_pos.y..(next_obstacle.y - 1))
                        .map(|y| IVec2::new(guard_pos.x, y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                },
                Direction::West => {
                    ((next_obstacle.y + 1)..guard_pos.y)
                        .map(|y| IVec2::new(guard_pos.x, y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                },
            }
            // Stop one cell before the obstacle (uses the Sub impl above).
            guard_pos = next_obstacle - &guard_dir;
        } else {
            // No obstacle ahead: the guard walks off the map. Pick a
            // position one step outside the grid so the while condition
            // terminates after this pass.
            let new_pos = match guard_dir {
                Direction::North => IVec2::new(-1, guard_pos.y),
                Direction::East => IVec2::new(guard_pos.x, map.width.try_into().unwrap()),
                Direction::South => IVec2::new(map.height.try_into().unwrap(), guard_pos.y),
                Direction::West => IVec2::new(guard_pos.x, -1),
            };
            // println!("Left map at row {}, col {}", new_pos.x, new_pos.y);
            // Mark the cells walked over on the way out of the grid.
            match guard_dir {
                Direction::North => {
                    ((new_pos.x + 1)..guard_pos.x)
                        .map(|x| IVec2::new(x, guard_pos.y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                }
                Direction::South => {
                    (guard_pos.x..new_pos.x)
                        .map(|x| IVec2::new(x, guard_pos.y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                }
                Direction::East => {
                    (guard_pos.y..new_pos.y)
                        .map(|y| IVec2::new(guard_pos.x, y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                }
                Direction::West => {
                    ((new_pos.y + 1)..guard_pos.y)
                        .map(|y| IVec2::new(guard_pos.x, y))
                        .for_each(|x| {
                            visited.insert(x);
                        });
                }
            }
            guard_pos = new_pos;
            //break
        }
        // Turn clockwise for the next leg of the patrol.
        guard_dir = guard_dir.next();
        /*
        for row in 0..map.height.try_into().unwrap() {
            for col in 0..map.width.try_into().unwrap() {
                let pos = IVec2::new(row, col);
                if visited.contains(&pos) {
                    print!("X");
                } else {
                    print!(".");
                }
            }
            print!("\n");
        }*/
    }
    // NOTE(review): left-over debug rendering of the final map — runs
    // unconditionally on every call.
    for row in 0..map.height.try_into().unwrap() {
        for col in 0..map.width.try_into().unwrap() {
            let pos = IVec2::new(row, col);
            if visited.contains(&pos) {
                print!("X");
            } else if map.obstacles.contains(&pos) {
                print!("#");
            } else {
                print!(".");
            }
        }
        println!();
    }
    Ok(visited.len().to_string())
}
/// Parses the map, returning the guard's starting position (the `^` cell)
/// and a `MyMap` holding every obstacle (`#`) plus the grid bounds.
/// Positions are stored as (row = x, col = y).
fn parse_input(input: &str) -> (IVec2, MyMap) {
    let mut pos = IVec2::ZERO;
    let mut height: u32 = 0;
    let mut width: u32 = 0;
    let mut obstacles = HashSet::new();
    for (row_no, row) in input.lines().enumerate() {
        // Track the last row index and the length of the last row seen.
        height = row_no.try_into().unwrap();
        width = row.len().try_into().unwrap();
        for (col_no, c) in row.chars().enumerate() {
            if c == '#' {
                obstacles.insert(IVec2::new(
                    row_no.try_into().unwrap(),
                    col_no.try_into().unwrap(),
                ));
            } else if c == '^' {
                pos = IVec2::new(row_no.try_into().unwrap(), col_no.try_into().unwrap());
            }
        }
    }
    // `height` ended up as the final row *index*, hence the +1.
    (
        pos,
        MyMap {
            obstacles,
            height: height + 1,
            width,
        },
    )
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example map from the day-6 problem statement; `^` is the
    // guard's start, `#` are obstacles.
    const INPUT: &str = "....#.....
.........#
..........
..#.......
.......#..
..........
.#..^.....
........#.
#.........
......#...";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part1_works() {
        // The statement gives 41 distinct visited cells for this map.
        let result = part1(INPUT).unwrap();
        assert_eq!(result, "41".to_string());
    }
}

35
2024/day-6/src/part2.rs Normal file
View File

@@ -0,0 +1,35 @@
#![warn(clippy::all, clippy::pedantic)]
use error_stack::Result;
use thiserror::Error;
// day-6
#[derive(Debug, Error)]
pub enum Day6Part2Error{
#[error("Problem parsing Day 6")]
ParseError,
}
/// Day-6 Part 2 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/6#part2>
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part2(_input: &str) -> Result<String, Day6Part2Error> {
    // Placeholder until part 2 is implemented.
    let answer = String::from("Not Finished");
    Ok(answer)
}
#[cfg(test)]
mod test {
    use super::*;
    // Part 2 is a stub, so any input works; use the empty string.
    const INPUT: &str = "";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part2_works() {
        // Pins the stub's placeholder return value.
        let result = part2(INPUT).unwrap();
        assert_eq!(result, "Not Finished".to_string());
    }
}