2024 day-5 clippy and done

This commit is contained in:
Dylan Thies
2024-12-05 11:47:14 -05:00
parent d320036973
commit 25dedc74fa
5 changed files with 320 additions and 0 deletions

23
2024/day-5/Cargo.toml Normal file
View File

@@ -0,0 +1,23 @@
[package]
name = "day-5"
version.workspace = true
edition.workspace = true
authors.workspace = true
repository.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
# nom: input parsing; error-stack + thiserror: error reporting;
# dhat: optional heap profiling (enabled via the dhat-heap feature).
[dependencies]
nom.workspace = true
itertools.workspace = true
log.workspace = true
error-stack.workspace = true
thiserror.workspace = true
dhat.workspace = true
# test-log wires env_logger/tracing output into #[test] runs.
[dev-dependencies]
test-log.workspace = true
# dhat-heap: turn on the dhat global allocator in main.rs for heap profiling.
[features]
dhat-heap = []

4
2024/day-5/src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
//! Advent of Code 2024, day 5: part 1 and part 2 solvers.
pub mod part1;
pub mod part2;

// Flatten both solvers into the crate root so the binary can
// `use day_5::{part1, part2}` directly.
pub use crate::part1::*;
pub use crate::part2::*;

31
2024/day-5/src/main.rs Normal file
View File

@@ -0,0 +1,31 @@
#![warn(clippy::all, clippy::pedantic)]
use day_5::part1;
use day_5::part2;
use error_stack::{Result, ResultExt};
use thiserror::Error;

#[cfg(feature = "dhat-heap")]
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;

/// Top-level failure modes for the day-5 binary.
#[derive(Debug, Error)]
enum Day5Error {
    #[error("Part 1 failed")]
    Part1Error,
    #[error("Part 2 failed")]
    Part2Error,
}

/// Runs both parts against the bundled puzzle input and prints each answer.
fn main() -> Result<(), Day5Error> {
    // With the dhat-heap feature, profile heap usage for the whole run.
    #[cfg(feature = "dhat-heap")]
    let _profiler = dhat::Profiler::new_heap();

    let input = include_str!("./input.txt");
    println!(
        "part 1: {}",
        part1(input).change_context(Day5Error::Part1Error)?
    );
    println!(
        "part 2: {}",
        part2(input).change_context(Day5Error::Part2Error)?
    );
    Ok(())
}

119
2024/day-5/src/part1.rs Normal file
View File

@@ -0,0 +1,119 @@
#![warn(clippy::all, clippy::pedantic)]
use std::collections::HashMap;
use error_stack::{Report, Result, ResultExt};
use nom::{
bytes::complete::tag, character::complete, multi::separated_list1, sequence::separated_pair,
IResult,
};
use thiserror::Error;
// day-5
/// Errors produced by the part-1 solver.
#[derive(Debug, Error)]
pub enum Day5Part1Error {
    #[error("Problem parsing Day 5")]
    ParseError,
}
/// Maps a page number X to every page Y from a rule "X|Y",
/// i.e. the pages that must appear AFTER X in a valid update.
type Orderings = HashMap<u32, Vec<u32>>;
/// Day-5 Part 1 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/5>
///
/// Sums the middle page number of every update that already satisfies
/// all "X|Y" ordering rules, returning the total as a string.
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part1(input: &str) -> Result<String, Day5Part1Error> {
    // Parse the "X|Y" rules (keyed by X) and the comma-separated updates.
    let (_, (ordering, updates)) = parse_input(input)
        .map_err(|x| Report::from(x.to_owned()))
        .change_context(Day5Part1Error::ParseError)?;
    // An update is valid when no page appears before a page that, per the
    // rules for that page, must come after it.
    let middles: u32 = updates
        .iter()
        .filter(|update| {
            update.iter().enumerate().all(|(i, page)| {
                ordering
                    .get(page)
                    // No rules for this page means nothing to violate.
                    .map_or(true, |rules| !rules.iter().any(|b| update[..i].contains(b)))
            })
        })
        // separated_list1 guarantees at least one page, so indexing is safe.
        .map(|update| update[update.len() / 2])
        .sum();
    Ok(middles.to_string())
}
fn parse_ordering(input: &str) -> IResult<&str, Orderings> {
let (input, rules) = separated_list1(
complete::line_ending,
separated_pair(complete::u32, tag("|"), complete::u32),
)(input)?;
let ordering = rules.iter().fold(HashMap::new(), |mut acc: Orderings, (a, b)| {
acc.entry(*a).or_default().push(*b);
acc
});
Ok((input, ordering))
}
/// Parses one update: a comma-separated line of page numbers.
fn parse_update(line: &str) -> IResult<&str, Vec<u32>> {
    separated_list1(tag(","), complete::u32)(line)
}
/// Parses the updates section: one update per line.
fn parse_updates(section: &str) -> IResult<&str, Vec<Vec<u32>>> {
    separated_list1(complete::line_ending, parse_update)(section)
}
/// Parses the whole puzzle input: rules, a blank line, then the updates.
fn parse_input(input: &str) -> IResult<&str, (Orderings, Vec<Vec<u32>>)> {
    let (rest, ordering) = parse_ordering(input)?;
    // The blank separator line is two consecutive line endings.
    let (rest, _) = complete::line_ending(rest)?;
    let (rest, _) = complete::line_ending(rest)?;
    let (rest, updates) = parse_updates(rest)?;
    Ok((rest, (ordering, updates)))
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example from the puzzle statement: 21 ordering rules, a blank
    // line, then 6 updates. Kept byte-for-byte as published.
    const INPUT: &str = "47|53
97|13
97|61
97|47
75|29
61|13
75|53
29|13
97|29
53|29
61|53
97|53
61|29
47|13
75|47
97|75
47|61
75|61
47|29
75|13
53|13

75,47,61,53,29
97,61,53,29,13
75,29,13
75,97,47,61,53
61,13,29
97,13,75,29,47";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part1_works() {
        // The first three updates are valid; their middle pages sum to
        // 61 + 53 + 29 = 143.
        let result = part1(INPUT).unwrap();
        assert_eq!(result, "143".to_string());
    }
}

143
2024/day-5/src/part2.rs Normal file
View File

@@ -0,0 +1,143 @@
#![warn(clippy::all, clippy::pedantic)]
use std::{cmp::Ordering, collections::HashMap};
use error_stack::{Report, Result, ResultExt};
use nom::{bytes::complete::tag, character::complete, multi::separated_list1, sequence::separated_pair, IResult};
use thiserror::Error;
// day-5
/// Errors produced by the part-2 solver.
#[derive(Debug, Error)]
pub enum Day5Part2Error{
    #[error("Problem parsing Day 5")]
    ParseError,
}
/// Maps a page number X to every page Y from a rule "X|Y",
/// i.e. the pages that must appear AFTER X in a valid update.
type Orderings = HashMap<u32, Vec<u32>>;
/// Day-5 Part 2 for 2024 advent of code
/// Problem can be found here: <https://adventofcode.com/2024/day/3>
///
/// # Errors
/// - `ParseError` there was an issue with the parser
pub fn part2 (input: &str) -> Result<String, Day5Part2Error> {
let (_, (ordering, mut updates)) = parse_input(input)
.map_err(|x| Report::from(x.to_owned()))
.change_context(Day5Part2Error::ParseError)?;
let middles: u32 = updates
.iter_mut()
.filter_map(|update| {
let update_len = update.len();
for i in 0..update_len {
let before = &update[..i];
if let Some(a) = update.get(i) {
if let Some(rules) = ordering.get(a) {
if rules.iter().any(|b| before.contains(b)) {
return Some(update);
}
}
}
}
None
})
.map(|update| {
update.sort_by(|a,b| {
let Some(rule_a) = ordering.get(a) else { return Ordering::Equal;} ;
//let Some(rule_b) = ordering.get(b) else { return Ordering::Equal;} ;
if rule_a.contains(b) {
return Ordering::Less;
}
Ordering::Equal
});
update[update.len()/2]
})
.sum();
Ok(middles.to_string())
}
/*
* --- Part Two ---
While the Elves get to work printing the correctly-ordered updates, you have a little time to fix the rest of them.
For each of the incorrectly-ordered updates, use the page ordering rules to put the page numbers in the right order. For the above example, here are the three incorrectly-ordered updates and their correct orderings:
75,97,47,61,53 becomes 97,75,47,61,53.
61,13,29 becomes 61,29,13.
97,13,75,29,47 becomes 97,75,47,29,13.
After taking only the incorrectly-ordered updates and ordering them correctly, their middle page numbers are 47, 29, and 47. Adding these together produces 123.
Find the updates which are not in the correct order. What do you get if you add up the middle page numbers after correctly ordering just those updates?
*/
fn parse_ordering(input: &str) -> IResult<&str, Orderings> {
let (input, rules) = separated_list1(
complete::line_ending,
separated_pair(complete::u32, tag("|"), complete::u32),
)(input)?;
let ordering = rules.iter().fold(HashMap::new(), |mut acc: Orderings, (a, b)| {
acc.entry(*a).or_default().push(*b);
acc
});
Ok((input, ordering))
}
/// Parses one update: a comma-separated line of page numbers.
fn parse_update(line: &str) -> IResult<&str, Vec<u32>> {
    separated_list1(tag(","), complete::u32)(line)
}
/// Parses the updates section: one update per line.
fn parse_updates(section: &str) -> IResult<&str, Vec<Vec<u32>>> {
    separated_list1(complete::line_ending, parse_update)(section)
}
/// Parses the whole puzzle input: rules, a blank line, then the updates.
fn parse_input(input: &str) -> IResult<&str, (Orderings, Vec<Vec<u32>>)> {
    let (rest, ordering) = parse_ordering(input)?;
    // The blank separator line is two consecutive line endings.
    let (rest, _) = complete::line_ending(rest)?;
    let (rest, _) = complete::line_ending(rest)?;
    let (rest, updates) = parse_updates(rest)?;
    Ok((rest, (ordering, updates)))
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example from the puzzle statement: 21 ordering rules, a blank
    // line, then 6 updates. Kept byte-for-byte as published.
    const INPUT: &str = "47|53
97|13
97|61
97|47
75|29
61|13
75|53
29|13
97|29
53|29
61|53
97|53
61|29
47|13
75|47
97|75
47|61
75|61
47|29
75|13
53|13

75,47,61,53,29
97,61,53,29,13
75,29,13
75,97,47,61,53
61,13,29
97,13,75,29,47";
    #[test_log::test]
    #[test_log(default_log_filter = "trace")]
    fn part2_works() {
        // The last three updates are invalid; after reordering, their middle
        // pages sum to 47 + 29 + 47 = 123.
        let result = part2(INPUT).unwrap();
        assert_eq!(result, "123".to_string());
    }
}