Day three

This commit is contained in:
2024-12-04 20:57:28 +01:00
parent 38d35082b0
commit 464f5846ed
8 changed files with 165 additions and 0 deletions

1
day-03-rust/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
target/

7
day-03-rust/Cargo.lock generated Normal file
View File

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "day-03-rust"
version = "0.1.0"

6
day-03-rust/Cargo.toml Normal file
View File

@@ -0,0 +1,6 @@
[package]
name = "day-03-rust"
version = "0.1.0"
edition = "2021"
[dependencies]

2
day-03-rust/rust-toolchain.toml Normal file
View File

@@ -0,0 +1,2 @@
[toolchain]
channel = "nightly"

61
day-03-rust/src/lexer.rs Normal file
View File

@@ -0,0 +1,61 @@
use std::io::{Bytes, Read};
use std::iter::Peekable;

/// Integer type used for all lexed numbers.
pub type Number = i32;

/// One lexical unit of the puzzle input.
#[derive(PartialEq, Debug)]
pub enum Token {
    /// Maximal run of ASCII letters and apostrophes, e.g. `mul` or `don't`.
    Identifier(String),
    /// Decimal integer, optionally negative (a `-` immediately followed by a digit).
    Number(Number),
    /// Any other single byte, passed through verbatim.
    Operator(u8),
}

/// Streaming tokenizer over an arbitrary `Read` source.
///
/// Bytes are consumed lazily. An I/O error terminates the token stream,
/// exactly like end-of-input.
pub struct Lexer<R: Read> {
    reader: Peekable<Bytes<R>>,
}

/// True for the bytes that may appear inside an identifier.
fn is_word_byte(b: u8) -> bool {
    b.is_ascii_alphabetic() || b == b'\''
}

impl<R: Read> Lexer<R> {
    /// Creates a lexer reading bytes from `reader`.
    pub fn new(reader: R) -> Lexer<R> {
        Lexer { reader: reader.bytes().peekable() }
    }

    /// Consumes and returns the next byte only if `pred` accepts it;
    /// otherwise leaves the stream untouched and returns `None`.
    fn next_if_byte(&mut self, pred: impl Fn(u8) -> bool) -> Option<u8> {
        match self.reader.peek() {
            Some(Ok(b)) if pred(*b) => self.reader.next().and_then(Result::ok),
            _ => None,
        }
    }
}

impl<R: Read> Iterator for Lexer<R> {
    type Item = Token;

    fn next(&mut self) -> Option<Token> {
        // EOF and read errors both end the stream (as in the original).
        let ch = match self.reader.next() {
            Some(Ok(ch)) => ch,
            _ => return None,
        };
        match ch {
            b if is_word_byte(b) => {
                let mut identifier = String::new();
                identifier.push(b as char);
                while let Some(next) = self.next_if_byte(is_word_byte) {
                    identifier.push(next as char);
                }
                Some(Token::Identifier(identifier))
            }
            b'0'..=b'9' | b'-' => {
                let negative = ch == b'-';
                // BUG FIX: a lone '-' (not followed by a digit) used to be
                // emitted as Number(0); it is an operator, not a number.
                if negative && !matches!(self.reader.peek(), Some(Ok(b'0'..=b'9'))) {
                    return Some(Token::Operator(b'-'));
                }
                let mut number: Number = if negative { 0 } else { (ch - b'0') as Number };
                while let Some(digit) = self.next_if_byte(|b| b.is_ascii_digit()) {
                    number = number * 10 + (digit - b'0') as Number;
                }
                Some(Token::Number(if negative { -number } else { number }))
            }
            _ => Some(Token::Operator(ch)),
        }
    }
}

23
day-03-rust/src/main.rs Normal file
View File

@@ -0,0 +1,23 @@
#![feature(let_chains)]
mod task1;
mod task2;
pub mod lexer;
/// CLI entry point: `<binary> <1|2> <input file>` runs the chosen task
/// against the given input file.
fn main() {
    // Expect exactly two user arguments: the task number and the input path.
    let args: Vec<String> = std::env::args().collect();
    if args.len() != 3 {
        eprintln!("Usage: {} <1|2> <input file>", args[0]);
        std::process::exit(1);
    }
    // Open lazily so the file is only touched for a recognized task number.
    let open_input = || std::fs::File::open(&args[2]).expect("File not found");
    match args[1].as_str() {
        "1" => task1::run(open_input()),
        "2" => task2::run(open_input()),
        _ => eprintln!("Unknown task {}", args[1]),
    }
}

25
day-03-rust/src/task1.rs Normal file
View File

@@ -0,0 +1,25 @@
use crate::lexer::{Lexer, Number, Token};
use std::io::Read;
/// Task 1: sum the products of every valid `mul(a,b)` instruction found in
/// the token stream, printing the total to stdout.
pub fn run<R: Read>(read: R) {
    // Peekable lookahead: `next_if` consumes a token only when it matches,
    // so a failed `mul(a,b)` attempt leaves the offending token in place to
    // start the next match. The previous unconditional `lexer.next()` chain
    // discarded that token — e.g. in "mul(mul(2,3))" the inner "mul"
    // identifier was swallowed and a valid instruction was missed. This also
    // makes task1 consistent with task2's matching strategy.
    let mut lexer = Lexer::new(read).peekable();
    let mut result: Number = 0;
    while let Some(token) = lexer.next() {
        // An instruction starts with an identifier ending in "mul";
        // arbitrary prefixes (e.g. "xmul") are allowed.
        let starts_mul = match &token {
            Token::Identifier(identifier) => identifier.ends_with("mul"),
            _ => false,
        };
        if !starts_mul {
            continue;
        }
        if lexer.next_if(|t| matches!(t, &Token::Operator(b'('))).is_none() {
            continue;
        }
        let first_number = match lexer.next_if(|t| matches!(t, &Token::Number(_))) {
            Some(Token::Number(n)) => n,
            _ => continue,
        };
        if lexer.next_if(|t| matches!(t, &Token::Operator(b','))).is_none() {
            continue;
        }
        let second_number = match lexer.next_if(|t| matches!(t, &Token::Number(_))) {
            Some(Token::Number(n)) => n,
            _ => continue,
        };
        if lexer.next_if(|t| matches!(t, &Token::Operator(b')'))).is_none() {
            continue;
        }
        result += first_number * second_number;
    }
    println!("Result: {}", result);
}

40
day-03-rust/src/task2.rs Normal file
View File

@@ -0,0 +1,40 @@
use crate::lexer::{Lexer, Number, Token};
use std::io::Read;
pub fn run<R: Read>(read: R) {
let mut lexer = Lexer::new(read).peekable();
let mut enabled = true;
let mut result: Number = 0;
while let Some(token) = lexer.next() {
if let Token::Identifier(identifier) = token {
if enabled && identifier.ends_with("mul") {
if let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b'(')))
&& let Some(Token::Number(first_number)) =
lexer.next_if(|token| matches!(token, &Token::Number(_)))
&& let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b',')))
&& let Some(Token::Number(second_number)) =
lexer.next_if(|token| matches!(token, &Token::Number(_)))
&& let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b')')))
{
result += first_number * second_number;
}
} else if identifier.ends_with("don't") {
if let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b'(')))
&& let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b')')))
{
enabled = false;
}
} else if identifier.ends_with("do") {
if let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b'(')))
&& let Some(_) = lexer.next_if(|token| matches!(token, &Token::Operator(b')')))
{
enabled = true;
}
}
}
}
println!("Result: {}", result);
}