Skip to content

Commit

Permalink
Implement beam
Browse files Browse the repository at this point in the history
  • Loading branch information
ichyo committed Jun 22, 2019
1 parent efba392 commit 62f061e
Show file tree
Hide file tree
Showing 4 changed files with 69 additions and 12 deletions.
9 changes: 5 additions & 4 deletions src/main.rs
@@ -1,16 +1,17 @@
use clap::{App, Arg};
use rayon::prelude::*;
use indicatif::ProgressBar;
use std::fs::File;
use std::io::Write;
use indicatif::ProgressBar;
use rayon::prelude::*;


use icfpc::models::*;
use icfpc::parse::read_all_inputs;
use icfpc::solve::solve_small;
use icfpc::solve::solve_beam;


fn solve<W: Write>(task: Task, f: &mut W) {
let cmds = solve_small(task);
let cmds = solve_beam(task);
for cmd in cmds {
write!(f, "{}", cmd).unwrap();
}
Expand Down
14 changes: 7 additions & 7 deletions src/models.rs
Expand Up @@ -3,7 +3,7 @@ use std::collections::HashMap;
use std::fmt;
use std::ops::Add;

#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct Point {
pub x: i32,
pub y: i32,
Expand Down Expand Up @@ -50,7 +50,7 @@ pub enum Direction {
Horizontal,
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Map(Vec<Point>);

impl Map {
Expand Down Expand Up @@ -113,7 +113,7 @@ impl Map {
}
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum BoosterType {
NewHand,
FastMove,
Expand All @@ -123,7 +123,7 @@ pub enum BoosterType {
Unknown,
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Booster {
pub kind: BoosterType,
pub point: Point,
Expand All @@ -135,7 +135,7 @@ impl Booster {
}
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Task {
pub width: usize,
pub height: usize,
Expand All @@ -145,7 +145,7 @@ pub struct Task {
pub boosters: Vec<Booster>,
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Move {
MoveUp,
MoveDown,
Expand All @@ -154,7 +154,7 @@ pub enum Move {
Noop,
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Command {
Move(Move),
TurnRight,
Expand Down
57 changes: 56 additions & 1 deletion src/solve.rs
Expand Up @@ -3,8 +3,10 @@ use crate::utils::Matrix;

use rand::seq::SliceRandom;
use rand::thread_rng;
use std::collections::VecDeque;
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashSet, VecDeque};

#[derive(Clone, Eq, PartialEq)]
pub struct State<'a> {
task: &'a Task,
current_point: Point,
Expand Down Expand Up @@ -202,6 +204,15 @@ impl<'a> State<'a> {

self.remaining > 0
}

/// True once the search state has nothing left to do —
/// `remaining` presumably counts squares still to wrap (TODO: confirm
/// against `State::initialize`); zero means the solution is complete.
pub fn finished(&self) -> bool {
self.remaining == 0
}

/// Beam-search score for this state — lower is better.
/// Currently just the amount of work left (`remaining`); the reversed
/// comparisons in `Ord for BeamEntry` turn this into max-heap priority.
fn score(&self) -> usize {
self.remaining
}
}

pub fn solve_small(task: Task) -> Vec<Command> {
Expand All @@ -213,3 +224,47 @@ pub fn solve_small(task: Task) -> Vec<Command> {
}
state.commands
}

/// Newtype over `State` so a heap ordering can be attached to it
/// (`std::collections::BinaryHeap` is a max-heap; the `Ord` impl
/// reverses comparisons so the "best" state surfaces first).
#[derive(Clone, Eq, PartialEq)]
struct BeamEntry<'a>(State<'a>);

impl<'a> Ord for BeamEntry<'a> {
    /// Heap priority for `BinaryHeap` (a max-heap): comparisons are
    /// reversed so an entry with FEWER commands — and, on ties, a LOWER
    /// score — compares as greater and is popped first.
    fn cmp(&self, other: &BeamEntry) -> Ordering {
        other
            .0
            .commands
            .len()
            .cmp(&self.0.commands.len())
            .then_with(|| other.0.score().cmp(&self.0.score()))
    }
}

impl<'a> PartialOrd for BeamEntry<'a> {
    /// Delegates to the total order defined by `Ord`, so both orderings
    /// always agree.
    fn partial_cmp(&self, other: &BeamEntry) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}

/// Beam search over `State`s: keep the `beam_length` best states each
/// generation, expand each into `branch` random successors, and stop as
/// soon as any state has wrapped everything.
///
/// Bug fix: the previous version selected survivors with
/// `BinaryHeap::into_iter().take(beam_length)`, but `into_iter` yields
/// elements in ARBITRARY order — the `Ord` impl on `BeamEntry` was never
/// consulted, so the beam kept random states instead of the best ones.
/// `pop()` returns the greatest element, so popping `beam_length` times
/// selects the genuinely best entries.
pub fn solve_beam(task: Task) -> Vec<Command> {
    let mut beam = BinaryHeap::new();
    let beam_length = 3;
    let branch = 3;
    beam.push(BeamEntry(State::initialize(&task)));
    loop {
        let mut next_beam = BinaryHeap::new();
        // Deduplicate successors by position within one generation so the
        // beam doesn't fill up with states sitting on the same square.
        let mut points_set = HashSet::new();
        for _ in 0..beam_length {
            // Fewer than `beam_length` survivors is fine — just expand
            // whatever the previous generation produced.
            let entry = match beam.pop() {
                Some(entry) => entry,
                None => break,
            };
            let state = entry.0;
            if state.finished() {
                return state.commands;
            }
            for _ in 0..branch {
                let mut next = state.clone();
                next.next_state();
                // `insert` returns false when the point was already seen,
                // replacing the old contains-then-insert double lookup.
                if points_set.insert(next.current_point) {
                    next_beam.push(BeamEntry(next));
                }
            }
        }
        beam = next_beam;
    }
}
1 change: 1 addition & 0 deletions src/utils.rs
@@ -1,5 +1,6 @@
use crate::models::Point;

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Matrix<T> {
width: usize,
height: usize,
Expand Down

0 comments on commit 62f061e

Please sign in to comment.