open_tavern/src/game/mod.rs

//! Game parser and data types.
use std::{collections::HashMap, marker::PhantomData};
use character_sheet::{AccessLevel, Character, CharacterShort, EntryType};
use serde::{Deserialize, Serialize};
pub mod character_sheet;
pub mod chat_message;
pub mod entry;
pub trait GameImpl<'a, C: Character<A> + Serialize + Deserialize<'a>, A: entry::GameEntry + Serialize + Deserialize<'a>> {
    /// Creates a new game
    fn new() -> Self;
    /// Creates a new character, returning the character id
    fn create_character(&mut self) -> usize;
    /// Renders the character's sheet entries visible at the given access level
    fn display_character(&self, character_id: usize, access: AccessLevel) -> Vec<Option<(String, EntryType)>>;
    /// Lists the ids of all characters in the game
    fn characters(&self) -> Vec<usize>;
    /// Returns a short summary of the character, if it exists
    fn character_short(&self, character_id: usize) -> Option<CharacterShort>;
    /// Creates a new token on the given map, returning the token id
    fn create_token(&mut self, map_id: usize, character: String, img_source: String, x: f32, y: f32) -> usize;
    /// Moves a token to a new position, returning `false` if the token does not exist
    fn move_token(&mut self, map_id: usize, token_id: usize, x: f32, y: f32) -> bool;
    /// Looks up a token's info, if it exists
    fn token_info(&self, map_id: usize, token_id: usize) -> Option<&TokenInfo>;
    /// Iterates over the ids of all existing tokens
    fn available_tokens(&self) -> impl Iterator<Item = usize>;
}
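// Illustrative sketch, not part of the original module: a generic helper showing
// how a caller might drive the `GameImpl` token API. The function name, the
// concrete `C`/`A` type parameters, and the "npc"/"assets/example.png" values are
// placeholders, not assumptions about the rest of the crate.
#[allow(dead_code)]
fn game_impl_usage_sketch<'a, C, A, G>(game: &mut G) -> Option<(f32, f32)>
where
    C: Character<A> + Serialize + Deserialize<'a>,
    A: entry::GameEntry + Serialize + Deserialize<'a>,
    G: GameImpl<'a, C, A>,
{
    // Place a token on map 0, move it, then read back its position.
    let id = game.create_token(0, "npc".to_string(), "assets/example.png".to_string(), 1.0, 1.0);
    game.move_token(0, id, 3.5, 4.0);
    game.token_info(0, id).map(|ti| (ti.x, ti.y))
}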
#[derive(Serialize, Deserialize)]
pub struct Game<C: Character<A> + Serialize, A: entry::GameEntry + Serialize> {
    _a: PhantomData<A>,
    characters: Vec<C>,
    tokens: HashMap<usize, TokenInfo>,
}
impl<'a, C: Character<A> + Serialize + Deserialize<'a>, A: entry::GameEntry + Serialize + Deserialize<'a>> GameImpl<'a, C, A> for Game<C, A> {
    fn new() -> Self {
        // Seed a hard-coded demo token so a fresh game starts with something on map 0.
        let mut tokens = HashMap::new();
        tokens.insert(0, TokenInfo { character: "bart".to_string(), map_id: 0, img_source: "assets/pf2r/tokens/louise.jpg".to_string(), x: 2.0, y: 2.0 });
        Self {
            _a: PhantomData,
            characters: Vec::new(),
            tokens,
        }
    }
    fn create_character(&mut self) -> usize {
        self.characters.push(C::default());
        self.characters.len() - 1
    }
    fn move_token(&mut self, _map_id: usize, token_id: usize, x: f32, y: f32) -> bool {
        if let Some(ti) = self.tokens.get_mut(&token_id) {
            ti.x = x;
            ti.y = y;
            true
        } else {
            false
        }
    }
    fn token_info(&self, _map_id: usize, token_id: usize) -> Option<&TokenInfo> {
        self.tokens.get(&token_id)
    }
    fn available_tokens(&self) -> impl Iterator<Item = usize> {
        // keys() yields &usize, so copy them out to return owned ids
        self.tokens.keys().copied()
    }
    fn create_token(&mut self, map_id: usize, character: String, img_source: String, x: f32, y: f32) -> usize {
        // Find the lowest unused token id by scanning up from 0.
        let mut id = 0;
        while self.tokens.contains_key(&id) {
            id += 1;
        }
        self.tokens.insert(id, TokenInfo { character, map_id, img_source, x, y });
        id
    }
    fn display_character(&self, character_id: usize, access: AccessLevel) -> Vec<Option<(String, EntryType)>> {
        self.characters
            .get(character_id)
            .map(|c| c.display(access))
            .unwrap_or_default()
    }
    fn characters(&self) -> Vec<usize> {
        // TODO: store characters with stable ids instead of relying on Vec indices;
        // deleting a character currently shifts every later id.
        (0_usize..self.characters.len()).collect()
    }
    fn character_short(&self, character_id: usize) -> Option<CharacterShort> {
        self.characters
            .get(character_id)
            .map(|c| c.short())
    }
}
#[derive(Serialize, Deserialize, Debug)]
pub struct TokenInfo {
    /// Which character the token refers to
    pub character: String,
    /// Which map the token exists in (allowing multiple tokens across multiple maps)
    pub map_id: usize,
    /// Token image source, as a path relative to the data directory
    pub img_source: String,
    // x and y are floats to allow 'free movement'
    /// X position, in grid-slot units (integer values are grid aligned)
    pub x: f32,
    /// Y position, in grid-slot units (integer values are grid aligned)
    pub y: f32,
}