Skip to content
12 changes: 12 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ bincode = "1.3.3"
rand = "0.8.5"
renet = "0.0.15"
serde = "1.0.203"
serde_repr = "0.1"

[profile.dev.package."*"]
opt-level = 3
Expand Down
140 changes: 140 additions & 0 deletions src/client/terrain/util/buffer_serializer.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
/// A single run-length-encoding token: `count` consecutive copies of `symbol`.
#[derive(Debug, PartialEq)]
pub struct RLEToken {
    symbol: u8,
    count: u16,
}

// Serialized token layout: [symbol: u8][count: u16, little endian] — 3 bytes.
const TOKEN_BYTE_COUNT: usize = 3;
const SYMBOL_OFFSET: usize = 0;
const COUNT_OFFSET: usize = 1;
const COUNT_LENGTH: usize = 2;

/// Run-length-encodes `array` and packs the tokens into a flat byte buffer.
///
/// Each token occupies `TOKEN_BYTE_COUNT` bytes: the symbol, then the run
/// length as a little-endian `u16`. An empty input yields an empty buffer.
pub fn serialize_buffer(array: Vec<u8>) -> Vec<u8> {
    let tokens = tokenize_buffer(array);

    // Preallocate: every token serializes to exactly TOKEN_BYTE_COUNT bytes.
    let mut bytes = Vec::with_capacity(tokens.len() * TOKEN_BYTE_COUNT);
    for token in &tokens {
        bytes.extend_from_slice(&token.symbol.to_le_bytes());
        bytes.extend_from_slice(&token.count.to_le_bytes());
    }

    bytes
}

/// Collapses `array` into run-length tokens.
///
/// Fixes two defects of the earlier version: an empty input no longer panics
/// (it yields no tokens), and runs longer than `u16::MAX` are split across
/// multiple tokens so the `u16` count can never overflow.
fn tokenize_buffer(array: Vec<u8>) -> Vec<RLEToken> {
    let mut vec = Vec::<RLEToken>::new();

    if array.is_empty() {
        // Nothing to tokenize; the previous version indexed array[0] here
        // and panicked.
        return vec;
    }

    let mut last_symbol = array[0];
    let mut count: u16 = 1;

    for &element in array.iter().skip(1) {
        // Extend the current run only while the count still fits in a u16;
        // otherwise flush the token and start a fresh run of the same symbol.
        if last_symbol == element && count < u16::MAX {
            count += 1;
        } else {
            vec.push(RLEToken {
                count,
                symbol: last_symbol,
            });
            last_symbol = element;
            count = 1;
        }
    }
    vec.push(RLEToken {
        count,
        symbol: last_symbol,
    });

    vec
}

/// Expands an RLE byte buffer (as produced by `serialize_buffer`) back into
/// the original byte sequence.
///
/// Each 3-byte token is `[symbol: u8][count: u16, little endian]`; the symbol
/// is repeated `count` times in the output.
pub fn deserialize_buffer(bytes: &[u8]) -> Vec<u8> {
    // Token layout, kept local so the function reads standalone.
    const TOKEN_BYTES: usize = 3;

    let mut output = Vec::<u8>::new();
    for token in bytes.chunks(TOKEN_BYTES) {
        let symbol = token[0];
        let count = u16::from_le_bytes(token[1..TOKEN_BYTES].try_into().unwrap());
        output.extend(std::iter::repeat(symbol).take(usize::from(count)));
    }

    output
}

#[cfg(test)]
pub mod tests {
    use super::*;

    /// Expands RLE tokens back into the raw byte sequence they encode.
    fn revert_buffer_tokenization(tokens: Vec<RLEToken>) -> Vec<u8> {
        let mut output = Vec::<u8>::new();
        for token in &tokens {
            output.extend(std::iter::repeat(token.symbol).take(usize::from(token.count)));
        }
        output
    }

    #[test]
    fn test_tokenize_buffer() {
        let array = vec![1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3];

        let tokens = tokenize_buffer(array);

        let expected_tokens = vec![
            RLEToken { symbol: 1, count: 4 },
            RLEToken { symbol: 2, count: 3 },
            RLEToken { symbol: 3, count: 5 },
        ];
        assert_eq!(tokens, expected_tokens);
    }

    #[test]
    fn test_revert_buffer_tokenization() {
        let tokens = vec![
            RLEToken { symbol: 1, count: 4 },
            RLEToken { symbol: 2, count: 3 },
            RLEToken { symbol: 3, count: 5 },
        ];

        let array = revert_buffer_tokenization(tokens);

        let expected_array = vec![1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3];
        assert_eq!(array, expected_array);
    }

    #[test]
    fn test_compressed_buffer_is_smaller() {
        let array = vec![1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3];
        let uncompressed_len = array.len() * std::mem::size_of::<u8>();

        let compressed_len = serialize_buffer(array).len();

        // RLE must beat the raw representation on this run-heavy input.
        assert!(compressed_len < uncompressed_len);
    }

    #[test]
    fn test_serialization_deserialization() {
        let array = vec![1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3];

        // Round-trip: serialize then deserialize must reproduce the input.
        let round_tripped = deserialize_buffer(&serialize_buffer(array.clone()));

        assert_eq!(array, round_tripped);
    }
}
64 changes: 61 additions & 3 deletions src/client/terrain/util/chunk.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
use super::buffer_serializer::{deserialize_buffer, serialize_buffer};
use crate::prelude::*;
use serde::ser::SerializeStruct;
use serde::{Deserialize, Serialize};

pub const CHUNK_SIZE: usize = 32;
pub const PADDED_CHUNK_SIZE: usize = CHUNK_SIZE + 2;
Expand Down Expand Up @@ -37,9 +40,9 @@ impl Chunk {

#[rustfmt::skip]
/// Linearizes padded-chunk coordinates into a flat array index.
///
/// # Panics
/// Panics when any coordinate is outside the padded chunk volume.
pub fn index(x: usize, y: usize, z: usize) -> usize {
    // Single bounds guard (the diff had accidentally duplicated this check).
    if (x >= PADDED_CHUNK_SIZE) || (y >= PADDED_CHUNK_SIZE) || (z >= PADDED_CHUNK_SIZE) {
        panic!("Index out of bounds: ({}, {}, {})", x, y, z);
    }
    // Row-major linearization: x varies fastest, then y, then z.
    x + PADDED_CHUNK_USIZE * (y + PADDED_CHUNK_USIZE * z)
}

Expand All @@ -53,3 +56,58 @@ impl Default for Chunk {
Self::new(Vec3::ZERO)
}
}

impl Serialize for Chunk {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let data_as_u8: Vec<u8> = self.data.iter().map(|block_id| block_id.to_u8()).collect();
let serialized_data = serialize_buffer(data_as_u8);
let mut state = serializer.serialize_struct("Chunk", 2)?;
state.serialize_field("data", &serialized_data)?;
state.serialize_field("position", &self.position)?;
state.end()
}
}

/// Newtype wrapper so the compressed chunk payload deserializes as a plain
/// `Vec<u8>` field inside `Chunk`'s custom `Deserialize` impl.
struct BytesVec(Vec<u8>);

impl<'de> Deserialize<'de> for BytesVec {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Delegate to Vec<u8> and wrap the result.
        Vec::<u8>::deserialize(deserializer).map(BytesVec)
    }
}

impl<'de> Deserialize<'de> for Chunk {
Copy link
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Inspired by the deserialization docs:
https://serde.rs/impl-serializer.html

fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
struct ChunkData {
data: BytesVec,
position: Vec3,
}

let ChunkData { data, position } = ChunkData::deserialize(deserializer)?;
let chunk_data_bytes_u8: Vec<u8> = data.0;
let bytes_slice: &[u8] = &chunk_data_bytes_u8;
let deserialized_data = deserialize_buffer(bytes_slice);
let data_as_block_id: [BlockId; CHUNK_LENGTH] = deserialized_data
.into_iter()
.map(|i| BlockId::from_u8(i).unwrap())
.collect::<Vec<BlockId>>()
.try_into()
.map_err(|_| serde::de::Error::custom("Failed to convert data to BlockId array"))?;

Ok(Chunk {
data: data_as_block_id,
position,
})
}
}
2 changes: 2 additions & 0 deletions src/client/terrain/util/mod.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
pub mod blocks;
pub mod buffer_serializer;
pub mod chunk;
pub mod generator;
pub mod mesher;

pub use blocks::*;
pub use buffer_serializer::*;
pub use chunk::*;
pub use generator::*;
pub use mesher::*;
56 changes: 40 additions & 16 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,44 @@ pub enum NetworkingMessage {
BlockUpdate { position: Vec3, block: BlockId },
}

// NOTE(review): this is the earlier hand-written BlockId definition; the diff
// replaces it with the `enum_from_u8!` invocation below — confirm only one
// definition survives in the final file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum BlockId {
Air,
Grass,
Dirt,
Stone,
Bedrock,
RedSand,
BrownTerracotta,
CyanTerracotta,
GrayTerracotta,
LightGrayTerracotta,
OrangeTerracotta,
RedTerracotta,
Terracotta,
YellowTerracotta,
/// Generates a `#[repr(u8)]` enum together with `from_u8`/`to_u8` conversion
/// methods, keeping the enum definition itself free of boilerplate.
macro_rules! enum_from_u8 {
    ($name:ident { $( $variant:ident ),* $(,)? }) => {
        #[repr(u8)]
        #[derive(Debug, PartialEq, Copy, Clone, Deserialize, Serialize)]
        pub enum $name {
            $( $variant ),*
        }

        impl $name {
            /// Maps a raw byte back to its variant; `None` for unknown values.
            pub fn from_u8(value: u8) -> Option<$name> {
                match value {
                    $(x if x == $name::$variant as u8 => Some($name::$variant),)*
                    _ => None,
                }
            }

            /// Returns the variant's `u8` discriminant. The enum is `Copy`,
            /// so a plain dereference suffices — no `clone()` needed
            /// (clippy: `clone_on_copy`).
            pub fn to_u8(&self) -> u8 {
                *self as u8
            }
        }
    };
}

// Block identifiers for terrain voxels. Variant order fixes the u8
// discriminants produced by `to_u8`/`from_u8`, which are what gets
// serialized — append new variants at the end; reordering would break
// previously serialized data.
enum_from_u8! {
    BlockId {
        Air,
        Grass,
        Dirt,
        Stone,
        Bedrock,
        RedSand,
        BrownTerracotta,
        CyanTerracotta,
        GrayTerracotta,
        LightGrayTerracotta,
        OrangeTerracotta,
        RedTerracotta,
        Terracotta,
        YellowTerracotta,
    }
}