Skip to content

Commit

Permalink
ur: add no-std support
Browse files Browse the repository at this point in the history
It turns out to be unnecessary for the decoder queue to be
two-sided. We thus use the simpler Vec instead.
  • Loading branch information
dspicher committed Apr 21, 2023
1 parent f2171c6 commit fd9bcad
Show file tree
Hide file tree
Showing 9 changed files with 68 additions and 26 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/no-std.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# CI workflow: verify the crate builds in no-std environments using the
# cargo-nono checker (https://crates.io/crates/cargo-nono).
name: no-std

# Run on pushes to master and on pull requests targeting master.
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]

jobs:
no-std:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
# --locked respects cargo-nono's lockfile; --debug skips the slower
# release-mode build of the tool itself.
- run: cargo install --locked --debug cargo-nono
# nono doesn't recognize bitcoin-hashes' no-std support
# The leading `!` inverts the pipeline's exit status: the step fails
# if any "ur:" line in cargo-nono's output is not marked SUCCESS.
- run: (! cargo nono check | grep "ur:" | grep -v SUCCESS)
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## Unreleased
- Added support for `no-std` environments. https://github.com/dspicher/ur-rs/pull/183

## [0.3.0] - 2023-01-07
- Added `ur::ur::decode` to the public API to decode a single `ur` URI. https://github.com/dspicher/ur-rs/pull/112
Expand Down
7 changes: 6 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ edition = "2021"
repository = "https://github.com/dspicher/ur-rs/"

[dependencies]
bitcoin_hashes = "0.12"
bitcoin_hashes = { version = "0.12", default-features = false }
crc = "3"
minicbor = { version = "0.19", features = ["alloc"] }
phf = { version = "0.11", features = ["macros"], default-features = false }
Expand All @@ -20,3 +20,8 @@ rand_xoshiro = "0.6"
[dev-dependencies]
hex = "0.4"
qrcode = { version = "0.12", default-features = false }

# Crate feature flags.
# "std" is enabled by default; building with `--no-default-features`
# produces a no-std build (the crate sets `#![no_std]` when "std" is off,
# and the std-only `std::error::Error` impls are gated on this feature).
[features]
default = ["std"]
std = []

10 changes: 7 additions & 3 deletions src/bytewords.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,9 @@
//! assert_eq!(data, decode(&encoded, Style::Minimal).unwrap());
//! ```

extern crate alloc;
use alloc::vec::Vec;

/// The three different `bytewords` encoding styles. See the [`encode`] documentation for examples.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Style {
Expand All @@ -60,8 +63,8 @@ pub enum Error {
NonAscii,
}

impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Error::InvalidWord => write!(f, "invalid word"),
Error::InvalidChecksum => write!(f, "invalid checksum"),
Expand All @@ -71,6 +74,7 @@ impl std::fmt::Display for Error {
}
}

#[cfg(feature = "std")]
impl std::error::Error for Error {}

/// Decodes a `bytewords`-encoded String back into a byte payload. The encoding
Expand Down Expand Up @@ -159,7 +163,7 @@ fn strip_checksum(mut data: Vec<u8>) -> Result<Vec<u8>, Error> {
/// assert_eq!(encode(&[0], Style::Minimal), "aetdaowslg");
/// ```
#[must_use]
pub fn encode(data: &[u8], style: Style) -> String {
pub fn encode(data: &[u8], style: Style) -> alloc::string::String {
let checksum = crate::crc32().checksum(data).to_be_bytes();
let data = data.iter().chain(checksum.iter());
let words: Vec<&str> = match style {
Expand Down
37 changes: 20 additions & 17 deletions src/fountain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,9 @@
//! );
//! ```

use std::convert::Infallible;
extern crate alloc;
use alloc::vec::Vec;
use core::convert::Infallible;

/// Errors that can happen during fountain encoding and decoding.
#[derive(Debug)]
Expand All @@ -105,8 +107,8 @@ pub enum Error {
InvalidPadding,
}

impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Self::CborDecode(e) => write!(f, "{e}"),
Self::CborEncode(e) => write!(f, "{e}"),
Expand Down Expand Up @@ -213,7 +215,7 @@ impl Encoder {
self.current_sequence += 1;
let indexes = choose_fragments(self.current_sequence, self.parts.len(), self.checksum);

let mut mixed = vec![0; self.parts[0].len()];
let mut mixed = alloc::vec![0; self.parts[0].len()];
for item in indexes {
xor(&mut mixed, &self.parts[item]);
}
Expand Down Expand Up @@ -273,10 +275,10 @@ impl Encoder {
/// See the [`crate::fountain`] module documentation for an example.
#[derive(Default)]
pub struct Decoder {
decoded: std::collections::HashMap<usize, Part>,
received: std::collections::HashSet<Vec<usize>>,
buffer: std::collections::HashMap<Vec<usize>, Part>,
queue: std::collections::VecDeque<(usize, Part)>,
decoded: alloc::collections::btree_map::BTreeMap<usize, Part>,
received: alloc::collections::btree_set::BTreeSet<Vec<usize>>,
buffer: alloc::collections::btree_map::BTreeMap<Vec<usize>, Part>,
queue: Vec<(usize, Part)>,
sequence_count: usize,
message_length: usize,
checksum: u32,
Expand Down Expand Up @@ -330,14 +332,14 @@ impl Decoder {
fn process_simple(&mut self, part: Part) -> Result<(), Error> {
let index = *part.indexes().first().ok_or(Error::ExpectedItem)?;
self.decoded.insert(index, part.clone());
self.queue.push_back((index, part));
self.queue.push((index, part));
self.process_queue()?;
Ok(())
}

fn process_queue(&mut self) -> Result<(), Error> {
while !self.queue.is_empty() {
let (index, simple) = self.queue.pop_front().ok_or(Error::ExpectedItem)?;
let (index, simple) = self.queue.pop().ok_or(Error::ExpectedItem)?;
let to_process: Vec<Vec<usize>> = self
.buffer
.keys()
Expand All @@ -356,7 +358,7 @@ impl Decoder {
if new_indexes.len() == 1 {
self.decoded
.insert(*new_indexes.first().unwrap(), part.clone());
self.queue.push_back((*new_indexes.first().unwrap(), part));
self.queue.push((*new_indexes.first().unwrap(), part));
} else {
self.buffer.insert(new_indexes, part);
}
Expand Down Expand Up @@ -388,7 +390,7 @@ impl Decoder {
}
if indexes.len() == 1 {
self.decoded.insert(*indexes.first().unwrap(), part.clone());
self.queue.push_back((*indexes.first().unwrap(), part));
self.queue.push((*indexes.first().unwrap(), part));
} else {
self.buffer.insert(indexes, part);
}
Expand Down Expand Up @@ -468,7 +470,7 @@ impl Decoder {
.map(|idx| self.decoded.get(&idx).ok_or(Error::ExpectedItem))
.collect::<Result<Vec<&Part>, Error>>()?
.iter()
.fold(vec![], |a, b| [a, b.data.clone()].concat());
.fold(alloc::vec![], |a, b| [a, b.data.clone()].concat());
if !combined
.get(self.message_length..)
.ok_or(Error::ExpectedItem)?
Expand Down Expand Up @@ -588,8 +590,8 @@ impl Part {
}

#[must_use]
pub(crate) fn sequence_id(&self) -> String {
format!("{}-{}", self.sequence, self.sequence_count)
pub(crate) fn sequence_id(&self) -> alloc::string::String {
alloc::format!("{}-{}", self.sequence, self.sequence_count)
}

/// Returns a slice view onto the underlying data.
Expand Down Expand Up @@ -634,15 +636,16 @@ pub(crate) fn fragment_length(data_length: usize, max_fragment_length: usize) ->

#[must_use]
pub(crate) fn partition(mut data: Vec<u8>, fragment_length: usize) -> Vec<Vec<u8>> {
let mut padding = vec![0; (fragment_length - (data.len() % fragment_length)) % fragment_length];
let mut padding =
alloc::vec![0; (fragment_length - (data.len() % fragment_length)) % fragment_length];
data.append(&mut padding);
data.chunks(fragment_length).map(<[u8]>::to_vec).collect()
}

#[must_use]
fn choose_fragments(sequence: usize, fragment_count: usize, checksum: u32) -> Vec<usize> {
if sequence <= fragment_count {
return vec![sequence - 1];
return alloc::vec![sequence - 1];
}

#[allow(clippy::cast_possible_truncation)]
Expand Down
4 changes: 4 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,11 @@
//! of a fountain encoder, which splits up a byte payload into multiple segments
//! and emits an unbounded stream of parts which can be recombined at the receiving
//! decoder side.

#![forbid(unsafe_code)]
#![cfg_attr(not(feature = "std"), no_std)]

extern crate alloc;

pub mod bytewords;
pub mod fountain;
Expand Down
7 changes: 5 additions & 2 deletions src/sampler.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
extern crate alloc;
use alloc::vec::Vec;

#[derive(Debug)]
pub struct Weighted {
aliases: Vec<u32>,
Expand All @@ -22,8 +25,8 @@ impl Weighted {
.map(|j| count - j)
.partition(|&j| weights[j] < 1.0);

let mut probs: Vec<f64> = vec![0.0; count];
let mut aliases: Vec<u32> = vec![0; count];
let mut probs: Vec<f64> = alloc::vec![0.0; count];
let mut aliases: Vec<u32> = alloc::vec![0; count];

while !s.is_empty() && !l.is_empty() {
let a = s.remove(s.len() - 1);
Expand Down
9 changes: 6 additions & 3 deletions src/ur.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@
//! assert_eq!(decoder.message().unwrap().as_deref(), Some(data.as_bytes()));
//! ```

extern crate alloc;
use alloc::{string::String, vec::Vec};

/// Errors that can happen during encoding and decoding of URs.
#[derive(Debug)]
pub enum Error {
Expand All @@ -43,8 +46,8 @@ pub enum Error {
NotMultiPart,
}

impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Error::Bytewords(e) => write!(f, "{e}"),
Error::Fountain(e) => write!(f, "{e}"),
Expand Down Expand Up @@ -86,7 +89,7 @@ pub fn encode<T: Into<String>>(data: &[u8], ur_type: T) -> String {

#[must_use]
fn encode_ur(items: &[String]) -> String {
format!("{}:{}", "ur", items.join("/"))
alloc::format!("{}:{}", "ur", items.join("/"))
}

/// A uniform resource encoder with an underlying fountain encoding.
Expand Down
2 changes: 2 additions & 0 deletions src/xoshiro.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
extern crate alloc;
use alloc::vec::Vec;
use bitcoin_hashes::Hash;
use rand_xoshiro::rand_core::RngCore;
use rand_xoshiro::rand_core::SeedableRng;
Expand Down

0 comments on commit fd9bcad

Please sign in to comment.