Compare commits

...

No commits in common. "main" and "547edb04bebd048a0833cc1056c5c078e7b4e5d6" have entirely different histories.

26 changed files with 2586 additions and 208 deletions

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "cargo" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"

.github/workflows/mdbook.yml vendored Normal file

@@ -0,0 +1,60 @@
# Sample workflow for building and deploying a mdBook site to GitHub Pages
#
# To get started with mdBook see: https://rust-lang.github.io/mdBook/index.html
#
name: Deploy mdBook site to Pages
on:
# Runs on pushes targeting the default branch
push:
branches: ["main"]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
# Build job
build:
runs-on: ubuntu-latest
env:
MDBOOK_VERSION: 0.4.36
steps:
- uses: actions/checkout@v4
- name: Install mdBook
run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
rustup update
cargo install --version ${MDBOOK_VERSION} mdbook
- name: Setup Pages
id: pages
uses: actions/configure-pages@v4
- name: Build with mdBook
run: mdbook build
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: ./book
# Deployment job
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
needs: build
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

.github/workflows/publish.yml vendored Normal file

@@ -0,0 +1,18 @@
name: Publish Crate
on:
push:
tags:
- '*'
jobs:
create-release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: katyo/publish-crates@v2
with:
registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}

.github/workflows/release-please.yml vendored Normal file

@@ -0,0 +1,18 @@
name: Release Please
on:
push:
branches:
- main
permissions:
contents: write
pull-requests: write
jobs:
release-please:
runs-on: ubuntu-latest
steps:
- uses: google-github-actions/release-please-action@v4
with:
release-type: rust

.github/workflows/rust-clippy.yml vendored Normal file

@@ -0,0 +1,55 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# rust-clippy is a tool that runs a bunch of lints to catch common
# mistakes in your Rust code and help improve your Rust code.
# More details at https://github.com/rust-lang/rust-clippy
# and https://rust-lang.github.io/rust-clippy/
name: rust-clippy analyze
on:
push:
branches: [ "main" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "main" ]
schedule:
- cron: '25 0 * * 5'
jobs:
rust-clippy-analyze:
name: Run rust-clippy analysis
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install Rust toolchain
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af #@v1
with:
profile: minimal
toolchain: stable
components: clippy
override: true
- name: Install required cargo
run: cargo install clippy-sarif sarif-fmt
- name: Run rust-clippy
run:
cargo clippy
--all-features
--message-format=json | clippy-sarif | tee rust-clippy-results.sarif | sarif-fmt
continue-on-error: true
- name: Upload analysis results to GitHub
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: rust-clippy-results.sarif
wait-for-processing: true

.github/workflows/rust.yml vendored Normal file

@@ -0,0 +1,21 @@
name: Rust
on:
push:
branches: ["main"]
pull_request:
branches: ["main"]
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose

.gitignore vendored

@@ -1 +1,2 @@
docs/book
/target

.release-please-manifest.json Normal file

@@ -0,0 +1,3 @@
{
".": "0.0.0"
}

Cargo.lock generated (1808 changed lines)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,30 +1,40 @@
[package]
name = "okid"
version = "0.1.3"
version = "0.1.39"
edition = "2021"
publish = ["oksoftware"]
readme = "README.md"
description = "A library for gereating double clickable ids"
license = "BSD-3-Clause"
[dependencies]
async-graphql = { version = "7.0.11", optional = true }
blake3 = { version = "1.5.4", optional = true }
bytes = { version = "1.7.2", features = ["serde"] }
digest = "0.10.7"
enumflags2 = "0.7.10"
git2 = { version = "0.18.3", optional = true, default-features = false }
hex = { version = "0.4.3", features = ["serde"] }
jetstream_wireformat = "6.0.0"
serde = { version = "1.0.210", features = ["derive"] }
serde_bytes = "0.11.15"
serde_json = "1.0.128"
sha1 = { version = "0.10.6", optional = true }
sha2 = { version = "0.10.8", optional = true }
sha3 = { version = "0.10.8", optional = true }
ulid = { version = "1.1.3", optional = true, features = ["uuid"] }
utoipa = { version = "^5.0.0-beta.0", optional = true }
uuid = { version = "1.10.0", optional = true, features = ["js", "v4"] }
[features]
default = ["sha1", "sha2", "sha3", "blake3", "uuid", "ulid"]
default = ["sha1", "sha2", "sha3", "blake3", "uuid", "ulid", "openapi"]
sha1 = ["dep:sha1"]
sha2 = ["dep:sha2"]
sha3 = ["dep:sha3"]
blake3 = ["dep:blake3"]
uuid = ["dep:uuid"]
ulid = ["dep:ulid"]
openapi = ["dep:utoipa"]
git = ["dep:git2"]
graphql = ["dep:async-graphql"]
[dev-dependencies]
insta = { version = "1.40.0", features = ["yaml"] }

README.md

@@ -2,4 +2,25 @@
# okid
IDs represented as self identifying strings.
`okid` is a library for generating double clickable representations of various types of data,
such as `sha1` hashes, `uuid`s and more.
## sha1
```rust
use sha1::Digest as sha1digest;
let hasher = sha1::Sha1::new();
let binary_id = okid::OkId::from(hasher);
```
## sha256
```rust
use sha2::Digest;
let mut hasher = sha2::Sha256::new();
hasher.update(b"hello world");
let binary_id = okid::OkId::from(hasher);
```
The resulting strings look like this:
`2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9`
The first character of the string identifies the type of the binary data; in this case 2 means sha256.
The rest of the string is the hexadecimal representation of the binary data.
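As an editorial aside (not part of the README hunk above), a minimal sketch of round-tripping an `OkId` through this string form; it leans on the `Display` and `FromStr` behaviour exercised by the tests later in this diff:

```rust
use sha2::Digest;

fn main() {
    let mut hasher = sha2::Sha256::new();
    hasher.update(b"hello world");
    let id = okid::OkId::from(hasher);

    // Display gives the string form shown above; parse() goes back through FromStr.
    let text = id.to_string();
    let parsed: okid::OkId = text.parse().expect("well-formed OkId string");
    assert_eq!(parsed, id);
}
```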

book.toml Normal file

@@ -0,0 +1,5 @@
[book]
authors = ["sevki"]
language = "en"
multilingual = false
src = "docs/src"

docs/src/SUMMARY.md Normal file

@@ -0,0 +1,3 @@
# Summary
- [Intro](intro.md)

docs/src/intro.md Normal file

@@ -0,0 +1,26 @@
<img src="https://assets.ok.software/okid.png" align="right" width="200">
# okid
`okid` is a library for generating double clickable representations of various types of data,
such as `sha1` hashes, `uuid`s and more.
## sha1
```rust
use sha1::Digest as sha1digest;
let hasher = sha1::Sha1::new();
let binary_id = okid::OkId::from(hasher);
```
## sha256
```rust
use sha2::Digest;
let mut hasher = sha2::Sha256::new();
hasher.update(b"hello world");
let binary_id = okid::OkId::from(hasher);
```
The resulting strings look like this:
`2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9`
The first character of the string identifies the type of the binary data; in this case 2 means sha256.
The rest of the string is the hexadecimal representation of the binary data.
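Another editorial sketch, this time of the serde behaviour covered by the `test_serde_*` tests further down: an `OkId` serializes to and from a plain JSON string. `serde_json` is assumed to be available, as in the crate's own dev-dependencies:

```rust
use sha2::Digest;

fn main() {
    let mut hasher = sha2::Sha256::new();
    hasher.update(b"hello world");
    let id = okid::OkId::from(hasher);

    // OkId serializes as its plain string form rather than as a struct.
    let json = serde_json::to_string(&id).expect("serialize");
    let back: okid::OkId = serde_json::from_str(&json).expect("deserialize");
    assert_eq!(back, id);
}
```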

docs/src/okRust.png Normal file (binary file not shown; 273 KiB)

(binary image file changed, not shown; 438 KiB before and after)

release-please-config.json Normal file

@@ -0,0 +1,14 @@
{
"packages": {
".": {
"package-name": "okrust",
"changelog-path": "CHANGELOG.md",
"release-type": "rust",
"bump-minor-pre-major": false,
"bump-patch-for-minor-pre-major": true,
"draft": false,
"prerelease": false
}
},
"$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json"
}

src/blake3.rs

@@ -1,9 +1,10 @@
use std::fmt::Display;
use crate::{BinaryType, Digest, OkId};
use crate::{BinaryType, Digest, IntoOkId, OkId};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Blake3(pub(super) [u8; 32]);
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Blake3([u8; 32]);
impl From<blake3::Hasher> for OkId {
fn from(value: blake3::Hasher) -> Self {
let data = value.finalize();
@@ -20,6 +21,8 @@ impl From<blake3::Hasher> for OkId {
}
}
impl IntoOkId for blake3::Hasher {}
impl Display for Blake3 {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let data = self.0;

src/fingerprint.rs Normal file

@@ -0,0 +1,36 @@
use std::{fmt::Display, str::FromStr};
use super::{BinaryType, Digest, IntoOkId, OkId};
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Fingerprint(pub(super) u64);
impl From<u64> for OkId {
fn from(value: u64) -> Self {
Self {
hash_type: BinaryType::Fingerprint,
digest: Digest::Fingerprint(Fingerprint(value)),
}
}
}
impl IntoOkId for u64 {}
impl Display for Fingerprint {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let buf = hex::encode(self.0.to_be_bytes());
f.write_str(&buf)?;
Ok(())
}
}
impl FromStr for Fingerprint {
type Err = super::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let buf = hex::decode(s)?;
let mut hash: [u8; 8] = [0; 8];
hash.copy_from_slice(&buf);
Ok(Fingerprint(u64::from_be_bytes(hash)))
}
}
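A short editorial sketch of the new fingerprint path: per the `From<u64>` and `Display` impls above, any `u64` becomes a fingerprint-typed `OkId` whose string form is the `f` char code followed by the big-endian hex of the value:

```rust
fn main() {
    // From<u64> (and IntoOkId for u64) produce a fingerprint-typed OkId.
    let id = okid::OkId::from(0x00ff00ff00ff00ffu64);
    let text = id.to_string();

    // Char code 'f', then the separator, then the big-endian hex of the u64.
    assert!(text.starts_with('f'));
    assert!(text.ends_with("00ff00ff00ff00ff"));
}
```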

src/lib.rs

@@ -8,7 +8,6 @@
//! use sha1::Digest as sha1digest;
//! let hasher = sha1::Sha1::new();
//! let binary_id = okid::OkId::from(hasher);
//! insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"1ː00da39a3ee5e6b4b0d3255bfef95601890afd80709"###);
//! ```
//! ## sha256
//! ```rust
@@ -16,13 +15,10 @@
//! let mut hasher = sha2::Sha256::new();
//! hasher.update(b"hello world");
//! let binary_id = okid::OkId::from(hasher);
//! insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
//! 2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
//! "###);
//! ```
//!
//! The resulting strings look like this:
//! 2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
//! `2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9`
//! first character of the string is the type of the binary data
//! in this case 2 means sha256
//! the rest of the string is the hexadecimal representation of the binary data
@@ -32,17 +28,33 @@
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
#![deny(missing_docs)]
use std::{fmt::Display, str::FromStr};
use std::{fmt::Display, hash::Hash, str::FromStr};
use enumflags2::{bitflags, BitFlags};
use digest::OutputSizeUser;
use jetstream_wireformat::Data;
use serde::{Deserialize, Serialize};
const SEPARATOR: char = 'ː';
use serde_json::json;
#[cfg(feature = "openapi")]
use utoipa::ToSchema;
use utoipa::{
openapi::{schema::SchemaType, SchemaFormat, Type},
PartialSchema,
};
/// Separator character for the OkId string representation
pub const SEPARATOR: char = 'ː';
#[cfg(feature = "blake3")]
/// blake3 module
pub mod blake3;
/// fingerprint module
pub mod fingerprint;
#[cfg(feature = "git")]
/// git module
pub mod oid;
#[cfg(feature = "sha1")]
/// sha1 module
pub mod sha1;
@@ -62,6 +74,8 @@ pub mod uuid;
#[repr(u8)]
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize, Hash)]
pub(crate) enum BinaryType {
// Unknown
Unknown = 0,
#[cfg(feature = "sha1")]
// Next bit means the size of the digest is of sha1 type
Sha1 = 1 << 0,
@@ -80,6 +94,8 @@ pub(crate) enum BinaryType {
#[cfg(feature = "uuid")]
// UUID
Uuid = 1 << 5,
// Fingerprint
Fingerprint = 1 << 6,
}
impl From<char> for BinaryType {
@@ -97,7 +113,8 @@ impl From<char> for BinaryType {
'u' => Self::Ulid,
#[cfg(feature = "uuid")]
'i' => Self::Uuid,
_ => panic!("Invalid binary type"),
'f' => Self::Fingerprint,
_ => Self::Unknown,
}
}
}
@@ -117,6 +134,8 @@ impl BinaryType {
Self::Ulid => 'u',
#[cfg(feature = "uuid")]
Self::Uuid => 'i',
Self::Unknown => '0',
Self::Fingerprint => 'f',
}
}
}
@@ -136,17 +155,99 @@ impl Display for BinaryType {
Self::Ulid => write!(f, "ulid"),
#[cfg(feature = "uuid")]
Self::Uuid => write!(f, "uuid"),
Self::Unknown => write!(f, "unknown"),
Self::Fingerprint => write!(f, "fingerprint"),
}
}
}
/// The digest of the binary identifier
#[derive(Clone, Copy)]
pub struct OkId {
hash_type: BinaryType,
/// The digest of the binary identifier
digest: Digest,
}
/// OkId scalar for graphql
#[cfg(feature = "graphql")]
async_graphql::scalar!(OkId);
#[cfg(feature = "openapi")]
impl PartialSchema for OkId {
fn schema() -> utoipa::openapi::RefOr<utoipa::openapi::schema::Schema> {
let mut o = utoipa::openapi::schema::Object::new();
o.schema_type = SchemaType::new(Type::String);
o.example = Some(json!(format!(
"2{SEPARATOR}00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
)
.to_string()));
let version = env!("CARGO_PKG_VERSION");
o.description = Some(format!(
r###"[OkId v{}](https://ok.software/ok/-/packages/cargo/okid/{})
"###,
version, version
));
o.format = Some(SchemaFormat::Custom("OkId".to_string()));
utoipa::openapi::RefOr::T(utoipa::openapi::schema::Schema::Object(o))
}
}
/// OkId schema for openapi
#[cfg(feature = "openapi")]
impl ToSchema for OkId {
fn name() -> std::borrow::Cow<'static, str> {
"OkId".into()
}
}
impl PartialEq for OkId {
fn eq(&self, other: &Self) -> bool {
match (&self.digest, &other.digest) {
(Digest::Sha1(a), Digest::Sha1(b)) => a == b,
(Digest::Sha1(_), _) => false,
(Digest::Sha256(a), Digest::Sha256(b)) => a == b,
(Digest::Sha256(_), _) => false,
(Digest::Sha512(a), Digest::Sha512(b)) => a == b,
(Digest::Sha512(_), _) => false,
(Digest::Blake3(a), Digest::Blake3(b)) => a == b,
(Digest::Blake3(_), _) => false,
(Digest::Ulid(a), Digest::Ulid(b)) => a == b,
(Digest::Ulid(_), _) => false,
(Digest::Uuid(a), Digest::Uuid(b)) => a == b,
(Digest::Uuid(_), _) => false,
(Digest::Fingerprint(a), Digest::Fingerprint(b)) => a == b,
(Digest::Fingerprint(_), _) => false,
}
}
}
impl Eq for OkId {}
impl PartialOrd for OkId {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
match (&self.digest, &other.digest) {
(Digest::Ulid(a), Digest::Ulid(b)) => a.0.partial_cmp(&b.0),
_ => None,
}
}
}
impl Hash for OkId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.hash_type.hash(state);
match &self.digest {
Digest::Sha1(d) => d.hash(state),
Digest::Sha256(d) => d.hash(state),
Digest::Sha512(d) => d.hash(state),
Digest::Blake3(d) => d.hash(state),
Digest::Ulid(d) => d.hash(state),
Digest::Uuid(d) => d.hash(state),
Digest::Fingerprint(d) => d.hash(state),
}
}
}
impl Serialize for OkId {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&self.to_string())
@@ -173,6 +274,8 @@ pub enum Error {
InvalidFormat,
}
impl std::error::Error for Error {}
impl Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@@ -239,55 +342,16 @@ fn parse_okid(s: &str) -> Result<OkId, Error> {
hash_type,
digest: Digest::Uuid(rest.parse()?),
}),
BinaryType::Unknown => todo!(),
BinaryType::Fingerprint => Ok(OkId {
hash_type,
digest: Digest::Fingerprint(rest.parse()?),
}),
}
}
#[bitflags]
#[repr(u8)]
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize, Hash)]
enum CommonSettings {
Git = 0b10000000,
Obfuscated = 0b01000000,
}
impl From<char> for CommonSettings {
fn from(value: char) -> Self {
match value {
'g' => Self::Git,
'o' => Self::Obfuscated,
_ => panic!("Invalid common setting"),
}
}
}
impl CommonSettings {
fn char_code(&self) -> char {
match self {
Self::Git => 'g',
Self::Obfuscated => 'o',
}
}
}
impl Display for CommonSettings {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let settings = BitFlags::from_flag(*self);
let buf = "+".to_string();
let settings = settings.iter().fold(vec![], |acc, x| {
let mut acc = acc;
acc.push(x.char_code());
acc
});
let settings = settings
.iter()
.fold(String::new(), |acc, x| acc + &x.to_string());
write!(f, "{}", buf + settings.as_str())
}
}
/// Digest of the binary identifier
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Copy)]
enum Digest {
#[cfg(feature = "sha1")]
Sha1(crate::sha1::Sha1),
@@ -301,11 +365,11 @@ enum Digest {
Ulid(crate::ulid::Ulid),
#[cfg(feature = "uuid")]
Uuid(crate::uuid::Uuid),
Fingerprint(crate::fingerprint::Fingerprint),
}
impl Display for OkId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Write the binary type code
write!(f, "{}{}", self.hash_type.char_code(), SEPARATOR)?;
match &self.digest {
@@ -321,13 +385,190 @@ impl Display for OkId {
Digest::Ulid(ulid) => ulid.fmt(f),
#[cfg(feature = "uuid")]
Digest::Uuid(uuid) => uuid.fmt(f),
Digest::Fingerprint(fingerprint) => fingerprint.fmt(f),
}
}
}
impl std::fmt::Debug for OkId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}{}", self.hash_type.char_code(), SEPARATOR)?;
match &self.digest {
#[cfg(feature = "sha1")]
Digest::Sha1(sha1) => std::fmt::Display::fmt(sha1, f),
#[cfg(feature = "sha2")]
Digest::Sha256(sha256) => std::fmt::Display::fmt(sha256, f),
#[cfg(feature = "sha3")]
Digest::Sha512(sha512) => std::fmt::Display::fmt(sha512, f),
#[cfg(feature = "blake3")]
Digest::Blake3(blake3) => std::fmt::Display::fmt(blake3, f),
#[cfg(feature = "ulid")]
Digest::Ulid(ulid) => std::fmt::Display::fmt(ulid, f),
#[cfg(feature = "uuid")]
Digest::Uuid(uuid) => std::fmt::Display::fmt(uuid, f),
Digest::Fingerprint(fingerprint) => std::fmt::Display::fmt(fingerprint, f),
}
}
}
/// IntoOkId trait, implemented by types that can be converted into an OkId
pub trait IntoOkId
where
Self: Into<OkId>,
{
/// Convert the type into an OkId
fn into_okid(self) -> OkId {
self.into()
}
}
impl OkId {
/// Convert the OkId into the bytes of its string representation
#[inline]
pub fn as_key(&self) -> Vec<u8> {
// The string form is plain UTF-8; returning owned bytes avoids handing out
// a slice that would dangle once the temporary String is dropped.
self.to_string().into_bytes()
}
}
/// FromDigest trait, implemented by digest hashers that can be converted into an OkId
pub trait FromDigest: OutputSizeUser + digest::Digest + IntoOkId + Send {}
impl<T: digest::Digest + OutputSizeUser + IntoOkId + Send> FromDigest for T {}
impl jetstream_wireformat::WireFormat for OkId {
fn byte_size(&self) -> u32 {
// binary type + separator
1
// digest length
+ match self.digest {
Digest::Sha1(sha1) => sha1.0.len() as u32 ,
Digest::Sha256(sha256) => sha256.0.len() as u32 ,
Digest::Sha512(sha512) => sha512.0.len() as u32,
Digest::Blake3(blake3) => blake3.0.len() as u32,
Digest::Ulid(_ulid) => 128 / 8,
Digest::Uuid(_uuid) => 128 / 8,
Digest::Fingerprint(_fingerprint) => 64 / 8,
}
}
fn encode<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
let c = self.hash_type.char_code() as u8;
u8::encode(&c, writer)?;
match &self.digest {
#[cfg(feature = "sha1")]
Digest::Sha1(sha1) => Data::encode(&Data(sha1.0.into()), writer)?,
#[cfg(feature = "sha2")]
Digest::Sha256(sha256) => Data::encode(&Data(sha256.0.into()), writer)?,
#[cfg(feature = "sha3")]
Digest::Sha512(sha512) => Data::encode(&Data(sha512.0.into()), writer)?,
#[cfg(feature = "blake3")]
Digest::Blake3(blake3) => Data::encode(&Data(blake3.0.into()), writer)?,
#[cfg(feature = "ulid")]
Digest::Ulid(ulid) => u128::encode(&ulid.0, writer)?,
#[cfg(feature = "uuid")]
Digest::Uuid(uuid) => {
u128::encode(&uuid.0, writer)?;
}
Digest::Fingerprint(fingerprint) => {
u64::encode(&fingerprint.0, writer)?;
}
}
Ok(())
}
fn decode<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> {
let binary_type = u8::decode(reader)?;
match BinaryType::from(binary_type as char) {
BinaryType::Unknown => Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!("Unknown binary type: {}", binary_type as char),
)),
#[cfg(feature = "sha1")]
BinaryType::Sha1 => {
let data = Data::decode(reader)?;
let data = data.get(0..20).unwrap();
let mut buf = [0; 20];
if data.len() == 20 {
buf.copy_from_slice(data);
}
Ok(OkId {
hash_type: BinaryType::Sha1,
digest: Digest::Sha1(crate::sha1::Sha1(buf)),
})
}
#[cfg(feature = "sha2")]
BinaryType::Sha256 => {
let data = Data::decode(reader)?;
let data = data.get(0..32).unwrap();
let mut buf = [0; 32];
if data.len() == 32 {
buf.copy_from_slice(data);
}
Ok(OkId {
hash_type: BinaryType::Sha256,
digest: Digest::Sha256(crate::sha2::Sha256(buf)),
})
}
BinaryType::Sha3_512 => {
let data = Data::decode(reader)?;
let data = data.get(0..64).unwrap();
let mut buf = [0; 64];
if data.len() == 64 {
buf.copy_from_slice(data);
}
Ok(OkId {
hash_type: BinaryType::Sha3_512,
digest: Digest::Sha512(crate::sha3::Sha512(buf)),
})
}
BinaryType::Blake3 => {
let data = Data::decode(reader)?;
let data = data.get(0..32).unwrap();
let mut buf = [0; 32];
if data.len() == 32 {
buf.copy_from_slice(data);
}
Ok(OkId {
hash_type: BinaryType::Blake3,
digest: Digest::Blake3(crate::blake3::Blake3(buf)),
})
}
BinaryType::Ulid => {
let data = u128::decode(reader)?;
Ok(OkId {
hash_type: BinaryType::Ulid,
digest: Digest::Ulid(crate::ulid::Ulid(data)),
})
}
BinaryType::Uuid => {
let data = u128::decode(reader)?;
Ok(OkId {
hash_type: BinaryType::Uuid,
digest: Digest::Uuid(crate::uuid::Uuid(data)),
})
}
BinaryType::Fingerprint => {
let data = u64::decode(reader)?;
Ok(OkId {
hash_type: BinaryType::Fingerprint,
digest: Digest::Fingerprint(crate::fingerprint::Fingerprint(data)),
})
}
}
}
}
#[cfg(test)]
mod binary_id_tests {
mod okid_tests {
use jetstream_wireformat::JetStreamWireFormat;
#[cfg(feature = "sha1")]
use sha1::Digest as sha1digest;
@@ -338,7 +579,7 @@ mod binary_id_tests {
let hasher = sha1::Sha1::new();
let binary_id = OkId::from(hasher);
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
1ː00da39a3ee5e6b4b0d3255bfef95601890afd80709
1ːda39a3ee5e6b4b0d3255bfef95601890afd80709
"###);
}
#[cfg(feature = "sha1")]
@@ -348,7 +589,7 @@ mod binary_id_tests {
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
1ː002aae6c35c94fcfb415dbe95f408b9ce91ee846ed
1ː2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
"###);
}
#[cfg(feature = "sha2")]
@@ -358,7 +599,7 @@ mod binary_id_tests {
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
2ːb94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
"###);
}
@@ -406,41 +647,174 @@
#[cfg(feature = "sha1")]
#[test]
fn test_parse_hello_world_sha1() {
let hash = "1ː002aae6c35c94fcfb415dbe95f408b9ce91ee846ed";
fn test_parse_hello_world() {
let seperator = super::SEPARATOR;
let hash = format!("1{seperator}2aae6c35c94fcfb415dbe95f408b9ce91ee846ed");
let binary_id = hash.parse::<OkId>().unwrap();
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
1ː002aae6c35c94fcfb415dbe95f408b9ce91ee846ed
"###);
assert_eq!(
binary_id.to_string(),
format!("1{seperator}2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"),
);
}
#[cfg(feature = "sha2")]
#[test]
fn test_parse_hello_world_sha256() {
let hash = "2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9";
let seperator = super::SEPARATOR;
let hash =
format!("2{seperator}b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9");
let binary_id = hash.parse::<OkId>().unwrap();
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
2ː00b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
"###);
assert_eq!(
binary_id.to_string(),
format!("2{seperator}b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"),
);
}
#[cfg(feature = "sha3")]
#[test]
fn test_parse_hello_world_sha3() {
let hash = "3ː840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a";
let seperator = super::SEPARATOR;
let hash = format!("3{seperator}840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a");
let binary_id = hash.parse::<OkId>().unwrap();
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
3ː840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a
"###);
assert_eq!(
binary_id.to_string(),
format!("3{seperator}840006653e9ac9e95117a15c915caab81662918e925de9e004f774ff82d7079a40d4d27b1b372657c61d46d470304c88c788b3a4527ad074d1dccbee5dbaa99a"),
);
}
#[cfg(feature = "blake3")]
#[test]
fn test_parse_hello_world_blake3() {
let hash = "bːd74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24";
let seperator = super::SEPARATOR;
let hash =
format!("b{seperator}d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24");
let binary_id = hash.parse::<OkId>().unwrap();
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
bːd74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24
"###);
assert_eq!(
binary_id.to_string(),
format!("b{seperator}d74981efa70a0c880b8d8c1985d075dbcbf679b99a5f9914e5aaf96b831a9e24"),
);
}
#[cfg(feature = "ulid")]
#[test]
fn test_parse_hello_world_ulid() {
let seperator = super::SEPARATOR;
let hash = format!("u{seperator}146907d25d66000035da136af2f988ca");
let binary_id = hash.parse::<OkId>().unwrap();
assert_eq!(
binary_id.to_string(),
format!("u{seperator}146907d25d66000035da136af2f988ca"),
);
}
#[cfg(feature = "sha1")]
#[test]
fn test_wireformat_hello_world_sha1() {
use jetstream_wireformat::WireFormat;
let mut hasher = sha1::Sha1::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let mut buf: Vec<u8> = vec![];
OkId::encode(&binary_id, &mut buf).unwrap();
let new_binary_id = OkId::decode(&mut buf.as_slice()).unwrap();
assert_eq!(binary_id.to_string(), new_binary_id.to_string(),);
}
#[cfg(feature = "sha2")]
#[test]
fn test_wireformat_hello_world_sha256() {
use jetstream_wireformat::WireFormat;
let mut hasher = sha2::Sha256::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let mut buf: Vec<u8> = vec![];
OkId::encode(&binary_id, &mut buf).unwrap();
let new_binary_id = OkId::decode(&mut buf.as_slice()).unwrap();
assert_eq!(binary_id.to_string(), new_binary_id.to_string(),);
}
#[cfg(feature = "sha3")]
#[test]
fn test_wireformat_hello_world_sha3() {
use jetstream_wireformat::WireFormat;
let mut hasher = sha3::Sha3_512::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let mut buf: Vec<u8> = vec![];
OkId::encode(&binary_id, &mut buf).unwrap();
let new_binary_id = OkId::decode(&mut buf.as_slice()).unwrap();
assert_eq!(binary_id.to_string(), new_binary_id.to_string(),);
}
#[cfg(feature = "blake3")]
#[test]
fn test_wireformat_hello_world_blake3() {
use jetstream_wireformat::WireFormat;
let mut hasher = blake3::Hasher::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let mut buf: Vec<u8> = vec![];
OkId::encode(&binary_id, &mut buf).unwrap();
let new_binary_id = OkId::decode(&mut buf.as_slice()).unwrap();
assert_eq!(binary_id.to_string(), new_binary_id.to_string(),);
}
// test serde
#[cfg(feature = "sha1")]
#[test]
fn test_serde_hello_world_sha1() {
use insta::assert_snapshot;
let mut hasher = sha1::Sha1::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let serialized = serde_json::to_string_pretty(&binary_id).unwrap();
let deserialized: OkId = serde_json::from_str(&serialized).unwrap();
assert_eq!(binary_id.to_string(), deserialized.to_string(),);
assert_snapshot!(serialized, @r###""1ː2aae6c35c94fcfb415dbe95f408b9ce91ee846ed""###);
}
#[cfg(feature = "sha2")]
#[test]
fn test_serde_hello_world_sha256() {
use insta::assert_snapshot;
let mut hasher = sha2::Sha256::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let serialized = serde_json::to_string_pretty(&binary_id).unwrap();
let deserialized: OkId = serde_json::from_str(&serialized).unwrap();
assert_eq!(binary_id.to_string(), deserialized.to_string(),);
assert_snapshot!(serialized, @r###""2ːb94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9""###);
}
#[derive(JetStreamWireFormat, Debug, Eq, PartialEq)]
pub struct Chunk(pub u64, pub OkId);
#[derive(JetStreamWireFormat, Debug, Eq, PartialEq)]
pub struct ChunkMap(pub Vec<Chunk>);
#[derive(JetStreamWireFormat, Debug, Eq, PartialEq)]
pub struct File(pub OkId, pub ChunkMap);
use jetstream_wireformat::wire_format_extensions::ConvertWireFormat;
#[cfg(feature = "sha1")]
#[test]
fn test_serde_file_sha1() {
let mut hasher = sha1::Sha1::new();
hasher.update(b"hello world");
let binary_id = OkId::from(hasher);
let chunk = Chunk(1, binary_id);
let chunk_map = ChunkMap(vec![chunk]);
let file = File(binary_id, chunk_map);
let mut byts = file.to_bytes();
let new_file = File::from_bytes(&mut byts).unwrap();
let mut _reader = std::io::Cursor::new(byts);
assert_eq!(file, new_file);
}
}
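Beyond the crate's own tests above, an editorial sketch of how the public pieces introduced in this hunk (`IntoOkId`, the `Hash`/`Eq` impls) might be combined; `key_for` is a hypothetical helper, not part of the crate:

```rust
use std::collections::HashSet;

use sha2::Digest;

// Hypothetical helper, not part of the crate: anything implementing IntoOkId
// (u64 via fingerprint, the sha1/sha2/sha3/blake3 hashers, uuid, ulid, ...)
// can be reduced to its string key.
fn key_for<T: okid::IntoOkId>(value: T) -> String {
    value.into_okid().to_string()
}

fn main() {
    // Fingerprint-typed OkIds come straight from u64 values.
    assert!(key_for(42u64).starts_with('f'));

    // OkId implements Hash + Eq, so identical inputs collapse in a HashSet.
    let mut seen: HashSet<okid::OkId> = HashSet::new();
    for _ in 0..2 {
        let mut hasher = sha2::Sha256::new();
        hasher.update(b"hello world");
        seen.insert(okid::OkId::from(hasher));
    }
    assert_eq!(seen.len(), 1);
}
```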

src/oid.rs Normal file

@@ -0,0 +1,32 @@
use crate::{sha1::Sha1, BinaryType, Digest};
use super::OkId;
impl From<git2::Oid> for OkId {
fn from(value: git2::Oid) -> Self {
let data = value.as_bytes();
let mut buf = [0; 20];
if data.len() == 20 {
buf.copy_from_slice(data);
}
Self {
hash_type: BinaryType::Sha1,
digest: Digest::Sha1(Sha1(buf)),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_from_git_oid() {
let oid = git2::Oid::from_str("0123456789abcdef0123456789abcdef01234567").unwrap();
let okid: OkId = oid.into();
assert_eq!(
okid.to_string(),
"1ː800123456789abcdef0123456789abcdef01234567"
);
}
}

src/sha1.rs

@@ -1,14 +1,14 @@
use std::{fmt::Display, str::FromStr};
use digest::core_api::CoreWrapper;
use enumflags2::BitFlags;
use super::{BinaryType, CommonSettings, Digest, OkId};
use super::{BinaryType, Digest, IntoOkId, OkId};
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Sha1(BitFlags<CommonSettings>, [u8; 20]);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Sha1(pub(crate) [u8; 20]);
use sha1::Digest as sha1Digest;
impl From<sha1::Sha1> for OkId {
fn from(value: sha1::Sha1) -> Self {
let data = value.finalize();
@@ -17,25 +17,26 @@ impl From<sha1::Sha1> for OkId {
if data.len() == 20 {
buf.copy_from_slice(data);
}
let empty: BitFlags<CommonSettings> = BitFlags::empty();
Self {
hash_type: BinaryType::Sha1,
digest: Digest::Sha1(Sha1(empty, buf)),
digest: Digest::Sha1(Sha1(buf)),
}
}
}
impl From<sha1::Sha1Core> for OkId {
fn from(value: sha1::Sha1Core) -> Self {
CoreWrapper::from_core(value).into()
}
}
impl IntoOkId for sha1::Sha1 {}
impl Display for Sha1 {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let settings = self.0;
let data = self.1;
let buf = hex::encode([settings.bits()]) + &hex::encode(data);
f.write_str(&buf)?;
let data = self.0;
let buf = &hex::encode(data);
f.write_str(buf)?;
Ok(())
}
}
@@ -44,33 +45,9 @@ impl FromStr for Sha1 {
type Err = super::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut settings = BitFlags::empty();
let buf = hex::decode(s)?;
let _ = buf.first().map(|&first| {
settings = BitFlags::from_bits_truncate(first);
});
let mut hash: [u8; 20] = [0; 20];
hash.copy_from_slice(&buf[1..]);
Ok(Sha1(settings, hash))
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn sha1_to_string() {
let hasher = sha1::Sha1::new();
let binary_id = OkId::from(hasher);
insta::assert_yaml_snapshot!(binary_id.to_string(), @r###"
1ː00da39a3ee5e6b4b0d3255bfef95601890afd80709
"###);
}
#[test]
fn sha1_from_str() {
let hash = "1ː00da39a3ee5e6b4b0d3255bfef95601890afd80709";
let _binary_id: OkId = hash.parse().unwrap();
hash.copy_from_slice(&buf[..]);
Ok(Sha1(hash))
}
}

src/sha2.rs

@@ -1,13 +1,11 @@
use enumflags2::BitFlags;
use sha2::Digest;
use std::{fmt::Display, str::FromStr};
use crate::OkId;
use super::CommonSettings;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Sha256(pub(crate) [u8; 32]);
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Sha256(BitFlags<CommonSettings>, [u8; 32]);
impl From<sha2::Sha256> for OkId {
fn from(value: sha2::Sha256) -> Self {
let data = value.finalize();
@@ -16,20 +14,20 @@ impl From<sha2::Sha256> for OkId {
if data.len() == 32 {
buf.copy_from_slice(data);
}
let empty: BitFlags<CommonSettings> = BitFlags::empty();
Self {
hash_type: super::BinaryType::Sha256,
digest: super::Digest::Sha256(Sha256(empty, buf)),
digest: super::Digest::Sha256(Sha256(buf)),
}
}
}
impl super::IntoOkId for sha2::Sha256 {}
impl Display for Sha256 {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let settings = self.0;
let data = self.1;
let buf = hex::encode([settings.bits()]) + &hex::encode(data);
f.write_str(&buf)?;
let data = self.0;
let buf = &hex::encode(data);
f.write_str(buf)?;
Ok(())
}
}
@@ -38,13 +36,9 @@ impl FromStr for Sha256 {
type Err = super::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut settings = BitFlags::empty();
let buf = hex::decode(s)?;
let _ = buf.first().map(|&first| {
settings = BitFlags::from_bits_truncate(first);
});
let mut hash: [u8; 32] = [0; 32];
hash.copy_from_slice(&buf[1..]);
Ok(Sha256(settings, hash))
hash.copy_from_slice(&buf[..]);
Ok(Sha256(hash))
}
}

src/sha3.rs

@@ -5,8 +5,9 @@ use sha3::Digest;
use super::OkId;
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Sha512([u8; 64]);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Sha512(pub(super) [u8; 64]);
impl From<sha3::Sha3_512> for OkId {
fn from(value: sha3::Sha3_512) -> Self {
let data = value.finalize();
@@ -23,6 +24,8 @@ impl From<sha3::Sha3_512> for OkId {
}
}
impl super::IntoOkId for sha3::Sha3_512 {}
impl From<sha3::Sha3_512Core> for OkId {
fn from(value: sha3::Sha3_512Core) -> Self {
CoreWrapper::from_core(value).into()

src/ulid.rs

@@ -2,8 +2,8 @@ use std::fmt::Display;
use crate::OkId;
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Ulid(u128);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Ulid(pub(super) u128);
impl From<ulid::Ulid> for OkId {
fn from(value: ulid::Ulid) -> Self {
@@ -21,6 +21,8 @@ impl Display for Ulid {
}
}
impl super::IntoOkId for ulid::Ulid {}
impl std::str::FromStr for Ulid {
type Err = crate::Error;
@@ -31,3 +33,9 @@ impl std::str::FromStr for Ulid {
Ok(Ulid(u128::from_be_bytes(hash)))
}
}
impl From<Ulid> for ulid::Ulid {
fn from(val: Ulid) -> Self {
ulid::Ulid(val.0)
}
}

src/uuid.rs

@@ -2,8 +2,9 @@ use std::fmt::Display;
use crate::OkId;
#[derive(Copy, Clone, Debug, PartialEq)]
pub(super) struct Uuid(u128);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(super) struct Uuid(pub(super) u128);
impl From<uuid::Uuid> for OkId {
fn from(value: uuid::Uuid) -> Self {
Self {
@@ -20,6 +21,8 @@ impl Display for Uuid {
}
}
impl super::IntoOkId for uuid::Uuid {}
impl std::str::FromStr for Uuid {
type Err = crate::Error;
@@ -30,3 +33,9 @@ impl std::str::FromStr for Uuid {
Ok(Uuid(u128::from_be_bytes(hash)))
}
}
impl From<Uuid> for uuid::Uuid {
fn from(val: Uuid) -> Self {
uuid::Uuid::from_u128(val.0)
}
}
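Closing editorial sketch for the uuid path touched above, assuming the default `uuid` feature (which enables `v4`): converting a `uuid::Uuid` yields an `OkId` whose string form starts with the `i` char code:

```rust
fn main() {
    // From<uuid::Uuid> for OkId is implemented above; 'i' is the uuid char code.
    let id = okid::OkId::from(uuid::Uuid::new_v4());
    assert!(id.to_string().starts_with('i'));
}
```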