Compare commits: 5e8a2d6c25 ... 53e5bd81af (2 commits)

Author | SHA1 | Date
---|---|---
 | 53e5bd81af |
 | ab056bfe73 |

13 changed files with 1457 additions and 1126 deletions

Cargo.lock (generated, 179 changed lines)
|
@ -249,10 +249,31 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastrand"
|
||||
version = "1.8.0"
|
||||
name = "errno"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499"
|
||||
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
|
||||
dependencies = [
|
||||
"errno-dragonfly",
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno-dragonfly"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastrand"
|
||||
version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
|
||||
dependencies = [
|
||||
"instant",
|
||||
]
|
||||
|
@ -359,9 +380,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "h2"
|
||||
version = "0.3.15"
|
||||
version = "0.3.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4"
|
||||
checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
|
@ -393,9 +414,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "0.2.8"
|
||||
version = "0.2.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399"
|
||||
checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
|
@ -427,9 +448,9 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
|
|||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.14.23"
|
||||
version = "0.14.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "034711faac9d2166cb1baf1a2fb0b60b1f277f8492fd72176c17f3515e1abd3c"
|
||||
checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
|
@ -500,6 +521,16 @@ dependencies = [
|
|||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "io-lifetimes"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cfa919a82ea574332e2de6e74b4c36e74d41982b335080fa59d4ef31be20fdf3"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.7.1"
|
||||
|
@ -508,9 +539,9 @@ checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"
|
|||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.5"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
|
||||
checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
|
@ -542,6 +573,12 @@ version = "0.2.139"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.17"
|
||||
|
@ -565,14 +602,14 @@ checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
|
|||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
|
||||
checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi",
|
||||
"windows-sys",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -612,9 +649,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.17.0"
|
||||
version = "1.17.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
|
||||
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
|
||||
|
||||
[[package]]
|
||||
name = "opaque-debug"
|
||||
|
@ -771,9 +808,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.50"
|
||||
version = "1.0.51"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
|
||||
checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
@ -857,15 +894,6 @@ dependencies = [
|
|||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "remove_dir_all"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.11.14"
|
||||
|
@ -904,10 +932,24 @@ dependencies = [
|
|||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.12"
|
||||
name = "rustix"
|
||||
version = "0.36.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
|
||||
checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"errno",
|
||||
"io-lifetimes",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
|
||||
|
||||
[[package]]
|
||||
name = "schannel"
|
||||
|
@ -915,7 +957,7 @@ version = "0.1.21"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3"
|
||||
dependencies = [
|
||||
"windows-sys",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -943,15 +985,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.152"
|
||||
version = "1.0.153"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
|
||||
checksum = "3a382c72b4ba118526e187430bb4963cd6d55051ebf13d9b25574d379cc98d20"
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.91"
|
||||
version = "1.0.94"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
|
||||
checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
|
@ -993,18 +1035,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "slab"
|
||||
version = "0.4.7"
|
||||
version = "0.4.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef"
|
||||
checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.4.7"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd"
|
||||
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"winapi",
|
||||
|
@ -1018,9 +1060,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.107"
|
||||
version = "1.0.109"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1029,16 +1071,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.3.0"
|
||||
version = "3.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
|
||||
checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"remove_dir_all",
|
||||
"winapi",
|
||||
"rustix",
|
||||
"windows-sys 0.42.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1052,15 +1093,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tinyvec_macros"
|
||||
version = "0.1.0"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
|
||||
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.25.0"
|
||||
version = "1.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af"
|
||||
checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"bytes",
|
||||
|
@ -1069,14 +1110,14 @@ dependencies = [
|
|||
"mio",
|
||||
"pin-project-lite",
|
||||
"socket2",
|
||||
"windows-sys",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-native-tls"
|
||||
version = "0.3.0"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b"
|
||||
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
|
||||
dependencies = [
|
||||
"native-tls",
|
||||
"tokio",
|
||||
|
@ -1084,9 +1125,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740"
|
||||
checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
|
@ -1142,9 +1183,9 @@ checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"
|
|||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.6"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
|
||||
checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
|
@ -1317,6 +1358,30 @@ dependencies = [
|
|||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.45.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.42.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.42.1"
|
||||
|
|
|
@@ -1,6 +1,6 @@
 use std::{fs::File, io::Read, path::Path};
 
-use crate::store::HashType;
+use crate::crypto::HashType;
 
 pub struct Preferences {
     _store_type: String,
@@ -11,7 +11,7 @@ pub struct Preferences {
 
 pub fn try_load_preferences() -> Option<Preferences> {
     //Alert: develop a better home dir acquisition method
-    let mut file = Path::new("/home/nya/.nyanpass.conf");
+    let mut file = Path::new("~/.nyanpass.conf");
     if !file.exists() {
         file = Path::new("/etc/nyanpass.conf");
     }
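The //Alert comment above still applies: Path::new("~/.nyanpass.conf") is taken literally, because tilde expansion is a shell feature and is not performed by the OS or by std. A minimal home-directory lookup is sketched here purely for illustration (it is not part of this change and assumes the HOME environment variable is set on the target platforms):

// Sketch only: build the config path from $HOME, falling back to /etc like the code above.
use std::path::PathBuf;

fn default_config_path() -> PathBuf {
    match std::env::var_os("HOME") {
        Some(home) => PathBuf::from(home).join(".nyanpass.conf"),
        None => PathBuf::from("/etc/nyanpass.conf"),
    }
}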
|
src/crypto/mod.rs (new file, 274 lines)

@@ -0,0 +1,274 @@
use std::{
    fs::File,
    io::{BufReader, Read},
};

use aes_gcm::{aead::Aead, Aes256Gcm, KeyInit, Nonce};
use argon2::PasswordHasher;
use password_hash::SaltString;
use rand::{rngs::StdRng, RngCore, SeedableRng};

use crate::data::DataPage;

#[derive(Clone, Copy, PartialEq, PartialOrd)]
pub enum EncryptionType {
    Invalid = 0,
    AesGcm = 1,
    Chacha20Poly1305 = 2,
}

impl From<u8> for EncryptionType {
    fn from(n: u8) -> Self {
        match n {
            0 => EncryptionType::Invalid,
            1 => EncryptionType::AesGcm,
            2 => EncryptionType::Chacha20Poly1305,
            _ => panic!("Invalid value '{}'.", n),
        }
    }
}
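This From<u8> impl (and the matching one for HashType below) panics on unknown discriminants. A non-panicking variant is sketched here only as an illustration; the commit itself keeps the panicking From:

// Sketch: fallible conversion so callers can reject bad on-disk bytes without panicking.
impl TryFrom<u8> for EncryptionType {
    type Error = u8;

    fn try_from(n: u8) -> Result<Self, Self::Error> {
        match n {
            0 => Ok(EncryptionType::Invalid),
            1 => Ok(EncryptionType::AesGcm),
            2 => Ok(EncryptionType::Chacha20Poly1305),
            other => Err(other),
        }
    }
}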
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, PartialOrd)]
|
||||
pub enum HashType {
|
||||
Invalid = 0,
|
||||
Argon2 = 1,
|
||||
Bcrypt = 2,
|
||||
Pbkdf2 = 4,
|
||||
}
|
||||
|
||||
impl From<u8> for HashType {
|
||||
fn from(n: u8) -> Self {
|
||||
match n {
|
||||
0 => HashType::Invalid,
|
||||
1 => HashType::Argon2,
|
||||
2 => HashType::Bcrypt,
|
||||
4 => HashType::Pbkdf2,
|
||||
_ => panic!("Invalid value '{}'.", n),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait EncryptionManager {}
|
||||
|
||||
pub struct EncryptionContext {
|
||||
enc_type: EncryptionType,
|
||||
hash_type: HashType,
|
||||
key: Vec<u8>,
|
||||
salt: SaltString,
|
||||
iv_root: Vec<u8>,
|
||||
}
|
||||
|
||||
impl EncryptionContext {
|
||||
pub fn decrypt_block(&self, ciphertext: &Vec<u8>, offset: usize) -> Result<DataPage, String> {
|
||||
let plaintext = match self.enc_type {
|
||||
EncryptionType::Invalid => unreachable!(),
|
||||
EncryptionType::AesGcm => {
|
||||
let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
|
||||
let aes = Aes256Gcm::new(aes_key);
|
||||
|
||||
let nonce = nonce_offset(&self.iv_root, offset);
|
||||
let nonce = Nonce::from_slice(&nonce);
|
||||
|
||||
aes.decrypt(nonce, ciphertext.as_ref())
|
||||
}
|
||||
EncryptionType::Chacha20Poly1305 => todo!(),
|
||||
};
|
||||
|
||||
if plaintext.is_err() {
|
||||
return Err(format!("Decryption error: {}", plaintext.unwrap_err()));
|
||||
}
|
||||
|
||||
Ok(DataPage::load(offset, plaintext.unwrap()))
|
||||
}
|
||||
|
||||
pub fn decrypt_block_from_file(
|
||||
&self,
|
||||
len: usize,
|
||||
offset: usize,
|
||||
file: &mut BufReader<File>,
|
||||
) -> Result<DataPage, String> {
|
||||
let mut buf: Vec<u8> = vec![0; len + 16];
|
||||
match file.read_exact(&mut buf) {
|
||||
Err(err) => Err(format!("Read error: {}", err.to_string())),
|
||||
Ok(_) => self.decrypt_block(&buf, offset),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default(passphrase: &Vec<u8>) -> Self {
|
||||
let mut rng = StdRng::from_entropy();
|
||||
|
||||
let salt = SaltString::generate(&mut rng);
|
||||
|
||||
let (default_hash_type, default_enc_type) = (HashType::Argon2, EncryptionType::AesGcm);
|
||||
|
||||
let key = derive_key(&passphrase, &salt, default_hash_type).unwrap();
|
||||
|
||||
let mut iv: [u8; 12] = [0; 12];
|
||||
rng.fill_bytes(&mut iv);
|
||||
|
||||
EncryptionContext {
|
||||
enc_type: default_enc_type,
|
||||
hash_type: default_hash_type,
|
||||
key,
|
||||
salt,
|
||||
iv_root: iv.to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn encrypt_block(
|
||||
&self,
|
||||
plaintext: &Vec<u8>,
|
||||
offset: usize,
|
||||
) -> Result<DataPage, &'static str> {
|
||||
let ciphertext = match self.enc_type {
|
||||
EncryptionType::Invalid => unreachable!(),
|
||||
EncryptionType::AesGcm => {
|
||||
let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
|
||||
let aes = Aes256Gcm::new(aes_key);
|
||||
|
||||
let nonce = nonce_offset(&self.iv_root, offset);
|
||||
let nonce = Nonce::from_slice(&nonce);
|
||||
|
||||
aes.encrypt(nonce, plaintext.as_ref())
|
||||
}
|
||||
EncryptionType::Chacha20Poly1305 => todo!(),
|
||||
};
|
||||
|
||||
if ciphertext.is_err() {
|
||||
return Err("Encryption error");
|
||||
}
|
||||
|
||||
Ok(DataPage::load(offset, ciphertext.unwrap()))
|
||||
}
|
||||
|
||||
pub fn encryption_type(&self) -> EncryptionType {
|
||||
self.enc_type
|
||||
}
|
||||
|
||||
pub fn from_settings(
|
||||
passphrase: &Vec<u8>,
|
||||
enc_type: EncryptionType,
|
||||
hash_type: HashType,
|
||||
salt: SaltString,
|
||||
iv_root: Vec<u8>,
|
||||
) -> Self {
|
||||
let key = derive_key(&passphrase, &salt, hash_type).unwrap();
|
||||
|
||||
EncryptionContext {
|
||||
enc_type,
|
||||
hash_type,
|
||||
key,
|
||||
salt,
|
||||
iv_root,
|
||||
}
|
||||
}
|
||||
|
||||
pub unsafe fn get_key(&self) -> &Vec<u8> {
|
||||
&self.key
|
||||
}
|
||||
|
||||
pub fn hash_type(&self) -> HashType {
|
||||
self.hash_type
|
||||
}
|
||||
|
||||
pub fn offset_iv_by(&self, offset: usize) -> Vec<u8> {
|
||||
nonce_offset(&self.iv_root, offset)
|
||||
}
|
||||
|
||||
pub fn root_iv(&self) -> &Vec<u8> {
|
||||
&self.iv_root
|
||||
}
|
||||
|
||||
pub fn salt(&self) -> &SaltString {
|
||||
&self.salt
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nonce_offset(nonce_root: &Vec<u8>, offset: usize) -> Vec<u8> {
|
||||
let len = nonce_root.len();
|
||||
if len < (usize::BITS / 8) as usize {
|
||||
panic!("Nonce length less than {}", usize::BITS / 8);
|
||||
}
|
||||
let start = len - (usize::BITS / 8) as usize;
|
||||
|
||||
let mut nonce_copy = vec![0; len];
|
||||
nonce_copy.clone_from_slice(&nonce_root);
|
||||
|
||||
let mut num = u64::from_be_bytes(nonce_copy[start..len].try_into().unwrap());
|
||||
num += offset as u64;
|
||||
nonce_copy[start..len].copy_from_slice(&num.to_be_bytes());
|
||||
|
||||
nonce_copy
|
||||
}
|
||||
|
||||
pub fn derive_key(
|
||||
password: &Vec<u8>,
|
||||
salt: &SaltString,
|
||||
hash_type: HashType,
|
||||
) -> Result<Vec<u8>, &'static str> {
|
||||
let hash = match hash_type {
|
||||
HashType::Invalid => unreachable!(),
|
||||
HashType::Argon2 => {
|
||||
let argon = argon2::Argon2::default();
|
||||
argon.hash_password(password, salt)
|
||||
}
|
||||
HashType::Bcrypt => {
|
||||
todo!()
|
||||
}
|
||||
HashType::Pbkdf2 => {
|
||||
todo!()
|
||||
}
|
||||
};
|
||||
|
||||
if hash.is_err() {
|
||||
return Err("Hashing error");
|
||||
}
|
||||
|
||||
let hash = hash.unwrap();
|
||||
|
||||
let hash: Vec<u8> = hash.hash.unwrap().as_bytes().to_owned();
|
||||
Ok(hash)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::nonce_offset;
|
||||
|
||||
#[test]
|
||||
fn nonce_offset_increment() {
|
||||
let nonce: Vec<u8> = vec![0; 12];
|
||||
|
||||
let nonce_off = nonce_offset(&nonce, 0xfeeddeadbeef);
|
||||
|
||||
assert!(
|
||||
nonce_off[11] == 0xef
|
||||
&& nonce_off[10] == 0xbe
|
||||
&& nonce_off[9] == 0xad
|
||||
&& nonce_off[8] == 0xde
|
||||
&& nonce_off[7] == 0xed
|
||||
&& nonce_off[6] == 0xfe,
|
||||
"\ninitial = {:x?}\nnew = {:x?}",
|
||||
nonce,
|
||||
nonce_off
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonce_offset_increment_carry() {
|
||||
let nonce: Vec<u8> = vec![0xfe; 12];
|
||||
|
||||
let nonce_off = nonce_offset(&nonce, 0x2);
|
||||
|
||||
assert!(
|
||||
nonce_off[11] == 0x00
|
||||
&& nonce_off[10] == 0xff
|
||||
&& nonce_off[9] == 0xfe
|
||||
&& nonce_off[8] == 0xfe
|
||||
&& nonce_off[7] == 0xfe
|
||||
&& nonce_off[6] == 0xfe,
|
||||
"\ninitial = {:x?}\nnew = {:x?}",
|
||||
nonce,
|
||||
nonce_off
|
||||
);
|
||||
}
|
||||
}
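A minimal round-trip usage sketch of the EncryptionContext API added in this file (assuming the signatures above; the passphrase, plaintext, and block offset are arbitrary example values, not part of the commit):

// Sketch: derive a context from a passphrase, encrypt one block, then decrypt it back.
fn roundtrip_example() -> Result<(), String> {
    let passphrase = b"meowmeow".to_vec();
    let ctx = EncryptionContext::default(&passphrase);

    let plaintext = b"secret block".to_vec();
    // encrypt_block returns a DataPage whose buffer holds the ciphertext plus the GCM auth tag.
    let encrypted = ctx.encrypt_block(&plaintext, 0).map_err(|e| e.to_string())?;

    // decrypt_block takes the ciphertext bytes and the same block offset used for encryption.
    let decrypted = ctx.decrypt_block(&encrypted.buffer().to_vec(), 0)?;
    assert_eq!(decrypted.buffer(), &plaintext[..]);
    Ok(())
}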
|
src/data/mod.rs (new file, 176 lines)

@@ -0,0 +1,176 @@
use std::io::{Read, Seek, SeekFrom, Write};
|
||||
|
||||
pub trait DataPager {
|
||||
fn get_page(&mut self, offset: usize) -> Result<&mut DataPage, String>;
|
||||
|
||||
fn update_page(&mut self, page: WritableDataPage) -> Result<u64, ()>;
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash)]
|
||||
pub struct DataPage {
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
data: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Read for DataPage {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let range = self.pos..(self.pos + buf.len()).clamp(0, self.data.len() - buf.len());
        let size = range.len();
        buf.copy_from_slice(&self.data[range]);
        self.pos += size;
        Ok(size)
    }
}
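As written, read can panic near the end of the page: self.data.len() - buf.len() underflows when the caller's buffer is larger than the page, and copy_from_slice requires the source range to be exactly buf.len() long. A bounds-checked alternative body for the read method is sketched here for illustration; it is not part of this commit:

// Sketch: copy at most the bytes remaining in the page and report the real count.
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
    let available = self.data.len().saturating_sub(self.pos);
    let size = buf.len().min(available);
    buf[..size].copy_from_slice(&self.data[self.pos..self.pos + size]);
    self.pos += size;
    Ok(size)
}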
|
||||
|
||||
impl Seek for DataPage {
|
||||
fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
|
||||
let cur = self.pos;
|
||||
|
||||
let newpos = match pos {
|
||||
SeekFrom::Start(x) => {
|
||||
if x > self.data.len() as u64 {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"Seek position out of buffer bounds.",
|
||||
));
|
||||
}
|
||||
x
|
||||
}
|
||||
SeekFrom::End(x) => {
|
||||
if x > self.data.len() as i64 {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"Seek position out of buffer bounds.",
|
||||
));
|
||||
}
|
||||
|
||||
(self.data.len() as i64 - x) as u64
|
||||
}
|
||||
SeekFrom::Current(x) => {
|
||||
let new = self.pos as i64 + x;
|
||||
|
||||
if new < 0 || new > self.data.len() as i64 {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"Seek position out of buffer bounds.",
|
||||
));
|
||||
}
|
||||
|
||||
new as u64
|
||||
}
|
||||
};
|
||||
|
||||
self.pos = newpos as usize;
|
||||
|
||||
Ok(cur as u64)
|
||||
}
|
||||
}
|
||||
|
||||
impl DataPage {
|
||||
pub(crate) fn load(offset: usize, data: Vec<u8>) -> Self {
|
||||
DataPage {
|
||||
offset,
|
||||
pos: 0,
|
||||
data,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.data.len()
|
||||
}
|
||||
|
||||
pub fn buffer(&self) -> &[u8] {
|
||||
&self.data
|
||||
}
|
||||
|
||||
pub fn offset(&self) -> usize {
|
||||
self.offset
|
||||
}
|
||||
|
||||
pub fn mutate(&mut self) -> WritableDataPage {
|
||||
WritableDataPage { page: self }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct WritableDataPage<'a> {
|
||||
page: &'a mut DataPage,
|
||||
}
|
||||
|
||||
impl Write for WritableDataPage<'_> {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
let pos = self.page.pos;
|
||||
|
||||
let len = buf.len().clamp(0, self.page.data.len() - pos);
|
||||
|
||||
//Is this neccessary?
|
||||
if len == 0 || buf.len() == 0 {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
self.page.data[pos..pos + len].copy_from_slice(&buf);
|
||||
|
||||
Ok(len)
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableDataPage<'_> {
|
||||
pub fn data(&self) -> &[u8] {
|
||||
self.page.buffer()
|
||||
}
|
||||
|
||||
pub fn offset(&self) -> usize {
|
||||
self.page.offset()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::DataPage;
|
||||
use std::io::{Read, Seek, SeekFrom};
|
||||
|
||||
fn make_page() -> DataPage {
|
||||
DataPage {
|
||||
offset: 0,
|
||||
data: vec![0; 128],
|
||||
pos: 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn datapage_seek() {
|
||||
let mut stream = make_page();
|
||||
|
||||
{
|
||||
let pos = stream.seek(SeekFrom::Start(64));
|
||||
assert!(pos.is_ok());
|
||||
assert_eq!(pos.unwrap(), 0);
|
||||
}
|
||||
|
||||
{
|
||||
let pos = stream.seek(SeekFrom::Start(128));
|
||||
assert!(pos.is_ok());
|
||||
assert_eq!(pos.unwrap(), 64);
|
||||
}
|
||||
|
||||
{
|
||||
let pos = stream.seek(SeekFrom::Start(256));
|
||||
assert!(pos.is_err());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn datapage_read() {
|
||||
let mut stream = make_page();
|
||||
let mut buf: [u8; 8] = [1; 8];
|
||||
|
||||
assert!(stream.read_exact(&mut buf).is_ok());
|
||||
|
||||
assert_eq!(&buf, &[0; 8]);
|
||||
assert_eq!(stream.stream_position().unwrap(), 8);
|
||||
}
|
||||
}
|
|
@@ -2,6 +2,8 @@ use config::try_load_preferences;
 use store::Store;
 
 mod config;
+mod crypto;
+mod data;
 mod store;
 
 fn main() {
@@ -13,7 +15,7 @@ fn main() {
 
     let _preferences = try_load_preferences();
 
-    let store = match store::local::load(
+    let mut store = match store::local::load(
         "/tmp/store.db".to_string(),
         "meowmeow".as_bytes().to_owned(),
     ) {
(deleted file, 905 lines)

@@ -1,905 +0,0 @@
use std::{
|
||||
arch::x86_64::_mm_crc32_u32,
|
||||
collections::{hash_map::DefaultHasher, HashMap},
|
||||
fs::File,
|
||||
hash::{Hash, Hasher},
|
||||
io::{BufReader, Read, Seek, SeekFrom, Write},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
use aes_gcm::{aead::Aead, Aes256Gcm, KeyInit, Nonce};
|
||||
use argon2::PasswordHasher;
|
||||
use password_hash::SaltString;
|
||||
use rand::{rngs::StdRng, RngCore, SeedableRng};
|
||||
|
||||
use super::{EncryptionStream, EncryptionType, HashType, RecordType, Store, StoreRecord};
|
||||
|
||||
pub struct LocalStore {
|
||||
path: String,
|
||||
header: LocalStoreHeader,
|
||||
meta: LocalStoreMeta,
|
||||
enc_ctx: EncryptionContext,
|
||||
cache: HashMap<String, LocalRecord>,
|
||||
}
|
||||
|
||||
/*
    Store binary format:
    Header: 0x00 (first 32 bytes):
        0x00 - magic number
        0x04 - last access timestamp
        0x08 - last write timestamp
        0x0c - encryption type byte
        0x0d - hash type byte
        0x0e - superblock size
        0x10 - cipher key size
        0x12 - encrypted block offset
        0x16 - header CRC-32 checksum
        0x1a - padding
    Enc details: 16 bytes
        0x20 - salt
        0x30 - master IV
        0x3c - 4 byte padding with 0
    ---Encrypted section starts here---
    --Block 0
    Superblock:
        Hash:
            0x40 - master password hash
        Metadata:
            0x60 - record count
            0x64 - specifier count
            0x68 - padding with 0
        Auth tag: 0x80
    --Block 1
    Index:
        Root node:
            0x90 - root node or invalid node if empty db
*/
|
||||
|
||||
//Proposal: Initialize IV to 0?
|
||||
|
||||
const MAGIC_NUM: u32 = 0x6d656f77;
|
||||
const HEADER_SIZE: usize = 32;
|
||||
|
||||
struct LocalStoreHeader {
|
||||
magic_h: u32,
|
||||
last_access: u32,
|
||||
last_write: u32,
|
||||
enc_type: EncryptionType,
|
||||
hash_type: HashType,
|
||||
enc_sup_block_size: u16,
|
||||
enc_key_size: u16,
|
||||
encrypted_block_offset: u32,
|
||||
chksum_crc: u32,
|
||||
}
|
||||
|
||||
impl LocalStoreHeader {
|
||||
fn get_checksum(&self) -> u32 {
|
||||
let mut crc: u32 = 0xffffffff;
|
||||
unsafe {
|
||||
crc = _mm_crc32_u32(crc, self.last_access);
|
||||
crc = _mm_crc32_u32(crc, self.last_write);
|
||||
crc = _mm_crc32_u32(crc, self.enc_type as u32);
|
||||
crc = _mm_crc32_u32(crc, self.hash_type as u32);
|
||||
crc = _mm_crc32_u32(crc, self.enc_sup_block_size as u32);
|
||||
crc = _mm_crc32_u32(crc, self.enc_key_size as u32);
|
||||
crc = _mm_crc32_u32(crc, self.encrypted_block_offset);
|
||||
}
|
||||
crc
|
||||
}
|
||||
|
||||
fn is_checksum_valid(&self) -> bool {
|
||||
let chk = self.get_checksum();
|
||||
chk == self.chksum_crc
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalStoreMeta {
|
||||
record_count: u32,
|
||||
specifier_count: u32,
|
||||
index_node_arity: u16,
|
||||
data_block_size: u32,
|
||||
data_offset: u64,
|
||||
}
|
||||
|
||||
struct EncryptionContext {
|
||||
enc_type: EncryptionType,
|
||||
hash_type: HashType,
|
||||
key: Vec<u8>,
|
||||
salt: SaltString,
|
||||
iv_root: Vec<u8>,
|
||||
}
|
||||
|
||||
fn try_deserialize_header(file: &mut BufReader<File>) -> Result<LocalStoreHeader, &'static str> {
|
||||
let mut header: LocalStoreHeader = LocalStoreHeader {
|
||||
magic_h: 0,
|
||||
last_access: 0,
|
||||
last_write: 0,
|
||||
enc_type: EncryptionType::Invalid,
|
||||
hash_type: HashType::Invalid,
|
||||
enc_sup_block_size: 0,
|
||||
enc_key_size: 0,
|
||||
chksum_crc: 0,
|
||||
encrypted_block_offset: 32,
|
||||
};
|
||||
|
||||
let mut buf: [u8; HEADER_SIZE] = [0; HEADER_SIZE];
|
||||
|
||||
if file.read_exact(&mut buf).is_err() {
|
||||
return Err("Read error.");
|
||||
}
|
||||
|
||||
header.magic_h = u32::from_be_bytes(buf[0..4].try_into().unwrap());
|
||||
if header.magic_h != MAGIC_NUM {
|
||||
return Err("Invalid header magic number.");
|
||||
}
|
||||
|
||||
header.last_access = u32::from_be_bytes(buf[4..8].try_into().unwrap());
|
||||
header.last_write = u32::from_be_bytes(buf[8..12].try_into().unwrap());
|
||||
header.enc_type = EncryptionType::from(buf[12]);
|
||||
header.hash_type = HashType::from(buf[13]);
|
||||
header.enc_sup_block_size = u16::from_be_bytes(buf[14..16].try_into().unwrap());
|
||||
header.enc_key_size = u16::from_be_bytes(buf[16..18].try_into().unwrap());
|
||||
header.encrypted_block_offset = u32::from_be_bytes(buf[18..22].try_into().unwrap());
|
||||
header.chksum_crc = u32::from_be_bytes(buf[22..26].try_into().unwrap());
|
||||
|
||||
if !header.is_checksum_valid() {
|
||||
return Err("Invalid checksum.");
|
||||
}
|
||||
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
fn try_deserialize_superblock(
|
||||
file: &mut BufReader<File>,
|
||||
enc: &EncryptionContext,
|
||||
header: &LocalStoreHeader,
|
||||
) -> Result<LocalStoreMeta, &'static str> {
|
||||
let data = enc
|
||||
.decrypt_block_from_file(header.enc_sup_block_size as usize, 0, file)
|
||||
.unwrap();
|
||||
|
||||
if data.data[0..enc.key.len()].ne(&enc.key) {
|
||||
return Err("Key mismatch.");
|
||||
}
|
||||
|
||||
let meta = deserialize_store_meta(&&data.data[32..]);
|
||||
|
||||
Ok(meta)
|
||||
}
|
||||
|
||||
fn serialize_header(header: &LocalStoreHeader) -> Vec<u8> {
|
||||
let mut buf = Vec::<u8>::new();
|
||||
buf.append(u32::to_be_bytes(MAGIC_NUM).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(header.last_access).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(header.last_write).to_vec().as_mut());
|
||||
buf.push(header.enc_type as u8);
|
||||
buf.push(header.hash_type as u8);
|
||||
buf.append(
|
||||
u16::to_be_bytes(header.enc_sup_block_size)
|
||||
.to_vec()
|
||||
.as_mut(),
|
||||
);
|
||||
buf.append(u16::to_be_bytes(header.enc_key_size).to_vec().as_mut());
|
||||
buf.append(
|
||||
u32::to_be_bytes(header.encrypted_block_offset)
|
||||
.to_vec()
|
||||
.as_mut(),
|
||||
);
|
||||
buf.append(u32::to_be_bytes(header.chksum_crc).to_vec().as_mut());
|
||||
|
||||
let padding_len = 32 - buf.len();
|
||||
let mut pad: Vec<u8> = vec![0; padding_len];
|
||||
buf.append(&mut pad);
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
fn try_create_store(path: &String, passphrase: &Vec<u8>) -> bool {
|
||||
let timestamp: u32 = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as u32;
|
||||
|
||||
let mut rng = StdRng::from_entropy();
|
||||
|
||||
let salt = SaltString::generate(&mut rng);
|
||||
|
||||
let (default_hash_type, default_enc_type) = (HashType::Argon2, EncryptionType::AesGcm);
|
||||
|
||||
let key = derive_key(&passphrase, &salt, default_hash_type).unwrap();
|
||||
|
||||
let mut iv: [u8; 12] = [0; 12];
|
||||
rng.fill_bytes(&mut iv);
|
||||
|
||||
let mut salt_buf: [u8; 16] = [0; 16];
|
||||
salt.b64_decode(&mut salt_buf).unwrap();
|
||||
|
||||
let enc = EncryptionContext {
|
||||
enc_type: default_enc_type,
|
||||
hash_type: default_hash_type,
|
||||
key,
|
||||
salt,
|
||||
iv_root: iv.to_vec(),
|
||||
};
|
||||
|
||||
let mut superblock_buf: Vec<u8> = Vec::new();
|
||||
|
||||
superblock_buf.extend_from_slice(&enc.key);
|
||||
if superblock_buf.len() != 32 {
|
||||
superblock_buf.append([b'\0'].repeat(32 - superblock_buf.len()).as_mut());
|
||||
}
|
||||
|
||||
let meta = LocalStoreMeta {
|
||||
record_count: 0,
|
||||
specifier_count: 0,
|
||||
index_node_arity: 16,
|
||||
data_block_size: 8092,
|
||||
data_offset: 0,
|
||||
};
|
||||
|
||||
superblock_buf.append(&mut serialize_store_meta(&meta));
|
||||
|
||||
let enc_buf = enc.encrypt_block(&superblock_buf, 0).unwrap();
|
||||
|
||||
let mut header: LocalStoreHeader = LocalStoreHeader {
|
||||
magic_h: MAGIC_NUM,
|
||||
last_access: timestamp,
|
||||
last_write: timestamp,
|
||||
enc_type: enc.enc_type,
|
||||
enc_sup_block_size: (enc_buf.len() - 16) as u16,
|
||||
enc_key_size: 256,
|
||||
hash_type: enc.hash_type,
|
||||
encrypted_block_offset: 32,
|
||||
chksum_crc: 0,
|
||||
};
|
||||
|
||||
header.chksum_crc = header.get_checksum();
|
||||
|
||||
let mut index_block: Vec<u8> = vec![0; meta.data_block_size as usize];
|
||||
let root_node_buf = IndexNode {
|
||||
node_type: IndexNodeType::Leaf,
|
||||
children: Vec::new(),
|
||||
}
|
||||
.serialize(&meta);
|
||||
|
||||
index_block[0..root_node_buf.len()].copy_from_slice(&root_node_buf);
|
||||
|
||||
let mut buf = serialize_header(&header);
|
||||
|
||||
buf.extend_from_slice(&salt_buf);
|
||||
buf.extend_from_slice(&iv);
|
||||
buf.append([b'\0'].repeat(16 - iv.len()).as_mut());
|
||||
buf.extend_from_slice(&enc_buf.data);
|
||||
buf.extend_from_slice(&enc.encrypt_block(&index_block, 0x90).unwrap().data);
|
||||
|
||||
match std::fs::File::create(path).unwrap().write(&buf) {
|
||||
Ok(x) => return x == buf.len(),
|
||||
Err(_) => return false,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_enc_salt_iv(file: &mut BufReader<File>) -> Result<(SaltString, Vec<u8>), &'static str> {
|
||||
let mut buf: [u8; 32] = [0; 32];
|
||||
|
||||
file.read_exact(&mut buf).unwrap();
|
||||
|
||||
let salt = match SaltString::b64_encode(&buf[..16]) {
|
||||
Err(_) => return Err("Invalid salt."),
|
||||
Ok(x) => x,
|
||||
};
|
||||
|
||||
let iv = buf[16..28].to_owned();
|
||||
Ok((salt, iv))
|
||||
}
|
||||
|
||||
fn derive_key(
|
||||
password: &Vec<u8>,
|
||||
salt: &SaltString,
|
||||
hash_type: HashType,
|
||||
) -> Result<Vec<u8>, &'static str> {
|
||||
let hash = match hash_type {
|
||||
HashType::Invalid => unreachable!(),
|
||||
HashType::Argon2 => {
|
||||
let argon = argon2::Argon2::default();
|
||||
argon.hash_password(password, salt)
|
||||
}
|
||||
HashType::Bcrypt => {
|
||||
todo!()
|
||||
}
|
||||
HashType::Pbkdf2 => {
|
||||
todo!()
|
||||
}
|
||||
};
|
||||
|
||||
if hash.is_err() {
|
||||
return Err("Hashing error");
|
||||
}
|
||||
|
||||
let hash = hash.unwrap();
|
||||
|
||||
let hash: Vec<u8> = hash.hash.unwrap().as_bytes().to_owned();
|
||||
Ok(hash)
|
||||
}
|
||||
|
||||
fn deserialize_store_meta(data: &[u8]) -> LocalStoreMeta {
|
||||
let records = u32::from_be_bytes(data[0..4].try_into().unwrap());
|
||||
let specifiers = u32::from_be_bytes(data[4..8].try_into().unwrap());
|
||||
let index_node_arity = u16::from_be_bytes(data[8..10].try_into().unwrap());
|
||||
let data_block_size = u32::from_be_bytes(data[10..14].try_into().unwrap());
|
||||
let data_offset = u64::from_be_bytes(data[14..22].try_into().unwrap());
|
||||
|
||||
LocalStoreMeta {
|
||||
record_count: records,
|
||||
specifier_count: specifiers,
|
||||
index_node_arity,
|
||||
data_block_size,
|
||||
data_offset,
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_store_meta(meta: &LocalStoreMeta) -> Vec<u8> {
|
||||
let mut buf: Vec<u8> = Vec::with_capacity(32);
|
||||
|
||||
buf.append(u32::to_be_bytes(meta.record_count).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(meta.specifier_count).to_vec().as_mut());
|
||||
buf.append(u16::to_be_bytes(meta.index_node_arity).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(meta.data_block_size).to_vec().as_mut());
|
||||
buf.append(u64::to_be_bytes(meta.data_offset).to_vec().as_mut());
|
||||
buf.append([b'\0'].repeat(32 - buf.len()).as_mut());
|
||||
|
||||
buf
|
||||
}
|
||||
|
||||
pub fn load(path: String, passphrase: Vec<u8>) -> Result<LocalStore, &'static str> {
|
||||
let path_p = Path::new(&path);
|
||||
|
||||
if !path_p.exists() {
|
||||
if !try_create_store(&path, &passphrase) {
|
||||
return Err("Cannot create store.");
|
||||
}
|
||||
} else if !path_p.is_file() {
|
||||
return Err("Invalid path.");
|
||||
}
|
||||
|
||||
let file = match File::open(&path_p) {
|
||||
Ok(x) => x,
|
||||
Err(_) => return Err("Cannot open store."),
|
||||
};
|
||||
|
||||
let mut reader = BufReader::new(file);
|
||||
|
||||
let header = match try_deserialize_header(&mut reader) {
|
||||
Err(x) => return Err(x),
|
||||
Ok(meta) => meta,
|
||||
};
|
||||
|
||||
let (salt, iv) = match get_enc_salt_iv(&mut reader) {
|
||||
Ok(s) => s,
|
||||
Err(x) => return Err(x),
|
||||
};
|
||||
|
||||
let key = match derive_key(&passphrase, &salt, header.hash_type) {
|
||||
Ok(x) => x,
|
||||
Err(x) => return Err(x),
|
||||
};
|
||||
|
||||
let enc_ctx = EncryptionContext {
|
||||
enc_type: header.enc_type,
|
||||
hash_type: header.hash_type,
|
||||
key,
|
||||
salt,
|
||||
iv_root: iv,
|
||||
};
|
||||
|
||||
let meta = match try_deserialize_superblock(&mut reader, &enc_ctx, &header) {
|
||||
Ok(d) => d,
|
||||
Err(x) => return Err(x),
|
||||
};
|
||||
|
||||
let store = LocalStore {
|
||||
path,
|
||||
header,
|
||||
enc_ctx,
|
||||
meta,
|
||||
cache: HashMap::new(),
|
||||
};
|
||||
|
||||
return Ok(store);
|
||||
}
|
||||
|
||||
struct LocalRecord {
|
||||
record: StoreRecord,
|
||||
position: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
const CRED_FLAG_METADATA: u8 = 0b0000100;
|
||||
|
||||
impl LocalRecord {
|
||||
fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = Vec::<u8>::new();
|
||||
|
||||
buf.push(self.record.r#type as u8);
|
||||
|
||||
let mut flags: u8 = 0;
|
||||
|
||||
if self.record.meta.is_some() {
|
||||
flags |= CRED_FLAG_METADATA;
|
||||
}
|
||||
buf.push(flags);
|
||||
|
||||
let spec_bytes = self.record.specifier.bytes();
|
||||
let key_bytes = self.record.key.bytes();
|
||||
let value_bytes = &self.record.value;
|
||||
let meta_len = if self.record.meta.is_none() {
|
||||
0
|
||||
} else {
|
||||
self.record.meta.as_ref().unwrap().len()
|
||||
};
|
||||
|
||||
buf.append(u32::to_be_bytes(spec_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(key_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(value_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(meta_len as u32).to_vec().as_mut());
|
||||
|
||||
buf.extend(spec_bytes);
|
||||
buf.extend(key_bytes);
|
||||
buf.extend(value_bytes);
|
||||
|
||||
if meta_len > 0 {
|
||||
buf.extend(self.record.meta.as_ref().unwrap().bytes());
|
||||
}
|
||||
|
||||
buf
|
||||
}
|
||||
|
||||
fn from_block(bytes: &[u8]) -> Result<LocalRecord, &'static str> {
|
||||
let r#type = bytes[0];
|
||||
let flags = bytes[1];
|
||||
let spec_len = u32::from_be_bytes(bytes[2..6].try_into().unwrap()) as usize;
|
||||
let key_len = u32::from_be_bytes(bytes[6..10].try_into().unwrap()) as usize;
|
||||
let val_len = u32::from_be_bytes(bytes[10..14].try_into().unwrap()) as usize;
|
||||
let meta_len = u32::from_be_bytes(bytes[14..18].try_into().unwrap()) as usize;
|
||||
|
||||
if key_len < 1 {
|
||||
return Err("Invalid structure: key length is 0.");
|
||||
}
|
||||
|
||||
if spec_len < 1 {
|
||||
return Err("Invalid structure: specifier length is 0.");
|
||||
}
|
||||
|
||||
if val_len < 1 {
|
||||
return Err("Invalid structure: value length is 0.");
|
||||
}
|
||||
|
||||
let key_offset: usize = 18 + spec_len;
|
||||
let val_offset: usize = key_offset + key_len;
|
||||
|
||||
let specifier = std::str::from_utf8(&bytes[18..(18 + spec_len)])
|
||||
.expect("Cannot deserialize the specifier.");
|
||||
let key = std::str::from_utf8(&bytes[key_offset..key_offset + key_len])
|
||||
.expect("Cannot deserialize the key.");
|
||||
let value = &bytes[val_offset..val_offset + val_len];
|
||||
let meta = if meta_len == 0 {
|
||||
None
|
||||
} else {
|
||||
let meta_offset = val_offset + val_len;
|
||||
Some(
|
||||
std::str::from_utf8(&bytes[meta_offset..meta_offset + meta_len])
|
||||
.expect("Cannot deserialize metadata.")
|
||||
.to_owned(),
|
||||
)
|
||||
};
|
||||
|
||||
let record = LocalRecord {
|
||||
record: StoreRecord {
|
||||
specifier: specifier.to_owned(),
|
||||
key: key.to_owned(),
|
||||
r#type: RecordType::from(r#type),
|
||||
value: value.to_vec(),
|
||||
meta,
|
||||
},
|
||||
position: 0,
|
||||
size: val_offset + val_len + meta_len,
|
||||
};
|
||||
|
||||
Ok(record)
|
||||
}
|
||||
}
|
||||
|
||||
impl Store for LocalStore {
|
||||
fn get_creds(&self, specifier: &String, key: &String) -> Option<StoreRecord> {
|
||||
match self.btree_find_key(&key) {
|
||||
(Some(x), _) => {
|
||||
println!("Found: {}", x);
|
||||
todo!()
|
||||
}
|
||||
(None, _) => return None,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_creds_by_specifier(&self, specifier: &String) -> Vec<StoreRecord> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn store_creds(
|
||||
&self,
|
||||
specifier: &String,
|
||||
key: &String,
|
||||
value: &Vec<u8>,
|
||||
r#type: RecordType,
|
||||
meta: Option<String>,
|
||||
) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
|
||||
enum IndexNodeType {
|
||||
Internal = 1,
|
||||
Leaf = 2,
|
||||
Invalid = 0,
|
||||
}
|
||||
|
||||
impl From<u8> for IndexNodeType {
|
||||
fn from(value: u8) -> Self {
|
||||
match value {
|
||||
0 => IndexNodeType::Invalid,
|
||||
1 => IndexNodeType::Internal,
|
||||
2 => IndexNodeType::Leaf,
|
||||
_ => panic!("Invalid value {}", value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct IndexNodeEntry {
|
||||
key: u64,
|
||||
pointer: u64,
|
||||
}
|
||||
|
||||
struct IndexNode {
|
||||
node_type: IndexNodeType,
|
||||
children: Vec<IndexNodeEntry>,
|
||||
}
|
||||
|
||||
impl IndexNode {
|
||||
fn children(&self) -> &Vec<IndexNodeEntry> {
|
||||
&self.children
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
self.children.len()
|
||||
}
|
||||
|
||||
fn serialize(&self, meta: &LocalStoreMeta) -> Vec<u8> {
|
||||
let mut buf: Vec<u8> = vec![0; 16 * meta.index_node_arity as usize + 2];
|
||||
|
||||
let mut cnt = 2;
|
||||
for child in self.children() {
|
||||
buf[cnt..(cnt + 8)].copy_from_slice(&child.key.to_be_bytes());
|
||||
buf[(cnt + 8)..(cnt + 16)].copy_from_slice(&child.pointer.to_be_bytes());
|
||||
cnt += 16;
|
||||
}
|
||||
|
||||
buf[0] = self.node_type as u8;
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
impl LocalStore {
|
||||
fn read_node(&self, data: &mut EncryptionStream) -> IndexNode {
|
||||
let node_type = {
|
||||
let mut buf: [u8; 2] = [0; 2];
|
||||
data.read_exact(&mut buf).expect("Cannot read node type.");
|
||||
IndexNodeType::from(buf[0])
|
||||
};
|
||||
|
||||
let mut children = Vec::with_capacity(self.meta.index_node_arity as usize);
|
||||
for _ in 0..self.meta.index_node_arity {
|
||||
let mut buf: [u8; 16] = [0; 16];
|
||||
|
||||
data.read_exact(&mut buf)
|
||||
.expect("Cannot read child node content.");
|
||||
|
||||
let key = u64::from_be_bytes(buf[0..8].try_into().unwrap());
|
||||
|
||||
if key == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
let pointer = u64::from_be_bytes(buf[0..8].try_into().unwrap());
|
||||
|
||||
let entry = IndexNodeEntry { key, pointer };
|
||||
|
||||
children.push(entry);
|
||||
}
|
||||
|
||||
let node = IndexNode {
|
||||
node_type,
|
||||
children,
|
||||
};
|
||||
|
||||
node
|
||||
}
|
||||
|
||||
fn btree_find_key(&self, key: &String) -> (Option<u64>, Vec<u64>) {
|
||||
let mut file: BufReader<File> =
|
||||
BufReader::new(File::open(&self.path).expect("Cannot open file"));
|
||||
|
||||
let hash = {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
key.hash(&mut hasher);
|
||||
hasher.finish()
|
||||
};
|
||||
|
||||
let mut traversal_history = Vec::new();
|
||||
|
||||
let pos = file.seek(SeekFrom::Start(0x90)).unwrap();
|
||||
let mut block = self
|
||||
.enc_ctx
|
||||
.decrypt_block_from_file(self.meta.data_block_size as usize, 0x90, &mut file)
|
||||
.unwrap();
|
||||
|
||||
loop {
|
||||
traversal_history.push(block.stream_position().unwrap());
|
||||
let node = self.read_node(&mut block);
|
||||
|
||||
if node.node_type == IndexNodeType::Invalid {
|
||||
panic!(
|
||||
"Invalid node detected: {}",
|
||||
block.stream_position().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
if node.node_type == IndexNodeType::Leaf {
|
||||
for child in node.children() {
|
||||
if child.key == hash {
|
||||
return (Some(child.pointer), traversal_history);
|
||||
}
|
||||
}
|
||||
return (None, traversal_history);
|
||||
}
|
||||
|
||||
for child in node.children {
|
||||
if child.key < hash {
|
||||
continue;
|
||||
}
|
||||
|
||||
block.seek(SeekFrom::Start(child.pointer)).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn nonce_offset(nonce_root: &Vec<u8>, offset: usize) -> Vec<u8> {
|
||||
let len = nonce_root.len();
|
||||
if len < (usize::BITS / 8) as usize {
|
||||
panic!("Nonce length less than {}", usize::BITS / 8);
|
||||
}
|
||||
let start = len - (usize::BITS / 8) as usize;
|
||||
|
||||
let mut nonce_copy = vec![0; len];
|
||||
nonce_copy.clone_from_slice(&nonce_root);
|
||||
|
||||
let mut num = u64::from_be_bytes(nonce_copy[start..len].try_into().unwrap());
|
||||
num += offset as u64;
|
||||
nonce_copy[start..len].copy_from_slice(&num.to_be_bytes());
|
||||
|
||||
nonce_copy
|
||||
}
|
||||
|
||||
impl EncryptionContext {
|
||||
fn decrypt_block(
|
||||
&self,
|
||||
ciphertext: &Vec<u8>,
|
||||
offset: usize,
|
||||
) -> Result<EncryptionStream, String> {
|
||||
let plaintext = match self.enc_type {
|
||||
EncryptionType::Invalid => unreachable!(),
|
||||
EncryptionType::AesGcm => {
|
||||
let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
|
||||
let aes = Aes256Gcm::new(aes_key);
|
||||
|
||||
let nonce = nonce_offset(&self.iv_root, offset);
|
||||
let nonce = Nonce::from_slice(&nonce);
|
||||
|
||||
aes.decrypt(nonce, ciphertext.as_ref())
|
||||
}
|
||||
EncryptionType::Chacha20Poly1305 => todo!(),
|
||||
};
|
||||
|
||||
if plaintext.is_err() {
|
||||
return Err(format!("Decryption error: {}", plaintext.unwrap_err()));
|
||||
}
|
||||
|
||||
Ok(EncryptionStream {
|
||||
data: plaintext.unwrap(),
|
||||
pos: 0,
|
||||
})
|
||||
}
|
||||
|
||||
fn encrypt_block(
|
||||
&self,
|
||||
plaintext: &Vec<u8>,
|
||||
offset: usize,
|
||||
) -> Result<EncryptionStream, &'static str> {
|
||||
let ciphertext = match self.enc_type {
|
||||
EncryptionType::Invalid => unreachable!(),
|
||||
EncryptionType::AesGcm => {
|
||||
let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
|
||||
let aes = Aes256Gcm::new(aes_key);
|
||||
|
||||
let nonce = nonce_offset(&self.iv_root, offset);
|
||||
let nonce = Nonce::from_slice(&nonce);
|
||||
|
||||
aes.encrypt(nonce, plaintext.as_ref())
|
||||
}
|
||||
EncryptionType::Chacha20Poly1305 => todo!(),
|
||||
};
|
||||
|
||||
if ciphertext.is_err() {
|
||||
return Err("Encryption error");
|
||||
}
|
||||
|
||||
Ok(EncryptionStream {
|
||||
data: ciphertext.unwrap(),
|
||||
pos: 0,
|
||||
})
|
||||
}
|
||||
|
||||
fn decrypt_block_from_file(
|
||||
&self,
|
||||
len: usize,
|
||||
offset: usize,
|
||||
file: &mut BufReader<File>,
|
||||
) -> Result<EncryptionStream, String> {
|
||||
let mut buf: Vec<u8> = vec![0; len + 16];
|
||||
match file.read_exact(&mut buf) {
|
||||
Err(err) => Err(format!("Read error: {}", err.to_string())),
|
||||
Ok(_) => self.decrypt_block(&buf, offset),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::mem::size_of;
|
||||
|
||||
use crate::store::{
|
||||
local::{nonce_offset, LocalStoreHeader, HEADER_SIZE},
|
||||
StoreRecord,
|
||||
};
|
||||
|
||||
use super::{IndexNode, IndexNodeEntry, IndexNodeType, LocalRecord};
|
||||
|
||||
#[test]
|
||||
fn header_size_const_greater_than_realsize() {
|
||||
let c = HEADER_SIZE;
|
||||
let r = size_of::<LocalStoreHeader>();
|
||||
|
||||
assert!(c > r, "const = {}, real = {}", c, r);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonce_offset_increment() {
|
||||
{
|
||||
let nonce: Vec<u8> = vec![0; 12];
|
||||
|
||||
let nonce_off = nonce_offset(&nonce, 0xfeeddeadbeef);
|
||||
|
||||
assert!(
|
||||
nonce_off[11] == 0xef
|
||||
&& nonce_off[10] == 0xbe
|
||||
&& nonce_off[9] == 0xad
|
||||
&& nonce_off[8] == 0xde
|
||||
&& nonce_off[7] == 0xed
|
||||
&& nonce_off[6] == 0xfe,
|
||||
"\ninitial = {:x?}\nnew = {:x?}",
|
||||
nonce,
|
||||
nonce_off
|
||||
);
|
||||
}
|
||||
|
||||
//Carry test
|
||||
{
|
||||
let nonce: Vec<u8> = vec![0xfe; 12];
|
||||
|
||||
let nonce_off = nonce_offset(&nonce, 0x2);
|
||||
|
||||
assert!(
|
||||
nonce_off[11] == 0x00
|
||||
&& nonce_off[10] == 0xff
|
||||
&& nonce_off[9] == 0xfe
|
||||
&& nonce_off[8] == 0xfe
|
||||
&& nonce_off[7] == 0xfe
|
||||
&& nonce_off[6] == 0xfe,
|
||||
"\ninitial = {:x?}\nnew = {:x?}",
|
||||
nonce,
|
||||
nonce_off
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn localstore_record_serialize() {
|
||||
let record = LocalRecord {
|
||||
position: 0,
|
||||
size: 0,
|
||||
record: StoreRecord {
|
||||
key: "meow".to_owned(),
|
||||
meta: None,
|
||||
specifier: "test".to_owned(),
|
||||
r#type: crate::store::RecordType::LoginPassword,
|
||||
value: vec![1, 2, 3, 4],
|
||||
},
|
||||
};
|
||||
|
||||
let serialized = record.serialize();
|
||||
|
||||
println!("D: {:x?}", serialized);
|
||||
|
||||
let deserialized = LocalRecord::from_block(&serialized).unwrap();
|
||||
|
||||
assert_eq!(deserialized.record, record.record);
|
||||
|
||||
let reserialized = deserialized.serialize();
|
||||
|
||||
assert_eq!(serialized, reserialized);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn index_node_serialize() {
|
||||
let root = IndexNode {
|
||||
node_type: IndexNodeType::Internal,
|
||||
children: vec![IndexNodeEntry {
|
||||
key: 1,
|
||||
pointer: 258,
|
||||
}],
|
||||
};
|
||||
|
||||
let leaf = IndexNode {
|
||||
node_type: IndexNodeType::Leaf,
|
||||
children: vec![
|
||||
IndexNodeEntry {
|
||||
key: 1,
|
||||
pointer: 516,
|
||||
},
|
||||
IndexNodeEntry {
|
||||
key: 4,
|
||||
pointer: 520,
|
||||
},
|
||||
IndexNodeEntry {
|
||||
key: 16,
|
||||
pointer: 524,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let meta = super::LocalStoreMeta {
|
||||
record_count: 3,
|
||||
specifier_count: 0,
|
||||
index_node_arity: 16,
|
||||
data_block_size: 8192,
|
||||
data_offset: 0,
|
||||
};
|
||||
|
||||
let root_bytes = root.serialize(&meta);
|
||||
let leaf_bytes = leaf.serialize(&meta);
|
||||
|
||||
assert_eq!(IndexNodeType::from(root_bytes[0]), root.node_type);
|
||||
assert_eq!(IndexNodeType::from(leaf_bytes[0]), leaf.node_type);
|
||||
|
||||
assert_eq!(root_bytes[2..10], root.children[0].key.to_be_bytes());
|
||||
assert_eq!(root_bytes[10..18], root.children[0].pointer.to_be_bytes());
|
||||
assert!(root_bytes.iter().skip(18).all(|x| *x == 0u8));
|
||||
|
||||
let mut cnt = 2;
|
||||
for child in leaf.children {
|
||||
assert_eq!(leaf_bytes[cnt..(cnt + 8)], child.key.to_be_bytes());
|
||||
assert_eq!(
|
||||
leaf_bytes[(cnt + 8)..(cnt + 16)],
|
||||
child.pointer.to_be_bytes()
|
||||
);
|
||||
cnt += 16;
|
||||
}
|
||||
assert!(root_bytes.iter().skip(cnt).all(|x| *x == 0u8));
|
||||
}
|
||||
}
|
src/store/local/data.rs (new file, 135 lines)

@@ -0,0 +1,135 @@
use crate::store::{RecordType, StoreRecord};
|
||||
|
||||
pub(super) struct LocalRecord {
|
||||
record: StoreRecord,
|
||||
position: usize,
|
||||
size: usize,
|
||||
}
|
||||
|
||||
const CRED_FLAG_METADATA: u8 = 0b0000100;
|
||||
|
||||
impl LocalRecord {
|
||||
fn serialize(&self) -> Vec<u8> {
|
||||
let mut buf = Vec::<u8>::new();
|
||||
|
||||
buf.push(self.record.r#type as u8);
|
||||
|
||||
let mut flags: u8 = 0;
|
||||
|
||||
if self.record.meta.is_some() {
|
||||
flags |= CRED_FLAG_METADATA;
|
||||
}
|
||||
buf.push(flags);
|
||||
|
||||
let spec_bytes = self.record.specifier.bytes();
|
||||
let key_bytes = self.record.key.bytes();
|
||||
let value_bytes = &self.record.value;
|
||||
let meta_len = if self.record.meta.is_none() {
|
||||
0
|
||||
} else {
|
||||
self.record.meta.as_ref().unwrap().len()
|
||||
};
|
||||
|
||||
buf.append(u32::to_be_bytes(spec_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(key_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(value_bytes.len() as u32).to_vec().as_mut());
|
||||
buf.append(u32::to_be_bytes(meta_len as u32).to_vec().as_mut());
|
||||
|
||||
buf.extend(spec_bytes);
|
||||
buf.extend(key_bytes);
|
||||
buf.extend(value_bytes);
|
||||
|
||||
if meta_len > 0 {
|
||||
buf.extend(self.record.meta.as_ref().unwrap().bytes());
|
||||
}
|
||||
|
||||
buf
|
||||
}
|
||||
|
||||
fn from_block(bytes: &[u8]) -> Result<LocalRecord, &'static str> {
|
||||
let r#type = bytes[0];
|
||||
let flags = bytes[1];
|
||||
let spec_len = u32::from_be_bytes(bytes[2..6].try_into().unwrap()) as usize;
|
||||
let key_len = u32::from_be_bytes(bytes[6..10].try_into().unwrap()) as usize;
|
||||
let val_len = u32::from_be_bytes(bytes[10..14].try_into().unwrap()) as usize;
|
||||
let meta_len = u32::from_be_bytes(bytes[14..18].try_into().unwrap()) as usize;
|
||||
|
||||
if key_len < 1 {
|
||||
return Err("Invalid structure: key length is 0.");
|
||||
}
|
||||
|
||||
if spec_len < 1 {
|
||||
return Err("Invalid structure: specifier length is 0.");
|
||||
}
|
||||
|
||||
if val_len < 1 {
|
||||
return Err("Invalid structure: value length is 0.");
|
||||
}
|
||||
|
||||
let key_offset: usize = 18 + spec_len;
|
||||
let val_offset: usize = key_offset + key_len;
|
||||
|
||||
let specifier = std::str::from_utf8(&bytes[18..(18 + spec_len)])
|
||||
.expect("Cannot deserialize the specifier.");
|
||||
let key = std::str::from_utf8(&bytes[key_offset..key_offset + key_len])
|
||||
.expect("Cannot deserialize the key.");
|
||||
let value = &bytes[val_offset..val_offset + val_len];
|
||||
let meta = if meta_len == 0 {
|
||||
None
|
||||
} else {
|
||||
let meta_offset = val_offset + val_len;
|
||||
Some(
|
||||
std::str::from_utf8(&bytes[meta_offset..meta_offset + meta_len])
|
||||
.expect("Cannot deserialize metadata.")
|
||||
.to_owned(),
|
||||
)
|
||||
};
|
||||
|
||||
let record = LocalRecord {
|
||||
record: StoreRecord {
|
||||
specifier: specifier.to_owned(),
|
||||
key: key.to_owned(),
|
||||
r#type: RecordType::from(r#type),
|
||||
value: value.to_vec(),
|
||||
meta,
|
||||
},
|
||||
position: 0,
|
||||
size: val_offset + val_len + meta_len,
|
||||
};
|
||||
|
||||
Ok(record)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::LocalRecord;
|
||||
use crate::store::StoreRecord;
|
||||
|
||||
#[test]
|
||||
fn localstore_record_serialize() {
|
||||
let record = LocalRecord {
|
||||
position: 0,
|
||||
size: 0,
|
||||
record: StoreRecord {
|
||||
key: "meow".to_owned(),
|
||||
meta: None,
|
||||
specifier: "test".to_owned(),
|
||||
r#type: crate::store::RecordType::LoginPassword,
|
||||
value: vec![1, 2, 3, 4],
|
||||
},
|
||||
};
|
||||
|
||||
let serialized = record.serialize();
|
||||
|
||||
println!("D: {:x?}", serialized);
|
||||
|
||||
let deserialized = LocalRecord::from_block(&serialized).unwrap();
|
||||
|
||||
assert_eq!(deserialized.record, record.record);
|
||||
|
||||
let reserialized = deserialized.serialize();
|
||||
|
||||
assert_eq!(serialized, reserialized);
|
||||
}
|
||||
}

85
src/store/local/header.rs
Normal file

@ -0,0 +1,85 @@
use std::{
    arch::x86_64::_mm_crc32_u32,
    fs::File,
    io::{BufReader, Read},
};

use crate::crypto::{EncryptionType, HashType};

pub const MAGIC_NUM: u32 = 0x6d656f77;
pub const HEADER_SIZE: usize = 32;

pub(super) struct LocalStoreHeader {
    pub magic_h: u32,
    pub last_access: u32,
    pub last_write: u32,
    pub enc_type: EncryptionType,
    pub hash_type: HashType,
    pub enc_sup_block_size: u16,
    pub enc_key_size: u16,
    pub encrypted_block_offset: u32,
    pub chksum_crc: u32,
}

impl LocalStoreHeader {
    pub fn get_checksum(&self) -> u32 {
        let mut crc: u32 = 0xffffffff;
        unsafe {
            crc = _mm_crc32_u32(crc, self.last_access);
            crc = _mm_crc32_u32(crc, self.last_write);
            crc = _mm_crc32_u32(crc, self.enc_type as u32);
            crc = _mm_crc32_u32(crc, self.hash_type as u32);
            crc = _mm_crc32_u32(crc, self.enc_sup_block_size as u32);
            crc = _mm_crc32_u32(crc, self.enc_key_size as u32);
            crc = _mm_crc32_u32(crc, self.encrypted_block_offset);
        }
        crc
    }

    pub fn is_checksum_valid(&self) -> bool {
        let chk = self.get_checksum();
        chk == self.chksum_crc
    }
}

pub(super) fn try_deserialize_header(
    file: &mut BufReader<File>,
) -> Result<LocalStoreHeader, &'static str> {
    let mut header: LocalStoreHeader = LocalStoreHeader {
        magic_h: 0,
        last_access: 0,
        last_write: 0,
        enc_type: EncryptionType::Invalid,
        hash_type: HashType::Invalid,
        enc_sup_block_size: 0,
        enc_key_size: 0,
        chksum_crc: 0,
        encrypted_block_offset: 32,
    };

    let mut buf: [u8; HEADER_SIZE] = [0; HEADER_SIZE];

    if file.read_exact(&mut buf).is_err() {
        return Err("Read error.");
    }

    header.magic_h = u32::from_be_bytes(buf[0..4].try_into().unwrap());
    if header.magic_h != MAGIC_NUM {
        return Err("Invalid header magic number.");
    }

    header.last_access = u32::from_be_bytes(buf[4..8].try_into().unwrap());
    header.last_write = u32::from_be_bytes(buf[8..12].try_into().unwrap());
    header.enc_type = EncryptionType::from(buf[12]);
    header.hash_type = HashType::from(buf[13]);
    header.enc_sup_block_size = u16::from_be_bytes(buf[14..16].try_into().unwrap());
    header.enc_key_size = u16::from_be_bytes(buf[16..18].try_into().unwrap());
    header.encrypted_block_offset = u32::from_be_bytes(buf[18..22].try_into().unwrap());
    header.chksum_crc = u32::from_be_bytes(buf[22..26].try_into().unwrap());

    if !header.is_checksum_valid() {
        return Err("Invalid checksum.");
    }

    Ok(header)
}
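
// get_checksum() above relies on the SSE4.2 `crc32` instruction, so it only builds on
// x86_64 targets. A minimal, portable sketch of the same per-u32 update (CRC-32C with
// the reflected polynomial 0x82F63B78, matching _mm_crc32_u32's least-significant-byte-
// first order) could look like this; it is illustrative only and not part of this change:
#[allow(dead_code)]
fn crc32c_u32_soft(mut crc: u32, value: u32) -> u32 {
    for byte in value.to_le_bytes() {
        crc ^= byte as u32;
        for _ in 0..8 {
            // Conditionally XOR the reversed CRC-32C polynomial, one bit at a time.
            let mask = (crc & 1).wrapping_neg();
            crc = (crc >> 1) ^ (0x82F6_3B78 & mask);
        }
    }
    crc
}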

258
src/store/local/index.rs
Normal file

@ -0,0 +1,258 @@
use std::{
    collections::hash_map::DefaultHasher,
    hash::{Hash, Hasher},
    io::{Read, Seek, SeekFrom},
};

use crate::data::{DataPage, DataPager};

use super::{LocalStore, LocalStoreMeta};

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
pub(super) enum IndexNodeType {
    Internal = 1,
    Leaf = 2,
    Invalid = 0,
}

impl From<u8> for IndexNodeType {
    fn from(value: u8) -> Self {
        match value {
            0 => IndexNodeType::Invalid,
            1 => IndexNodeType::Internal,
            2 => IndexNodeType::Leaf,
            _ => panic!("Invalid value {}", value),
        }
    }
}

pub(super) struct IndexNodeEntry {
    key: u64,
    pointer: u64,
}

pub(super) struct IndexNode {
    node_type: IndexNodeType,
    children: Vec<IndexNodeEntry>,
}

impl IndexNode {
    pub fn new() -> Self {
        IndexNode {
            node_type: IndexNodeType::Leaf,
            children: Vec::new(),
        }
    }

    pub fn children(&self) -> &Vec<IndexNodeEntry> {
        &self.children
    }

    pub fn len(&self) -> usize {
        self.children.len()
    }

    pub fn serialize(&self, meta: &LocalStoreMeta) -> Vec<u8> {
        let mut buf: Vec<u8> = vec![0; 16 * meta.index_node_arity as usize + 2];

        let mut cnt = 2;
        for child in self.children() {
            buf[cnt..(cnt + 8)].copy_from_slice(&child.key.to_be_bytes());
            buf[(cnt + 8)..(cnt + 16)].copy_from_slice(&child.pointer.to_be_bytes());
            cnt += 16;
        }

        buf[0] = self.node_type as u8;
        buf
    }
}
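
// On-disk node layout implied by serialize() above and read_node() below (a sketch
// derived from this code, not a separate spec):
//
//   byte 0           node type (1 = internal, 2 = leaf, 0 = invalid)
//   byte 1           reserved padding
//   then index_node_arity entries of 16 bytes each:
//     bytes 0..8     key hash, big-endian u64 (0 marks an unused slot)
//     bytes 8..16    child/record pointer, big-endian u64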

pub fn hash_key(key: &String) -> u64 {
    let mut hasher = DefaultHasher::new();
    key.hash(&mut hasher);
    hasher.finish()
}
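
// Note: DefaultHasher's algorithm is not guaranteed to stay the same across Rust
// releases, which matters once the hashes are persisted in the on-disk index. A
// dependency-free, stable alternative (illustrative only, not what this change uses)
// is 64-bit FNV-1a:
#[allow(dead_code)]
fn hash_key_stable(key: &str) -> u64 {
    let mut hash: u64 = 0xcbf2_9ce4_8422_2325; // FNV offset basis
    for byte in key.as_bytes() {
        hash ^= *byte as u64;
        hash = hash.wrapping_mul(0x0000_0100_0000_01b3); // FNV prime
    }
    hash
}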

pub(super) fn read_node(meta: &LocalStoreMeta, data: &mut DataPage) -> IndexNode {
    let node_type = {
        let mut buf: [u8; 2] = [0; 2];
        data.read_exact(&mut buf).expect("Cannot read node type.");
        IndexNodeType::from(buf[0])
    };

    let mut children = Vec::with_capacity(meta.index_node_arity as usize);
    for _ in 0..meta.index_node_arity {
        let mut buf: [u8; 16] = [0; 16];

        data.read_exact(&mut buf)
            .expect("Cannot read child node content.");

        let key = u64::from_be_bytes(buf[0..8].try_into().unwrap());

        if key == 0 {
            break;
        }

        // The pointer occupies the second half of the 16-byte entry.
        let pointer = u64::from_be_bytes(buf[8..16].try_into().unwrap());

        let entry = IndexNodeEntry { key, pointer };

        children.push(entry);
    }

    let node = IndexNode {
        node_type,
        children,
    };

    node
}

impl LocalStore {
    pub(super) fn btree_find_key(&mut self, key: &String) -> (Option<u64>, Vec<u64>) {
        let hash = hash_key(key);

        let mut traversal_history = Vec::new();

        let block = self.io.get_page(0xa0).expect("IO Error.");

        loop {
            traversal_history.push(block.stream_position().unwrap());
            let node = read_node(&self.meta, block);

            if node.node_type == IndexNodeType::Invalid {
                panic!(
                    "Invalid node detected: {}",
                    block.stream_position().unwrap()
                );
            }

            if node.node_type == IndexNodeType::Leaf {
                for child in node.children() {
                    if child.key == hash {
                        return (Some(child.pointer), traversal_history);
                    }
                }
                return (None, traversal_history);
            }

            for child in node.children {
                if child.key < hash {
                    continue;
                }

                block.seek(SeekFrom::Start(child.pointer)).unwrap();
            }
        }
    }

    pub(super) fn btree_insert_key(
        &mut self,
        key: &String,
        pointer: u64,
    ) -> Result<Vec<u64>, String> {
        let (result, traversal_history) = self.btree_find_key(key);

        if result.is_some() {
            return Err("Value already exists".to_string());
        }

        let hash = hash_key(key);

        let mut block = &mut self.io.get_page(0xa0).expect("IO Error.");

        if traversal_history.is_empty() {
            return Err("No nodes found...".to_string());
        }

        let position = *traversal_history.last().unwrap();

        block.seek(SeekFrom::Start(position)).unwrap();

        let mut node = read_node(&self.meta, &mut block);
        if node.len() == self.meta.index_node_arity as usize {
            //TODO Reorganize nodes
            todo!()
        }

        let entry = IndexNodeEntry { key: hash, pointer };

        node.children.push(entry);

        block.seek(SeekFrom::Start(position)).unwrap();
        node.serialize(&self.meta);

        //todo serialize

        todo!()
    }

    pub(super) fn btree_delete_key(&self, key: &String) {
        let _hash = hash_key(key);

        todo!()
    }
}

#[cfg(test)]
mod tests {
    use super::{IndexNode, IndexNodeEntry, IndexNodeType};

    #[test]
    fn index_node_serialize() {
        let root = IndexNode {
            node_type: IndexNodeType::Internal,
            children: vec![IndexNodeEntry {
                key: 1,
                pointer: 258,
            }],
        };

        let leaf = IndexNode {
            node_type: IndexNodeType::Leaf,
            children: vec![
                IndexNodeEntry {
                    key: 1,
                    pointer: 516,
                },
                IndexNodeEntry {
                    key: 4,
                    pointer: 520,
                },
                IndexNodeEntry {
                    key: 16,
                    pointer: 524,
                },
            ],
        };

        let meta = super::LocalStoreMeta {
            record_count: 3,
            specifier_count: 0,
            index_node_arity: 16,
            data_block_size: 8192,
            data_offset: 0,
        };

        let root_bytes = root.serialize(&meta);
        let leaf_bytes = leaf.serialize(&meta);

        assert_eq!(IndexNodeType::from(root_bytes[0]), root.node_type);
        assert_eq!(IndexNodeType::from(leaf_bytes[0]), leaf.node_type);

        assert_eq!(root_bytes[2..10], root.children[0].key.to_be_bytes());
        assert_eq!(root_bytes[10..18], root.children[0].pointer.to_be_bytes());
        assert!(root_bytes.iter().skip(18).all(|x| *x == 0u8));

        let mut cnt = 2;
        for child in leaf.children {
            assert_eq!(leaf_bytes[cnt..(cnt + 8)], child.key.to_be_bytes());
            assert_eq!(
                leaf_bytes[(cnt + 8)..(cnt + 16)],
                child.pointer.to_be_bytes()
            );
            cnt += 16;
        }
        // Check the remainder of the leaf buffer, which is what cnt actually tracks.
        assert!(leaf_bytes.iter().skip(cnt).all(|x| *x == 0u8));
    }
}

62
src/store/local/io.rs
Normal file

@ -0,0 +1,62 @@
use std::{
    collections::HashMap,
    fs::File,
    io::{BufReader, Read, Seek, SeekFrom},
};

use crate::{
    crypto::EncryptionContext,
    data::{DataPage, DataPager},
};

use super::LocalStoreMeta;

pub struct DataAccessor {
    path: String,
    cache: HashMap<usize, DataPage>,
    dirty_pages: Vec<DataPage>,
    enc: EncryptionContext,
    block_size: usize,
}

impl DataPager for DataAccessor {
    fn get_page(&mut self, offset: usize) -> Result<&mut DataPage, String> {
        if !self.cache.contains_key(&offset) {
            let data = self.read_page_from_file(offset);
            match data {
                Ok(x) => self.cache.insert(offset, x),
                Err(x) => return Err(x),
            };
        };

        Ok(self.cache.get_mut(&offset).unwrap())
    }

    fn update_page(&mut self, page: crate::data::WritableDataPage) -> Result<u64, ()> {
        self.dirty_pages
            .push(DataPage::load(page.offset(), page.data().to_vec()));
        return Ok(1);
    }
}
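
// Note: update_page only queues the modified page in dirty_pages; nothing in this file
// writes the queued pages back to disk yet, so flushing presumably arrives in a later
// change.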

impl DataAccessor {
    fn read_page_from_file(&mut self, offset: usize) -> Result<DataPage, String> {
        let mut file = BufReader::new(File::open(&self.path).unwrap());
        file.seek(SeekFrom::Start(offset as u64)).unwrap();

        let mut buf = vec![0; self.block_size + 16];
        file.read_exact(&mut buf).unwrap();

        self.enc.decrypt_block(&buf, offset)
    }

    pub(super) fn create(path: &String, enc: EncryptionContext, meta: &LocalStoreMeta) -> Self {
        DataAccessor {
            enc,
            path: path.clone(),
            cache: HashMap::new(),
            dirty_pages: Vec::new(),
            block_size: meta.data_block_size as usize,
        }
    }
}

333
src/store/local/mod.rs
Normal file

@ -0,0 +1,333 @@
mod data;
mod header;
mod index;
mod io;

use std::{
    collections::HashMap,
    fs::File,
    io::{BufReader, Read, Write},
    path::Path,
};

use password_hash::SaltString;

use crate::crypto::EncryptionContext;

use self::{
    data::LocalRecord,
    header::{try_deserialize_header, LocalStoreHeader, MAGIC_NUM},
    index::IndexNode,
    io::DataAccessor,
};

use super::{RecordType, Store, StoreRecord};

pub struct LocalStore {
    path: String,
    header: LocalStoreHeader,
    meta: LocalStoreMeta,
    cache: HashMap<String, LocalRecord>,
    io: DataAccessor,
}

/*
    Store binary format:
    Header: 0x00 (first 32 bytes):
        0x00 - magic number
        0x04 - last access timestamp
        0x08 - last write timestamp
        0x0c - encryption type byte
        0x0d - hash type byte
        0x0e - superblock size
        0x10 - cipher key size
        0x12 - encrypted block offset
        0x16 - header CRC-32 checksum
        0x1a - padding (6 bytes)
    Enc details: 48 bytes
        0x20 - salt
        0x40 - master IV
        0x4c - 3 byte padding with 0
        0x4f - salt size
    ---Encrypted section starts here---
    --Block 0
    Superblock:
        Hash:
            0x50 - master password hash
        Metadata:
            0x70 - record count
            0x74 - specifier count
            0x88 - padding with 0
        Auth tag: 0x90
    --Block 1
    Index:
        Root node:
            0xa0 - root node or invalid node if empty db
*/
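
// The same layout expressed as named offsets (a readability sketch derived from the
// comment above; these constants are not used by the code in this change):
#[allow(dead_code)]
mod layout {
    pub const OFF_MAGIC: usize = 0x00;
    pub const OFF_LAST_ACCESS: usize = 0x04;
    pub const OFF_LAST_WRITE: usize = 0x08;
    pub const OFF_ENC_TYPE: usize = 0x0c;
    pub const OFF_HASH_TYPE: usize = 0x0d;
    pub const OFF_SUPERBLOCK_SIZE: usize = 0x0e;
    pub const OFF_KEY_SIZE: usize = 0x10;
    pub const OFF_ENC_BLOCK_OFFSET: usize = 0x12;
    pub const OFF_HEADER_CRC: usize = 0x16;
    pub const OFF_SALT: usize = 0x20;
    pub const OFF_MASTER_IV: usize = 0x40;
    pub const OFF_SALT_LEN: usize = 0x4f;
    pub const OFF_ENCRYPTED_SECTION: usize = 0x50;
    pub const OFF_ROOT_INDEX_NODE: usize = 0xa0;
}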

//Proposal: Initialize IV to 0?

struct LocalStoreMeta {
    record_count: u32,
    specifier_count: u32,
    index_node_arity: u16,
    data_block_size: u32,
    data_offset: u64,
}

fn try_deserialize_superblock(
    file: &mut BufReader<File>,
    enc: &EncryptionContext,
    header: &LocalStoreHeader,
) -> Result<LocalStoreMeta, &'static str> {
    unsafe {
        let data = match enc.decrypt_block_from_file(header.enc_sup_block_size as usize, 0, file) {
            Ok(x) => x,
            Err(_) => return Err("Possibly corrupted store superblock."),
        };

        if data.buffer()[0..enc.get_key().len()].ne(enc.get_key()) {
            return Err("Key mismatch.");
        }

        let meta = deserialize_store_meta(&data.buffer()[32..]);

        Ok(meta)
    }
}

fn serialize_header(header: &LocalStoreHeader) -> Vec<u8> {
    let mut buf = Vec::<u8>::new();
    buf.append(u32::to_be_bytes(MAGIC_NUM).to_vec().as_mut());
    buf.append(u32::to_be_bytes(header.last_access).to_vec().as_mut());
    buf.append(u32::to_be_bytes(header.last_write).to_vec().as_mut());
    buf.push(header.enc_type as u8);
    buf.push(header.hash_type as u8);
    buf.append(
        u16::to_be_bytes(header.enc_sup_block_size)
            .to_vec()
            .as_mut(),
    );
    buf.append(u16::to_be_bytes(header.enc_key_size).to_vec().as_mut());
    buf.append(
        u32::to_be_bytes(header.encrypted_block_offset)
            .to_vec()
            .as_mut(),
    );
    buf.append(u32::to_be_bytes(header.chksum_crc).to_vec().as_mut());

    let padding_len = 32 - buf.len();
    let mut pad: Vec<u8> = vec![0; padding_len];
    buf.append(&mut pad);

    return buf;
}
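
// Note: the fixed fields above total 26 bytes (4 + 4 + 4 + 1 + 1 + 2 + 2 + 4 + 4), so
// the zero padding brings the serialized header to the 32-byte HEADER_SIZE expected by
// header::try_deserialize_header.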

fn try_create_store(path: &String, passphrase: &Vec<u8>) -> bool {
    let timestamp: u32 = std::time::UNIX_EPOCH.elapsed().unwrap().as_secs() as u32;

    let enc = EncryptionContext::default(passphrase);

    let mut superblock_buf: Vec<u8> = Vec::new();

    unsafe {
        superblock_buf.extend_from_slice(&enc.get_key());
    }

    if superblock_buf.len() != 32 {
        superblock_buf.append([b'\0'].repeat(32 - superblock_buf.len()).as_mut());
    }

    let meta = LocalStoreMeta {
        record_count: 0,
        specifier_count: 0,
        index_node_arity: 16,
        data_block_size: 8092,
        data_offset: 0,
    };

    superblock_buf.append(&mut serialize_store_meta(&meta));
    superblock_buf.extend_from_slice(&[b'\0'].repeat(64 - superblock_buf.len()));

    let enc_buf = enc.encrypt_block(&superblock_buf, 0).unwrap();

    let mut header: LocalStoreHeader = LocalStoreHeader {
        magic_h: MAGIC_NUM,
        last_access: timestamp,
        last_write: timestamp,
        enc_type: enc.encryption_type(),
        enc_sup_block_size: (enc_buf.len() - 16) as u16,
        enc_key_size: 256,
        hash_type: enc.hash_type(),
        encrypted_block_offset: 32,
        chksum_crc: 0,
    };

    header.chksum_crc = header.get_checksum();

    let mut index_block: Vec<u8> = vec![0; meta.data_block_size as usize];
    let root_node_buf = IndexNode::new().serialize(&meta);

    index_block[0..root_node_buf.len()].copy_from_slice(&root_node_buf);

    let mut buf = serialize_header(&header);

    let mut salt_buf: [u8; 32] = [0; 32];
    enc.salt().b64_decode(&mut salt_buf).unwrap();
    if salt_buf.len() > 32 {
        return false;
    }
    buf.extend_from_slice(&salt_buf);

    let iv = &enc.root_iv();
    if iv.len() != 12 {
        return false;
    }
    buf.extend_from_slice(iv);
    buf.extend_from_slice(&[b'\0'].repeat(4));

    buf.extend_from_slice(&[b'\0'].repeat(0x50 - buf.len()));
    buf[0x4f] = 16;

    buf.extend_from_slice(&enc_buf.buffer());
    buf.extend_from_slice(&enc.encrypt_block(&index_block, 0xa0).unwrap().buffer());

    match std::fs::File::create(path).unwrap().write(&buf) {
        Ok(x) => return x == buf.len(),
        Err(_) => return false,
    }
}

fn get_enc_salt_iv(file: &mut BufReader<File>) -> Result<(SaltString, Vec<u8>), &'static str> {
    let mut buf: [u8; 48] = [0; 48];

    file.read_exact(&mut buf).unwrap();

    let salt_len = buf[47] as usize;

    let salt = match SaltString::b64_encode(&buf[0..salt_len]) {
        Err(_) => return Err("Invalid salt."),
        Ok(x) => x,
    };

    let iv = buf[32..44].to_owned();
    Ok((salt, iv))
}

fn deserialize_store_meta(data: &[u8]) -> LocalStoreMeta {
    let records = u32::from_be_bytes(data[0..4].try_into().unwrap());
    let specifiers = u32::from_be_bytes(data[4..8].try_into().unwrap());
    let index_node_arity = u16::from_be_bytes(data[8..10].try_into().unwrap());
    let data_block_size = u32::from_be_bytes(data[10..14].try_into().unwrap());
    let data_offset = u64::from_be_bytes(data[14..22].try_into().unwrap());

    LocalStoreMeta {
        record_count: records,
        specifier_count: specifiers,
        index_node_arity,
        data_block_size,
        data_offset,
    }
}

fn serialize_store_meta(meta: &LocalStoreMeta) -> Vec<u8> {
    let mut buf: Vec<u8> = Vec::with_capacity(32);

    buf.append(u32::to_be_bytes(meta.record_count).to_vec().as_mut());
    buf.append(u32::to_be_bytes(meta.specifier_count).to_vec().as_mut());
    buf.append(u16::to_be_bytes(meta.index_node_arity).to_vec().as_mut());
    buf.append(u32::to_be_bytes(meta.data_block_size).to_vec().as_mut());
    buf.append(u64::to_be_bytes(meta.data_offset).to_vec().as_mut());
    buf.append([b'\0'].repeat(32 - buf.len()).as_mut());

    buf
}
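
// Relative layout of the 32-byte metadata blob written above and read back by
// deserialize_store_meta (offsets are within the blob, which sits at 0x70 in the
// decrypted superblock):
//
//   0x00  u32  record count     (big-endian)
//   0x04  u32  specifier count
//   0x08  u16  index node arity
//   0x0a  u32  data block size
//   0x0e  u64  data offset
//   0x16  ...  zero padding up to 32 bytes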

pub fn load(path: String, passphrase: Vec<u8>) -> Result<LocalStore, &'static str> {
    let path_p = Path::new(&path);

    if !path_p.exists() {
        if !try_create_store(&path, &passphrase) {
            return Err("Cannot create store.");
        }
    } else if !path_p.is_file() {
        return Err("Invalid path.");
    }

    let file = match File::open(&path_p) {
        Ok(x) => x,
        Err(_) => return Err("Cannot open store."),
    };

    let mut reader = BufReader::new(file);

    let header = match try_deserialize_header(&mut reader) {
        Err(x) => return Err(x),
        Ok(meta) => meta,
    };

    let (salt, iv) = match get_enc_salt_iv(&mut reader) {
        Ok(s) => s,
        Err(x) => return Err(x),
    };

    let enc_ctx =
        EncryptionContext::from_settings(&passphrase, header.enc_type, header.hash_type, salt, iv);

    let meta = match try_deserialize_superblock(&mut reader, &enc_ctx, &header) {
        Ok(d) => d,
        Err(x) => return Err(x),
    };

    let store = LocalStore {
        io: DataAccessor::create(&path, enc_ctx, &meta),
        path,
        header,
        meta,
        cache: HashMap::new(),
    };

    return Ok(store);
}
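
// Minimal usage sketch (the path and passphrase are made-up values for illustration):
//
//     let mut store = store::local::load("/tmp/example.store".to_string(), b"hunter2".to_vec())?;
//     let record = store.get_creds(&"test".to_string(), &"meow".to_string());
//
// Note that get_creds below is still a stub, so a successful lookup currently ends in
// todo!().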

impl Store for LocalStore {
    fn get_creds(&mut self, specifier: &String, key: &String) -> Option<StoreRecord> {
        match self.btree_find_key(&key) {
            (Some(x), _) => {
                println!("Found: {}", x);
                todo!()
            }
            (None, _) => return None,
        }
    }

    fn get_creds_by_specifier(&mut self, specifier: &String) -> Vec<StoreRecord> {
        todo!()
    }

    fn store_creds(
        &mut self,
        specifier: &String,
        key: &String,
        value: &Vec<u8>,
        r#type: RecordType,
        meta: Option<String>,
    ) {
        todo!()
    }
}

#[cfg(test)]
mod tests {
    use std::mem::size_of;

    use crate::store::local::{header::HEADER_SIZE, LocalStoreHeader};

    #[test]
    fn header_size_const_greater_than_realsize() {
        let c = HEADER_SIZE;
        let r = size_of::<LocalStoreHeader>();

        assert!(c > r, "const = {}, real = {}", c, r);
    }
}

160
src/store/mod.rs

@ -1,5 +1,3 @@
use std::io::{Read, Seek, SeekFrom};

pub(crate) mod local;
pub(crate) mod server;

@ -39,10 +37,10 @@ impl From<u8> for RecordType {
}

pub trait Store {
    fn get_creds(&self, specifier: &String, key: &String) -> Option<StoreRecord>;
    fn get_creds_by_specifier(&self, specifier: &String) -> Vec<StoreRecord>;
    fn get_creds(&mut self, specifier: &String, key: &String) -> Option<StoreRecord>;
    fn get_creds_by_specifier(&mut self, specifier: &String) -> Vec<StoreRecord>;
    fn store_creds(
        &self,
        &mut self,
        specifier: &String,
        key: &String,
        value: &Vec<u8>,

@ -50,155 +48,3 @@ pub trait Store {
        meta: Option<String>,
    );
}

#[derive(Clone, Copy, PartialEq, PartialOrd)]
pub enum EncryptionType {
    Invalid = 0,
    AesGcm = 1,
    Chacha20Poly1305 = 2,
}

impl From<u8> for EncryptionType {
    fn from(n: u8) -> Self {
        match n {
            0 => EncryptionType::Invalid,
            1 => EncryptionType::AesGcm,
            2 => EncryptionType::Chacha20Poly1305,
            _ => panic!("Invalid value '{}'.", n),
        }
    }
}

#[derive(Clone, Copy, PartialEq, PartialOrd)]
pub enum HashType {
    Invalid = 0,
    Argon2 = 1,
    Bcrypt = 2,
    Pbkdf2 = 4,
}

impl From<u8> for HashType {
    fn from(n: u8) -> Self {
        match n {
            0 => HashType::Invalid,
            1 => HashType::Argon2,
            2 => HashType::Bcrypt,
            4 => HashType::Pbkdf2,
            _ => panic!("Invalid value '{}'.", n),
        }
    }
}

struct EncryptionStream {
    data: Vec<u8>,
    pos: usize,
}

impl Read for EncryptionStream {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let range = self.pos..(self.pos + buf.len()).clamp(0, self.data.len() - buf.len());
        let size = range.len();
        buf.copy_from_slice(&self.data[range]);
        self.pos += size;
        Ok(size)
    }
}

impl Seek for EncryptionStream {
    fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result<u64> {
        let cur = self.pos;

        let newpos = match pos {
            SeekFrom::Start(x) => {
                if x > self.data.len() as u64 {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::InvalidInput,
                        "Seek position out of buffer bounds.",
                    ));
                }
                x
            }
            SeekFrom::End(x) => {
                if x > self.data.len() as i64 {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::InvalidInput,
                        "Seek position out of buffer bounds.",
                    ));
                }

                (self.data.len() as i64 - x) as u64
            }
            SeekFrom::Current(x) => {
                let new = self.pos as i64 + x;

                if new < 0 || new > self.data.len() as i64 {
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::InvalidInput,
                        "Seek position out of buffer bounds.",
                    ));
                }

                new as u64
            }
        };

        self.pos = newpos as usize;

        Ok(cur as u64)
    }
}

impl EncryptionStream {
    pub fn len(&self) -> usize {
        self.data.len()
    }
}

#[cfg(test)]
mod tests {
    use std::io::{Read, Seek, SeekFrom};

    use super::EncryptionStream;

    fn make_enc_stream() -> EncryptionStream {
        EncryptionStream {
            data: vec![0; 128],
            pos: 0,
        }
    }

    #[test]
    fn encryption_stream_seek() {
        let mut stream = make_enc_stream();

        {
            {
                let pos = stream.seek(SeekFrom::Start(64));
                assert!(pos.is_ok());
                assert_eq!(pos.unwrap(), 0);
            }

            {
                let pos = stream.seek(SeekFrom::Start(128));
                assert!(pos.is_ok());
                assert_eq!(pos.unwrap(), 64);
            }

            {
                let pos = stream.seek(SeekFrom::Start(256));
                assert!(pos.is_err());
            }
        }
    }

    #[test]
    fn encryption_stream_read() {
        let mut stream = make_enc_stream();
        let mut buf: [u8; 8] = [1; 8];

        assert!(stream.read_exact(&mut buf).is_ok());

        assert_eq!(&buf, &[0; 8]);
        assert_eq!(stream.stream_position().unwrap(), 8);
    }
}

@ -9,7 +9,7 @@ pub struct ServerStore {
pub fn load(address: String) -> Result<ServerStore, &'static str> {
    let url = match reqwest::Url::parse(&address) {
        Ok(x) => x,
        Err(x) => return Err("Invalid address."),
        Err(_) => return Err("Invalid address."),
    };

    let mut headers = reqwest::header::HeaderMap::new();

@ -41,16 +41,16 @@ impl ServerStore {
}

impl Store for ServerStore {
    fn get_creds(&self, specifier: &String, key: &String) -> Option<StoreRecord> {
    fn get_creds(&mut self, specifier: &String, key: &String) -> Option<StoreRecord> {
        todo!()
    }

    fn get_creds_by_specifier(&self, specifier: &String) -> Vec<StoreRecord> {
    fn get_creds_by_specifier(&mut self, specifier: &String) -> Vec<StoreRecord> {
        todo!()
    }

    fn store_creds(
        &self,
        &mut self,
        specifier: &String,
        key: &String,
        value: &Vec<u8>,