Restructure preferences. Use B+ Trees for indices. Decouple common blocks.

femsci 2022-12-28 11:03:04 +01:00
parent 4ec212dd4b
commit 444e15e3ca
Signed by: femsci
GPG key ID: 08F7911F0E650C67
5 changed files with 358 additions and 139 deletions

View file

@@ -1,10 +1,12 @@
 use std::{fs::File, io::Read, path::Path};
+use crate::store::HashType;
 
 pub struct Preferences {
+    _store_type: String,
     _store_path: String,
-    _pref_hash_algo: String,
+    _pref_hash_algo: HashType,
     _pref_enc_algo: String,
+    _pref_sign_algo: String,
 }
 
 pub fn try_load_preferences() -> Option<Preferences> {

View file

@@ -11,7 +11,7 @@ fn main() {
         //options
     }
 
-    try_load_preferences();
+    let _preferences = try_load_preferences();
 
     let store = match store::local::load(
         "/tmp/store.db".to_string(),
@@ -24,5 +24,5 @@ fn main() {
         Ok(x) => x,
     };
 
-    store.get_creds_by_key("meow".to_string());
+    store.get_creds("nya".to_string(), "meow".to_string());
 }

View file

@@ -1,7 +1,9 @@
 use std::{
     arch::x86_64::_mm_crc32_u32,
+    collections::{hash_map::DefaultHasher, HashMap},
     fs::File,
-    io::{BufReader, Read, Write},
+    hash::{Hash, Hasher},
+    io::{BufReader, Read, Seek, SeekFrom, Write},
     path::Path,
 };
@@ -10,12 +12,14 @@ use argon2::PasswordHasher;
 use password_hash::SaltString;
 use rand::{rngs::StdRng, RngCore, SeedableRng};
 
-use super::{Store, StoreRecord};
+use super::{EncryptionType, HashType, RecordType, Store, StoreRecord};
 
 pub struct LocalStore {
     path: String,
     header: LocalStoreHeader,
+    meta: LocalStoreMeta,
     enc_ctx: EncryptionContext,
+    cache: HashMap<String, LocalRecord>,
 }
 
 /*
@@ -39,13 +43,20 @@ pub struct LocalStore {
 --Block 0
 Superblock:
     Hash:
-    0x00 - master password hash
+    0x40 - master password hash
     Metadata:
-    0x20 - record count
-    0x24 - specifier count
-    0x08 -
+    0x60 - record count
+    0x64 - specifier count
+    0x68 - padding with 0
+    Auth tag: 0x80
+--Block 1
+Index:
+    Root node:
+    0x90 - root node or invalid node if empty db
 */
 
+//Proposal: Initialize IV to 0?
 const MAGIC_NUM: u32 = 0x6d656f77;
 const HEADER_SIZE: usize = 32;
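
Aside (not part of the commit): following the layout comment above and the write path in try_create_store, the plaintext superblock is the 32-byte master-password hash followed by the 32-byte metadata block that serialize_store_meta produces (two big-endian u32 counters, zero-padded). A minimal read-side sketch, using the hypothetical helper name parse_superblock:

// Sketch only (hypothetical helper, not in the commit): splits a decrypted
// superblock into the key hash and the two counters written by serialize_store_meta.
fn parse_superblock(plain: &[u8]) -> (&[u8], u32, u32) {
    let key_hash = &plain[0..32]; // master password hash, padded to 32 bytes
    let records = u32::from_be_bytes(plain[32..36].try_into().unwrap()); // record count
    let specifiers = u32::from_be_bytes(plain[36..40].try_into().unwrap()); // specifier count
    (key_hash, records, specifiers)
}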
@@ -78,14 +89,15 @@ impl LocalStoreHeader {
     fn is_checksum_valid(&self) -> bool {
         let chk = self.get_checksum();
-        /*println!(
-            "Checksum: {:x}, Expected: {:x}, Size:: {}",
-            self.chksum_crc, chk, HEADER_SIZE
-        );*/
         chk == self.chksum_crc
     }
 }
 
+struct LocalStoreMeta {
+    record_count: u32,
+    specifier_count: u32,
+}
+
 struct EncryptionContext {
     enc_type: EncryptionType,
     hash_type: HashType,
@@ -94,44 +106,6 @@ struct EncryptionContext {
     iv_root: Vec<u8>,
 }
 
-#[derive(Clone, Copy, PartialEq, PartialOrd)]
-enum EncryptionType {
-    Invalid = 0,
-    AesGcm = 1,
-    Chacha20Poly1305 = 2,
-}
-
-impl EncryptionType {
-    fn from(n: u8) -> EncryptionType {
-        match n {
-            0 => EncryptionType::Invalid,
-            1 => EncryptionType::AesGcm,
-            2 => EncryptionType::Chacha20Poly1305,
-            _ => panic!("Invalid value '{}'.", n),
-        }
-    }
-}
-
-#[derive(Clone, Copy, PartialEq, PartialOrd)]
-enum HashType {
-    Invalid = 0,
-    Argon2 = 1,
-    Bcrypt = 2,
-    Pbkdf2 = 4,
-}
-
-impl HashType {
-    fn from(n: u8) -> HashType {
-        match n {
-            0 => HashType::Invalid,
-            1 => HashType::Argon2,
-            2 => HashType::Bcrypt,
-            4 => HashType::Pbkdf2,
-            _ => panic!("Invalid value '{}'.", n),
-        }
-    }
-}
-
 fn try_deserialize_header(file: &mut BufReader<File>) -> Result<LocalStoreHeader, &'static str> {
     let mut header: LocalStoreHeader = LocalStoreHeader {
         magic_h: 0,
@@ -176,16 +150,18 @@ fn try_deserialize_superblock(
     file: &mut BufReader<File>,
     enc: &EncryptionContext,
     header: &LocalStoreHeader,
-) -> Result<String, &'static str> {
-    let mut buf: Vec<u8> = vec![0; header.enc_sup_block_size as usize];
+) -> Result<LocalStoreMeta, &'static str> {
+    let data = enc
+        .decrypt_block_from_file(header.enc_sup_block_size as usize, 0, file)
+        .unwrap();
 
-    if file.read_exact(&mut buf).is_err() {
-        return Err("Read error.");
+    if data[0..enc.key.len()].ne(&enc.key) {
+        return Err("Key mismatch.");
     }
 
-    let dat = enc.decrypt_block(&buf).unwrap();
-    println!("Superblocc: {:?}", &dat);
+    let meta = deserialize_store_meta(&data[31..]);
 
-    Ok(String::from_utf8_lossy(&dat).try_into().unwrap())
+    Ok(meta)
 }
 
 fn serialize_header(header: &LocalStoreHeader) -> Vec<u8> {
@@ -208,8 +184,8 @@ fn serialize_header(header: &LocalStoreHeader) -> Vec<u8> {
     );
     buf.append(u32::to_be_bytes(header.chksum_crc).to_vec().as_mut());
 
-    let padlen = 32 - buf.len();
-    let mut pad: Vec<u8> = vec![0; padlen];
+    let padding_len = 32 - buf.len();
+    let mut pad: Vec<u8> = vec![0; padding_len];
     buf.append(&mut pad);
 
     return buf;
@@ -240,21 +216,28 @@ fn try_create_store(path: &String, passphrase: &Vec<u8>) -> bool {
         iv_root: iv.to_vec(),
     };
 
-    let mut enc_buf: Vec<u8> = Vec::new();
-    enc_buf.extend_from_slice(&enc.key);
-    if enc_buf.len() != 32 {
-        enc_buf.append([b'\0'].repeat(32 - enc_buf.len()).as_mut());
+    let mut superblock_buf: Vec<u8> = Vec::new();
+    superblock_buf.extend_from_slice(&enc.key);
+    if superblock_buf.len() != 32 {
+        superblock_buf.append([b'\0'].repeat(32 - superblock_buf.len()).as_mut());
     }
 
-    let enc_buf = enc.encrypt_block(&enc_buf).unwrap();
+    let meta = LocalStoreMeta {
+        record_count: 0,
+        specifier_count: 0,
+    };
+
+    superblock_buf.append(&mut serialize_store_meta(&meta));
+
+    let enc_buf = enc.encrypt_block(&superblock_buf, 0).unwrap();
 
     let mut header: LocalStoreHeader = LocalStoreHeader {
         magic_h: MAGIC_NUM,
         last_access: timestamp,
         last_write: timestamp,
         enc_type: enc.enc_type,
-        enc_sup_block_size: enc_buf.len() as u16,
+        enc_sup_block_size: (enc_buf.len() - 16) as u16,
         enc_key_size: 256,
         hash_type: enc.hash_type,
         encrypted_block_offset: 32,
@@ -319,7 +302,25 @@ fn derive_key(
     Ok(hash)
 }
 
-fn get_specifier_map(file: &mut BufReader<File>, header: &LocalStoreHeader) {}
+fn deserialize_store_meta(data: &[u8]) -> LocalStoreMeta {
+    let records = u32::from_be_bytes(data[0..4].try_into().unwrap());
+    let specifiers = u32::from_be_bytes(data[4..8].try_into().unwrap());
+
+    LocalStoreMeta {
+        record_count: records,
+        specifier_count: specifiers,
+    }
+}
+
+fn serialize_store_meta(meta: &LocalStoreMeta) -> Vec<u8> {
+    let mut buf: Vec<u8> = Vec::with_capacity(32);
+
+    buf.append(u32::to_be_bytes(meta.record_count).to_vec().as_mut());
+    buf.append(u32::to_be_bytes(meta.specifier_count).to_vec().as_mut());
+    buf.append([b'\0'].repeat(32 - buf.len()).as_mut());
+
+    buf
+}
 
 pub fn load(path: String, passphrase: Vec<u8>) -> Result<LocalStore, &'static str> {
     let path_p = Path::new(&path);
@@ -362,112 +363,192 @@ pub fn load(path: String, passphrase: Vec<u8>) -> Result<LocalStore, &'static st
         iv_root: iv,
     };
 
-    let sup_data = match try_deserialize_superblock(&mut reader, &enc_ctx, &header) {
+    let meta = match try_deserialize_superblock(&mut reader, &enc_ctx, &header) {
         Ok(d) => d,
         Err(x) => return Err(x),
     };
 
-    println!("Data: {}", &sup_data);
-
-    let spec_map = get_specifier_map(&mut reader, &header);
-
     let store = LocalStore {
         path: path,
         header,
         enc_ctx,
+        meta,
+        cache: HashMap::new(),
     };
 
     return Ok(store);
 }
 
-struct CredentialRecord {
-    id: u64,
-    key: String,
-    value: String,
-    specifier: Option<String>,
-    meta: Option<String>,
+struct LocalRecord {
+    record: StoreRecord,
+    position: usize,
+    size: usize,
 }
 
-const CRED_FLAG_SPECIFIER: u8 = 0b0000010;
 const CRED_FLAG_METADATA: u8 = 0b0000100;
 
-impl CredentialRecord {
+impl LocalRecord {
     fn serialize(&self) -> Vec<u8> {
-        let mut bytes = Vec::<u8>::new();
+        let mut buf = Vec::<u8>::new();
+        buf.push(self.record.r#type as u8);
 
         let mut flags: u8 = 0;
-        if self.specifier.is_some() {
-            flags |= CRED_FLAG_SPECIFIER;
-            //put specifier here for better indexing
-            //use hashtable in superblock
-        }
-        if self.meta.is_some() {
+        if self.record.meta.is_some() {
            flags |= CRED_FLAG_METADATA;
        }
 
-        bytes.push(flags);
-        bytes.append(u64::to_be_bytes(self.id).to_vec().as_mut());
+        buf.push(flags);
 
-        let key_bytes = self.key.bytes();
-        let value_bytes = self.value.bytes();
+        let spec_bytes = self.record.specifier.bytes();
+        let key_bytes = self.record.key.bytes();
+        let value_bytes = &self.record.value;
+        let meta_len = if self.record.meta.is_none() {
+            0
+        } else {
+            self.record.meta.as_ref().unwrap().len()
+        };
 
-        bytes.append(u32::to_be_bytes(key_bytes.len() as u32).to_vec().as_mut());
-        bytes.append(u32::to_be_bytes(value_bytes.len() as u32).to_vec().as_mut());
+        buf.append(u32::to_be_bytes(spec_bytes.len() as u32).to_vec().as_mut());
+        buf.append(u32::to_be_bytes(key_bytes.len() as u32).to_vec().as_mut());
+        buf.append(u32::to_be_bytes(value_bytes.len() as u32).to_vec().as_mut());
+        buf.append(u32::to_be_bytes(meta_len as u32).to_vec().as_mut());
 
-        bytes.extend(key_bytes);
-        bytes.extend(value_bytes);
+        buf.extend(spec_bytes);
+        buf.extend(key_bytes);
+        buf.extend(value_bytes);
+        if meta_len > 0 {
+            buf.extend(self.record.meta.as_ref().unwrap().bytes());
+        }
 
-        bytes
+        buf
     }
 
-    fn from_block(bytes: &[u8]) {
-        let flags = bytes[0];
+    fn from_block(bytes: &[u8]) -> Result<LocalRecord, &'static str> {
+        let r#type = bytes[0];
+        let flags = bytes[1];
+        let spec_len = u32::from_be_bytes(bytes[2..6].try_into().unwrap()) as usize;
+        let key_len = u32::from_be_bytes(bytes[6..10].try_into().unwrap()) as usize;
+        let val_len = u32::from_be_bytes(bytes[10..14].try_into().unwrap()) as usize;
+        let meta_len = u32::from_be_bytes(bytes[14..18].try_into().unwrap()) as usize;
 
-        let id = u64::from_be_bytes(bytes[1..5].try_into().unwrap());
-        todo!()
+        if key_len < 1 {
+            return Err("Invalid structure: key length is 0.");
+        }
+        if spec_len < 1 {
+            return Err("Invalid structure: specifier length is 0.");
+        }
+        if val_len < 1 {
+            return Err("Invalid structure: value length is 0.");
+        }
+
+        let key_offset: usize = 18 + spec_len;
+        let val_offset: usize = key_offset + key_len;
+
+        let specifier = std::str::from_utf8(&bytes[18..(18 + spec_len)])
+            .expect("Cannot deserialize the specifier.");
+        let key = std::str::from_utf8(&bytes[key_offset..key_offset + key_len])
+            .expect("Cannot deserialize the key.");
+        let value = &bytes[val_offset..val_offset + val_len];
+        let meta = if meta_len == 0 {
+            None
+        } else {
+            let meta_offset = val_offset + val_len;
+            Some(
+                std::str::from_utf8(&bytes[meta_offset..meta_offset + meta_len])
+                    .expect("Cannot deserialize metadata.")
+                    .to_owned(),
+            )
+        };
+
+        let record = LocalRecord {
+            record: StoreRecord {
+                specifier: specifier.to_owned(),
+                key: key.to_owned(),
+                r#type: RecordType::from(r#type),
+                value: value.to_vec(),
+                meta: meta,
+            },
+            position: 0,
+            size: val_offset + val_len + meta_len,
+        };
+
+        Ok(record)
     }
-
-    fn encrypt(&self, typ: EncryptionType) {}
 }
 
 impl Store for LocalStore {
-    fn get_creds(&self, id: u64) -> Option<StoreRecord> {
+    fn get_creds(&self, specifier: String, key: String) -> Option<StoreRecord> {
+        self.btree_find_key(&key);
         todo!()
     }
 
-    fn get_creds_by_key(&self, key: String) -> Vec<StoreRecord> {
+    fn get_creds_by_specifier(&self, specifier: String) -> Vec<StoreRecord> {
         todo!()
     }
 
     fn store_creds(
         &self,
+        specifier: String,
         key: String,
-        value: String,
-        specifier: Option<String>,
+        value: Vec<u8>,
+        r#type: RecordType,
         meta: Option<String>,
     ) {
         todo!()
     }
 }
 
+impl LocalStore {
+    fn btree_find_key(&self, key: &String) {
+        let mut file: BufReader<File> =
+            BufReader::new(File::open(&self.path).expect("Cannot open file"));
+
+        let hash = {
+            let mut hasher = DefaultHasher::new();
+            key.hash(&mut hasher);
+            hasher.finish()
+        };
+
+        let pos = file.seek(SeekFrom::Start(0x90)).unwrap();
+        println!("Pos: {:x}", pos);
+    }
+}
+
+fn nonce_offset(nonce_root: &Vec<u8>, offset: usize) -> Vec<u8> {
+    let len = nonce_root.len();
+    if len < (usize::BITS / 8) as usize {
+        panic!("Nonce length less than {}", usize::BITS / 8);
+    }
+
+    let mut nonce_copy = vec![0; len];
+    nonce_copy.clone_from_slice(&nonce_root);
+
+    for i in 0..(usize::BITS / 8) {
+        println!(
+            "{}, {:x}",
+            len - (i as usize + 1),
+            (offset & (0xff << (8 * i))) >> (8 * i)
+        );
+        nonce_copy[len - (i as usize + 1)] += ((offset & (0xff << (8 * i))) >> (8 * i)) as u8;
+    }
+
+    nonce_copy
+}
 
 impl EncryptionContext {
-    fn decrypt_block(&self, ciphertext: &Vec<u8>) -> Result<Vec<u8>, &'static str> {
+    fn decrypt_block(&self, ciphertext: &Vec<u8>, offset: usize) -> Result<Vec<u8>, &'static str> {
         let plaintext = match self.enc_type {
             EncryptionType::Invalid => unreachable!(),
             EncryptionType::AesGcm => {
-                println!(
-                    "Aes dec: len: {} key: {:?}: iv: {:?}",
-                    ciphertext.len(),
-                    &self.key,
-                    &self.iv_root
-                );
                 let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
                 let aes = Aes256Gcm::new(aes_key);
-                let nonce = Nonce::from_slice(&self.iv_root);
+                let nonce = nonce_offset(&self.iv_root, offset);
+                let nonce = Nonce::from_slice(&nonce);
 
                 aes.decrypt(nonce, ciphertext.as_ref())
             }
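
Aside (not part of the commit): encrypt_block and decrypt_block now take the block's byte offset and derive a per-block AES-GCM nonce through nonce_offset, which adds the offset, byte by byte, into the last eight bytes of the root IV. A minimal sketch of the effect, assuming a zeroed 12-byte root IV:

// Sketch only: mirrors the nonce_offset_increment test added further down.
let root_iv = vec![0u8; 12];
let nonce = nonce_offset(&root_iv, 0x90);      // e.g. the block at file offset 0x90
assert_eq!(nonce[11], 0x90);                   // low byte of the offset lands in the last nonce byte
assert!(nonce[..11].iter().all(|&b| b == 0));  // remaining bytes stay equal to the root IV

Each byte position is shifted by the corresponding offset byte, so distinct block offsets map to distinct nonces under the same key.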
@@ -481,21 +562,15 @@ impl EncryptionContext {
         Ok(plaintext.unwrap())
     }
 
-    fn encrypt_block(&self, plaintext: &Vec<u8>) -> Result<Vec<u8>, &'static str> {
+    fn encrypt_block(&self, plaintext: &Vec<u8>, offset: usize) -> Result<Vec<u8>, &'static str> {
         let ciphertext = match self.enc_type {
             EncryptionType::Invalid => unreachable!(),
             EncryptionType::AesGcm => {
-                println!(
-                    "Aes enc: len: {} key: {:?}: iv: {:?}",
-                    plaintext.len(),
-                    &self.key,
-                    &self.iv_root
-                );
                 let aes_key = aes_gcm::aead::generic_array::GenericArray::from_slice(&self.key);
                 let aes = Aes256Gcm::new(aes_key);
-                let nonce = Nonce::from_slice(&self.iv_root);
+                let nonce = nonce_offset(&self.iv_root, offset);
+                let nonce = Nonce::from_slice(&nonce);
 
                 aes.encrypt(nonce, plaintext.as_ref())
             }
@@ -508,13 +583,32 @@ impl EncryptionContext {
         Ok(ciphertext.unwrap())
     }
 
+    fn decrypt_block_from_file(
+        &self,
+        len: usize,
+        offset: usize,
+        file: &mut BufReader<File>,
+    ) -> Result<Vec<u8>, &'static str> {
+        let mut buf: Vec<u8> = vec![0; len + 16];
+
+        if file.read_exact(&mut buf).is_err() {
+            return Err("Read error.");
+        }
+
+        self.decrypt_block(&buf, offset)
+    }
 }
 
 #[cfg(test)]
 mod tests {
     use std::mem::size_of;
 
-    use crate::store::local::{LocalStoreHeader, HEADER_SIZE};
+    use crate::store::{
+        local::{nonce_offset, LocalStoreHeader, HEADER_SIZE},
+        StoreRecord,
+    };
+
+    use super::LocalRecord;
 
     #[test]
     fn header_size_const_greater_than_realsize() {
@@ -523,4 +617,48 @@ mod tests {
         assert!(c > r, "const = {}, real = {}", c, r);
     }
 
+    #[test]
+    fn nonce_offset_increment() {
+        let nonce: Vec<u8> = vec![0; 12];
+        let nonce_off = nonce_offset(&nonce, 0xfeeddeadbeef);
+
+        assert!(
+            nonce_off[11] == 0xef
+                && nonce_off[10] == 0xbe
+                && nonce_off[9] == 0xad
+                && nonce_off[8] == 0xde
+                && nonce_off[7] == 0xed
+                && nonce_off[6] == 0xfe,
+            "\nn1 = {:x?}\nn2 = {:x?}",
+            nonce,
+            nonce_off
+        );
+    }
+
+    #[test]
+    fn localstore_record_serialize() {
+        let rec = LocalRecord {
+            position: 0,
+            size: 0,
+            record: StoreRecord {
+                key: "meow".to_owned(),
+                meta: None,
+                specifier: "test".to_owned(),
+                r#type: crate::store::RecordType::LOGIN_PASSWORD,
+                value: vec![1, 2, 3, 4],
+            },
+        };
+
+        let deser = rec.serialize();
+        println!("D: {:x?}", deser);
+        let reser = LocalRecord::from_block(&deser).unwrap();
+        let redes = reser.serialize();
+
+        assert_eq!(deser, redes);
+    }
 }
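
Aside (not part of the commit): as a worked example of the record format that serialize writes and from_block parses, the record built in localstore_record_serialize above (type LOGIN_PASSWORD, specifier "test", key "meow", value [1, 2, 3, 4], no metadata) serializes to a 30-byte block: an 18-byte fixed prefix (type, flags, four big-endian u32 lengths) followed by the variable-length fields.

byte  0        0x00                                   record type (LOGIN_PASSWORD)
byte  1        0x00                                   flags (CRED_FLAG_METADATA not set)
bytes 2..18    00000004 00000004 00000004 00000000    specifier/key/value/meta lengths (u32 BE)
bytes 18..22   "test"                                 specifier
bytes 22..26   "meow"                                 key
bytes 26..30   01 02 03 04                            value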

View file

@@ -2,21 +2,86 @@ pub(crate) mod local;
 pub(crate) mod server;
 
 pub struct StoreRecord {
-    id: u64,
+    specifier: String,
     key: String,
-    value: String,
-    specifier: Option<String>,
+    r#type: RecordType,
+    value: Vec<u8>,
     meta: Option<String>,
 }
 
+#[derive(Clone, Copy, PartialEq, PartialOrd)]
+pub enum RecordType {
+    LOGIN_PASSWORD = 0,
+    PASSWORD = 1,
+    SSH_KEY = 2,
+    PGP_KEY = 3,
+    SYM_KEY = 4,
+    ASYM_KEY_PAIR = 5,
+    ASYM_PRIV_KEY = 6,
+}
+
+impl RecordType {
+    pub fn from(n: u8) -> Self {
+        match n {
+            0 => RecordType::LOGIN_PASSWORD,
+            1 => RecordType::PASSWORD,
+            2 => RecordType::SSH_KEY,
+            3 => RecordType::PGP_KEY,
+            4 => RecordType::SYM_KEY,
+            5 => RecordType::ASYM_KEY_PAIR,
+            6 => RecordType::ASYM_PRIV_KEY,
+            _ => panic!("Invalid value '{}'.", n),
+        }
+    }
+}
+
 pub trait Store {
-    fn get_creds(&self, id: u64) -> Option<StoreRecord>;
-    fn get_creds_by_key(&self, key: String) -> Vec<StoreRecord>;
+    fn get_creds(&self, specifier: String, key: String) -> Option<StoreRecord>;
+    fn get_creds_by_specifier(&self, specifier: String) -> Vec<StoreRecord>;
     fn store_creds(
         &self,
+        specifier: String,
         key: String,
-        value: String,
-        specifier: Option<String>,
+        value: Vec<u8>,
+        r#type: RecordType,
         meta: Option<String>,
     );
 }
+
+#[derive(Clone, Copy, PartialEq, PartialOrd)]
+pub enum EncryptionType {
+    Invalid = 0,
+    AesGcm = 1,
+    Chacha20Poly1305 = 2,
+}
+
+impl EncryptionType {
+    pub fn from(n: u8) -> Self {
+        match n {
+            0 => EncryptionType::Invalid,
+            1 => EncryptionType::AesGcm,
+            2 => EncryptionType::Chacha20Poly1305,
+            _ => panic!("Invalid value '{}'.", n),
+        }
+    }
+}
+
+#[derive(Clone, Copy, PartialEq, PartialOrd)]
+pub enum HashType {
+    Invalid = 0,
+    Argon2 = 1,
+    Bcrypt = 2,
+    Pbkdf2 = 4,
+}
+
+impl HashType {
+    pub fn from(n: u8) -> Self {
+        match n {
+            0 => HashType::Invalid,
+            1 => HashType::Argon2,
+            2 => HashType::Bcrypt,
+            4 => HashType::Pbkdf2,
+            _ => panic!("Invalid value '{}'.", n),
+        }
+    }
+}
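
Aside (not part of the commit): these enums are now shared by both store backends ("decouple common blocks"), and a record's type round-trips through a single byte — written as r#type as u8 in LocalRecord::serialize and read back with RecordType::from in from_block. A minimal sketch of that round trip:

// Sketch only: the u8 <-> RecordType round trip used by LocalRecord's (de)serialization.
let written = RecordType::SSH_KEY as u8;    // stored as the first byte of a record block
let read_back = RecordType::from(written);  // panics on an unknown tag
assert!(read_back == RecordType::SSH_KEY);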

View file

@@ -1,6 +1,6 @@
 use reqwest::header::HeaderValue;
 
-use super::{Store, StoreRecord};
+use super::{Store, StoreRecord, RecordType};
 
 pub struct ServerStore {
     address: String,
@@ -27,20 +27,34 @@ pub fn load(address: String) -> Result<ServerStore, &'static str> {
     return Ok(ServerStore { address: address });
 }
 
+impl ServerStore {
+    fn get_endpoint(&self, path: String) -> String {
+        let base = self.address.trim();
+        let base = if base.ends_with('/') {
+            &base[..base.len() - 2]
+        } else {
+            base
+        };
+
+        format!("{}{}", base, path)
+    }
+}
+
 impl Store for ServerStore {
-    fn get_creds(&self, id: u64) -> Option<StoreRecord> {
+    fn get_creds(&self, specifier: String, key: String) -> Option<StoreRecord> {
         todo!()
     }
 
-    fn get_creds_by_key(&self, key: String) -> Vec<StoreRecord> {
+    fn get_creds_by_specifier(&self, specifier: String) -> Vec<StoreRecord> {
         todo!()
     }
 
     fn store_creds(
         &self,
+        specifier: String,
         key: String,
-        value: String,
-        specifier: Option<String>,
+        value: Vec<u8>,
+        r#type: RecordType,
         meta: Option<String>,
     ) {
         todo!()