From 749ced044f2975c8329b8f88fdfd319a776a46c8 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:18:52 +1000 Subject: [PATCH 01/15] slowly removing vdrtools features Signed-off-by: gmulhearn --- aries/agents/aath-backchannel/src/main.rs | 4 +- aries/agents/aath-backchannel/src/setup.rs | 6 +- aries/agents/aries-vcx-agent/Cargo.toml | 2 +- .../agents/aries-vcx-agent/src/agent/init.rs | 26 ++- aries/agents/aries-vcx-agent/src/agent/mod.rs | 2 +- aries/agents/mediator/Cargo.toml | 2 +- aries/agents/mediator/client-tui/src/main.rs | 6 +- aries/agents/mediator/src/aries_agent/mod.rs | 25 +-- aries/agents/mediator/src/bin/mediator.rs | 9 +- .../tests/common/agent_and_transport_utils.rs | 4 +- .../tests/mediator-aries-connection.rs | 4 +- .../tests/mediator-protocol-pickup.rs | 4 +- aries/aries_vcx/tests/test_mysql_wallet.rs | 49 ----- aries/aries_vcx_wallet/Cargo.toml | 2 +- .../src/wallet/base_wallet/mod.rs | 82 -------- aries/aries_vcx_wallet/src/wallet/mod.rs | 4 +- aries/misc/test_utils/src/devsetup.rs | 12 +- .../test_utils/src/devsetup/askar_wallet.rs | 35 ++++ .../src/devsetup/vdrtools_wallet.rs | 30 --- .../wrappers/uniffi-aries-vcx/core/Cargo.toml | 5 - aries/wrappers/uniffi-aries-vcx/core/build.rs | 7 - .../core/src/core/profile/indy.rs | 61 ------ .../core/src/core/profile/mod.rs | 16 -- .../wrappers/uniffi-aries-vcx/core/src/lib.rs | 7 - .../uniffi-aries-vcx/core/src/vcx_indy.udl | 186 ------------------ 25 files changed, 87 insertions(+), 503 deletions(-) delete mode 100644 aries/aries_vcx/tests/test_mysql_wallet.rs create mode 100644 aries/misc/test_utils/src/devsetup/askar_wallet.rs delete mode 100644 aries/misc/test_utils/src/devsetup/vdrtools_wallet.rs delete mode 100644 aries/wrappers/uniffi-aries-vcx/core/src/core/profile/indy.rs delete mode 100644 aries/wrappers/uniffi-aries-vcx/core/src/vcx_indy.udl diff --git a/aries/agents/aath-backchannel/src/main.rs b/aries/agents/aath-backchannel/src/main.rs index d0fed19de4..a73fdfbf71 100644 --- a/aries/agents/aath-backchannel/src/main.rs +++ b/aries/agents/aath-backchannel/src/main.rs @@ -22,7 +22,7 @@ use std::{ use actix_web::{middleware, web, App, HttpServer}; use aries_vcx_agent::{ - aries_vcx::{aries_vcx_wallet::wallet::indy::IndySdkWallet, messages::AriesMessage}, + aries_vcx::{aries_vcx_wallet::wallet::askar::AskarWallet, messages::AriesMessage}, Agent as AriesAgent, }; use clap::Parser; @@ -69,7 +69,7 @@ enum Status { } pub struct HarnessAgent { - aries_agent: AriesAgent, + aries_agent: AriesAgent, status: Status, // did-exchange specific // todo: extra didx specific AATH service diff --git a/aries/agents/aath-backchannel/src/setup.rs b/aries/agents/aath-backchannel/src/setup.rs index 58e706f7f2..09be8fccfe 100644 --- a/aries/agents/aath-backchannel/src/setup.rs +++ b/aries/agents/aath-backchannel/src/setup.rs @@ -1,7 +1,7 @@ use std::{io::prelude::*, sync::Arc}; use aries_vcx_agent::{ - aries_vcx::aries_vcx_wallet::wallet::indy::IndySdkWallet, build_indy_wallet, + aries_vcx::aries_vcx_wallet::wallet::askar::AskarWallet, build_askar_wallet, Agent as AriesAgent, WalletInitConfig, }; use rand::{thread_rng, Rng}; @@ -100,7 +100,7 @@ async fn download_genesis_file() -> std::result::Result { } } -pub async fn initialize(port: u32) -> AriesAgent { +pub async fn initialize(port: u32) -> AriesAgent { let register_nym_res = get_writer_seed().await; let genesis_path = download_genesis_file() .await @@ -113,7 +113,7 @@ pub async fn initialize(port: u32) -> AriesAgent { wallet_key: 
"8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY".to_string(), wallet_kdf: "RAW".to_string(), }; - let (wallet, issuer_config) = build_indy_wallet(wallet_config, register_nym_res.seed).await; + let (wallet, issuer_config) = build_askar_wallet(wallet_config, register_nym_res.seed).await; let wallet = Arc::new(wallet); let issuer_did = AriesAgent::setup_ledger( diff --git a/aries/agents/aries-vcx-agent/Cargo.toml b/aries/agents/aries-vcx-agent/Cargo.toml index 0d205caa56..ffa46fd673 100644 --- a/aries/agents/aries-vcx-agent/Cargo.toml +++ b/aries/agents/aries-vcx-agent/Cargo.toml @@ -9,7 +9,7 @@ edition.workspace = true serde = "1.0.145" aries_vcx = { path = "../../aries_vcx" } aries_vcx_wallet = { path = "../../aries_vcx_wallet", features = [ - "vdrtools_wallet", + "askar_wallet", ] } aries_vcx_anoncreds = { path = "../../aries_vcx_anoncreds", features = ["credx"] } aries_vcx_ledger = { path = "../../aries_vcx_ledger"} diff --git a/aries/agents/aries-vcx-agent/src/agent/init.rs b/aries/agents/aries-vcx-agent/src/agent/init.rs index c8ce6cdf47..ec6589881a 100644 --- a/aries/agents/aries-vcx-agent/src/agent/init.rs +++ b/aries/agents/aries-vcx-agent/src/agent/init.rs @@ -18,8 +18,8 @@ use aries_vcx_ledger::ledger::indy_vdr_ledger::{ build_ledger_components, DefaultIndyLedgerRead, VcxPoolConfig, }; use aries_vcx_wallet::wallet::{ + askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod, AskarWallet}, base_wallet::{issuer_config::IssuerConfig, BaseWallet, ManageWallet}, - indy::{indy_wallet_config::IndyWalletConfig, IndySdkWallet}, }; use did_peer::resolver::PeerDidResolver; use did_resolver_registry::ResolverRegistry; @@ -27,6 +27,7 @@ use did_resolver_sov::resolution::DidSovResolver; use display_as_json::Display; use serde::Serialize; use url::Url; +use uuid::Uuid; use crate::{ agent::agent_struct::Agent, @@ -47,20 +48,17 @@ pub struct WalletInitConfig { pub wallet_kdf: String, } -pub async fn build_indy_wallet( - wallet_config: WalletInitConfig, +pub async fn build_askar_wallet( + _wallet_config: WalletInitConfig, issuer_seed: String, -) -> (IndySdkWallet, IssuerConfig) { - let config_wallet = IndyWalletConfig { - wallet_name: wallet_config.wallet_name, - wallet_key: wallet_config.wallet_key, - wallet_key_derivation: wallet_config.wallet_kdf, - wallet_type: None, - storage_config: None, - storage_credentials: None, - rekey: None, - rekey_derivation_method: None, - }; +) -> (AskarWallet, IssuerConfig) { + // TODO - actually impl this + let config_wallet = AskarWalletConfig::new( + "sqlite://:memory:", + KeyMethod::Unprotected, + "", + &Uuid::new_v4().to_string(), + ); let wallet = config_wallet.create_wallet().await.unwrap(); let config_issuer = wallet.configure_issuer(&issuer_seed).await.unwrap(); diff --git a/aries/agents/aries-vcx-agent/src/agent/mod.rs b/aries/agents/aries-vcx-agent/src/agent/mod.rs index 8b12c5b30e..405960345b 100644 --- a/aries/agents/aries-vcx-agent/src/agent/mod.rs +++ b/aries/agents/aries-vcx-agent/src/agent/mod.rs @@ -2,4 +2,4 @@ mod agent_struct; mod init; pub use agent_struct::Agent; -pub use init::{build_indy_wallet, WalletInitConfig}; +pub use init::{build_askar_wallet, WalletInitConfig}; diff --git a/aries/agents/mediator/Cargo.toml b/aries/agents/mediator/Cargo.toml index 3440ddffa4..92aa550752 100644 --- a/aries/agents/mediator/Cargo.toml +++ b/aries/agents/mediator/Cargo.toml @@ -13,7 +13,7 @@ client = [] anyhow = "1.0.75" aries_vcx = { path = "../../aries_vcx" } aries_vcx_wallet = { path = "../../aries_vcx_wallet", features = [ - 
"vdrtools_wallet", + "askar_wallet", ] } async-trait = "0.1.73" axum = "0.7.5" diff --git a/aries/agents/mediator/client-tui/src/main.rs b/aries/agents/mediator/client-tui/src/main.rs index 4af51eaac1..c9dbb8bc80 100644 --- a/aries/agents/mediator/client-tui/src/main.rs +++ b/aries/agents/mediator/client-tui/src/main.rs @@ -1,4 +1,4 @@ -use aries_vcx_wallet::wallet::indy::IndySdkWallet; +use aries_vcx_wallet::wallet::askar::AskarWallet; mod tui; @@ -13,9 +13,7 @@ async fn main() { load_dot_env(); setup_logging(); log::info!("TUI initializing!"); - let agent = AgentBuilder::::new_demo_agent() - .await - .unwrap(); + let agent = AgentBuilder::::new_demo_agent().await.unwrap(); tui::init_tui(agent).await; } diff --git a/aries/agents/mediator/src/aries_agent/mod.rs b/aries/agents/mediator/src/aries_agent/mod.rs index 394a5ebfc7..a7f6fad089 100644 --- a/aries/agents/mediator/src/aries_agent/mod.rs +++ b/aries/agents/mediator/src/aries_agent/mod.rs @@ -8,9 +8,7 @@ use aries_vcx::{ use aries_vcx_wallet::{ errors::error::VcxWalletError, wallet::{ - base_wallet::{BaseWallet, ManageWallet}, - indy::indy_wallet_config::IndyWalletConfig, - structs_io::UnpackMessageOutput, + askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod}, base_wallet::{BaseWallet, ManageWallet}, structs_io::UnpackMessageOutput }, }; use diddoc_legacy::aries::{diddoc::AriesDidDoc, service::AriesService}; @@ -22,6 +20,7 @@ use messages::{ AriesMessage, }; use serde_json::json; +use uuid::Uuid; use crate::{ persistence::{get_persistence, AccountDetails, MediatorPersistence}, @@ -61,16 +60,12 @@ impl AgentBuilder { } pub async fn new_demo_agent() -> Result, VcxWalletError> { - let config = IndyWalletConfig { - wallet_name: uuid::Uuid::new_v4().to_string(), - wallet_key: "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY".into(), - wallet_key_derivation: "RAW".into(), - wallet_type: None, - storage_config: None, - storage_credentials: None, - rekey: None, - rekey_derivation_method: None, - }; + let config = AskarWalletConfig::new( + "sqlite://:memory:", + KeyMethod::Unprotected, + "", + &Uuid::new_v4().to_string(), + ); Self::new_from_wallet_config(config).await } } @@ -239,7 +234,7 @@ mod test { protocols::oob::oob_invitation_to_legacy_did_doc, utils::encryption_envelope::EncryptionEnvelope, }; - use aries_vcx_wallet::wallet::indy::IndySdkWallet; + use aries_vcx_wallet::wallet::askar::AskarWallet; use log::info; use serde_json::Value; use test_utils::mockdata::mock_ledger::MockLedger; @@ -250,7 +245,7 @@ mod test { pub async fn test_pack_unpack() { let message: Value = serde_json::from_str("{}").unwrap(); let message_bytes = serde_json::to_vec(&message).unwrap(); - let mut agent = AgentBuilder::::new_demo_agent() + let mut agent = AgentBuilder::::new_demo_agent() .await .unwrap(); agent diff --git a/aries/agents/mediator/src/bin/mediator.rs b/aries/agents/mediator/src/bin/mediator.rs index f0116f0251..885cc39f4a 100644 --- a/aries/agents/mediator/src/bin/mediator.rs +++ b/aries/agents/mediator/src/bin/mediator.rs @@ -1,4 +1,4 @@ -use aries_vcx_wallet::wallet::indy::{indy_wallet_config::IndyWalletConfig, IndySdkWallet}; +use aries_vcx_wallet::wallet::askar::{askar_wallet_config::AskarWalletConfig, AskarWallet}; use log::info; use mediator::aries_agent::AgentBuilder; @@ -9,7 +9,7 @@ async fn main() { info!("Starting up mediator! 
⚙️⚙️"); let endpoint_root = std::env::var("ENDPOINT_ROOT").unwrap_or("127.0.0.1:8005".into()); info!("Mediator endpoint root address: {}", endpoint_root); - let indy_wallet_config_json = std::env::var("INDY_WALLET_CONFIG").unwrap_or( + let wallet_config_json = std::env::var("INDY_WALLET_CONFIG").unwrap_or( "{ \"wallet_name\": \"demo-wallet\", \"wallet_key\" : \"8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY\", @@ -17,9 +17,10 @@ async fn main() { }" .to_string(), ); - let wallet_config: IndyWalletConfig = serde_json::from_str(&indy_wallet_config_json).unwrap(); + // TODO - actually impl this + let wallet_config: AskarWalletConfig = serde_json::from_str(&wallet_config_json).unwrap(); info!("Wallet Config: {:?}", wallet_config); - let mut agent = AgentBuilder::::new_from_wallet_config(wallet_config) + let mut agent = AgentBuilder::::new_from_wallet_config(wallet_config) .await .unwrap(); agent diff --git a/aries/agents/mediator/tests/common/agent_and_transport_utils.rs b/aries/agents/mediator/tests/common/agent_and_transport_utils.rs index 1e0b84e569..5320d97076 100644 --- a/aries/agents/mediator/tests/common/agent_and_transport_utils.rs +++ b/aries/agents/mediator/tests/common/agent_and_transport_utils.rs @@ -5,7 +5,7 @@ use aries_vcx::{ }, utils::encryption_envelope::EncryptionEnvelope, }; -use aries_vcx_wallet::wallet::{base_wallet::BaseWallet, indy::IndySdkWallet}; +use aries_vcx_wallet::wallet::{askar::AskarWallet, base_wallet::BaseWallet}; use diddoc_legacy::aries::diddoc::AriesDidDoc; use mediator::{ aries_agent::{client::transports::AriesTransport, Agent}, @@ -59,7 +59,7 @@ pub async fn gen_mediator_connected_agent() -> Result<( VerKey, AriesDidDoc, )> { - let agent = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; + let agent = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; let mut aries_transport = reqwest::Client::new(); let completed_connection = didcomm_connection(&agent, &mut aries_transport).await?; let our_verkey: VerKey = completed_connection.pairwise_info().pw_vk.clone(); diff --git a/aries/agents/mediator/tests/mediator-aries-connection.rs b/aries/agents/mediator/tests/mediator-aries-connection.rs index 1f5ce29f05..db08bdceb7 100644 --- a/aries/agents/mediator/tests/mediator-aries-connection.rs +++ b/aries/agents/mediator/tests/mediator-aries-connection.rs @@ -1,6 +1,6 @@ mod common; -use aries_vcx_wallet::wallet::indy::IndySdkWallet; +use aries_vcx_wallet::wallet::askar::AskarWallet; use messages::msg_fields::protocols::out_of_band::invitation::Invitation as OOBInvitation; use crate::common::{prelude::*, test_setup::setup_env_logging}; @@ -27,7 +27,7 @@ async fn didcomm_connection_succeeds() -> Result<()> { "Got invitation {}", serde_json::to_string_pretty(&oobi.clone()).unwrap() ); - let agent = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; + let agent = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; let mut aries_transport = reqwest::Client::new(); let _state = agent .establish_connection(oobi, &mut aries_transport) diff --git a/aries/agents/mediator/tests/mediator-protocol-pickup.rs b/aries/agents/mediator/tests/mediator-protocol-pickup.rs index 8d1bb12414..e262b18cd7 100644 --- a/aries/agents/mediator/tests/mediator-protocol-pickup.rs +++ b/aries/agents/mediator/tests/mediator-protocol-pickup.rs @@ -1,7 +1,7 @@ mod common; use aries_vcx::utils::encryption_envelope::EncryptionEnvelope; -use aries_vcx_wallet::wallet::indy::IndySdkWallet; +use aries_vcx_wallet::wallet::askar::AskarWallet; use 
diddoc_legacy::aries::diddoc::AriesDidDoc; use mediator::aries_agent::client::transports::AriesTransport; use messages::{ @@ -32,7 +32,7 @@ async fn forward_basic_anoncrypt_message( message_text: &str, ) -> Result<()> { // Prepare forwarding agent - let agent_f = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; + let agent_f = mediator::aries_agent::AgentBuilder::::new_demo_agent().await?; // Prepare forwarding agent transport let mut agent_f_aries_transport = reqwest::Client::new(); // Prepare message and wrap into anoncrypt forward message diff --git a/aries/aries_vcx/tests/test_mysql_wallet.rs b/aries/aries_vcx/tests/test_mysql_wallet.rs deleted file mode 100644 index 4a44d45851..0000000000 --- a/aries/aries_vcx/tests/test_mysql_wallet.rs +++ /dev/null @@ -1,49 +0,0 @@ -#[macro_use] -extern crate serde_json; - -#[cfg(test)] -mod dbtests { - use std::error::Error; - - use aries_vcx::global::settings; - use aries_vcx_wallet::wallet::{ - base_wallet::{did_wallet::DidWallet, BaseWallet, ManageWallet}, - indy::indy_wallet_config::IndyWalletConfig, - }; - - #[tokio::test] - #[ignore] - async fn test_mysql_init_issuer_with_mysql_wallet() -> Result<(), Box> { - let db_name = format!("mysqltest_{}", uuid::Uuid::new_v4()).replace('-', "_"); - let storage_config = json!({ - "read_host": "localhost", - "write_host": "localhost", - "port": 3306, - "db_name": db_name, - "default_connection_limit": 50 - }) - .to_string(); - let storage_credentials = json!({ - "user": "root", - "pass": "mysecretpassword" - }) - .to_string(); - let enterprise_seed = "000000000000000000000000Trustee1"; - let config_wallet = IndyWalletConfig::builder() - .wallet_name(format!("faber_wallet_{}", uuid::Uuid::new_v4())) - .wallet_key(settings::DEFAULT_WALLET_KEY.into()) - .wallet_key_derivation(settings::WALLET_KDF_RAW.into()) - .wallet_type("mysql".into()) - .storage_config(storage_config) - .storage_credentials(storage_credentials) - .build(); - - let wallet = config_wallet.create_wallet().await?; - wallet.configure_issuer(enterprise_seed).await?; - - wallet.create_and_store_my_did(None, None).await?; - - wallet.close_wallet().await?; - Ok(()) - } -} diff --git a/aries/aries_vcx_wallet/Cargo.toml b/aries/aries_vcx_wallet/Cargo.toml index ac239e70b6..4647d80a5f 100644 --- a/aries/aries_vcx_wallet/Cargo.toml +++ b/aries/aries_vcx_wallet/Cargo.toml @@ -8,7 +8,7 @@ edition.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -vdrtools_wallet = ["dep:libvdrtools", "dep:indy-api-types"] +vdrtools_wallet = ["dep:libvdrtools", "dep:indy-api-types"] # TODO - remove askar_wallet = ["dep:aries-askar"] [dependencies] diff --git a/aries/aries_vcx_wallet/src/wallet/base_wallet/mod.rs b/aries/aries_vcx_wallet/src/wallet/base_wallet/mod.rs index 8e5867c2da..e3ab9e3230 100644 --- a/aries/aries_vcx_wallet/src/wallet/base_wallet/mod.rs +++ b/aries/aries_vcx_wallet/src/wallet/base_wallet/mod.rs @@ -78,12 +78,6 @@ mod tests { #[allow(unused_variables)] async fn build_test_wallet() -> impl BaseWallet { - #[cfg(feature = "vdrtools_wallet")] - let wallet = { - use crate::wallet::indy::tests::dev_setup_indy_wallet; - dev_setup_indy_wallet().await - }; - #[cfg(feature = "askar_wallet")] let wallet = { use crate::wallet::askar::tests::dev_setup_askar_wallet; @@ -477,79 +471,3 @@ mod tests { assert_eq!(1, did_count); } } - -#[cfg(test)] -#[cfg(all(feature = "vdrtools_wallet", feature = "askar_wallet"))] -mod compat_tests { - use crate::wallet::{ - 
askar::tests::dev_setup_askar_wallet, base_wallet::BaseWallet, - indy::tests::dev_setup_indy_wallet, - }; - - async fn pack_and_unpack_anoncrypt(sender: impl BaseWallet, recipient: impl BaseWallet) { - let did_data = recipient.create_and_store_my_did(None, None).await.unwrap(); - - let msg = "send me"; - - let packed = sender - .pack_message(None, vec![did_data.verkey().to_owned()], msg.as_bytes()) - .await - .unwrap(); - - let unpacked = recipient.unpack_message(&packed).await.unwrap(); - - assert_eq!(msg, unpacked.message); - } - - async fn pack_and_unpack_authcrypt(sender: impl BaseWallet, recipient: impl BaseWallet) { - let sender_did_data = sender.create_and_store_my_did(None, None).await.unwrap(); - let recipient_did_data = recipient.create_and_store_my_did(None, None).await.unwrap(); - - let msg = "send me"; - - let packed = sender - .pack_message( - Some(sender_did_data.verkey().to_owned()), - vec![recipient_did_data.verkey().to_owned()], - msg.as_bytes(), - ) - .await - .unwrap(); - - let unpacked = recipient.unpack_message(&packed).await.unwrap(); - - assert_eq!(msg, unpacked.message); - } - - #[tokio::test] - async fn wallet_compatibility_askar_should_pack_and_indy_should_unpack_anoncrypt() { - let askar_wallet = dev_setup_askar_wallet().await; - let indy_wallet = dev_setup_indy_wallet().await; - - pack_and_unpack_anoncrypt(askar_wallet, indy_wallet).await; - } - - #[tokio::test] - async fn wallet_compatibility_indy_should_pack_and_askar_should_unpack_anoncrypt() { - let askar_wallet = dev_setup_askar_wallet().await; - let indy_wallet = dev_setup_indy_wallet().await; - - pack_and_unpack_anoncrypt(indy_wallet, askar_wallet).await; - } - - #[tokio::test] - async fn wallet_compatibility_askar_should_pack_and_indy_should_unpack_authcrypt() { - let askar_wallet = dev_setup_askar_wallet().await; - let indy_wallet = dev_setup_indy_wallet().await; - - pack_and_unpack_authcrypt(askar_wallet, indy_wallet).await; - } - - #[tokio::test] - async fn wallet_compatibility_indy_should_pack_and_askar_should_unpack_authcrypt() { - let askar_wallet = dev_setup_askar_wallet().await; - let indy_wallet = dev_setup_indy_wallet().await; - - pack_and_unpack_authcrypt(indy_wallet, askar_wallet).await; - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/mod.rs b/aries/aries_vcx_wallet/src/wallet/mod.rs index 75bc9946be..b8829bd610 100644 --- a/aries/aries_vcx_wallet/src/wallet/mod.rs +++ b/aries/aries_vcx_wallet/src/wallet/mod.rs @@ -1,8 +1,8 @@ #[cfg(feature = "askar_wallet")] pub mod askar; pub mod base_wallet; -#[cfg(feature = "vdrtools_wallet")] -pub mod indy; +// #[cfg(feature = "vdrtools_wallet")] +// pub mod indy; pub mod record_tags; pub mod structs_io; mod utils; diff --git a/aries/misc/test_utils/src/devsetup.rs b/aries/misc/test_utils/src/devsetup.rs index 817804f5b3..eab64c63ea 100644 --- a/aries/misc/test_utils/src/devsetup.rs +++ b/aries/misc/test_utils/src/devsetup.rs @@ -37,8 +37,8 @@ pub mod vdr_proxy_ledger; use crate::devsetup::vdr_proxy_ledger::dev_build_profile_vdr_proxy_ledger; use crate::logger::init_logger; -#[cfg(feature = "vdrtools_wallet")] -pub mod vdrtools_wallet; +#[cfg(feature = "askar_wallet")] +pub mod askar_wallet; const DEFAULT_AML_LABEL: &str = "eula"; @@ -190,15 +190,15 @@ pub async fn dev_build_featured_anoncreds() -> impl BaseAnonCreds { #[allow(unused_variables)] pub async fn dev_build_featured_wallet(key_seed: &str) -> (String, impl BaseWallet) { - #[cfg(feature = "vdrtools_wallet")] + #[cfg(feature = "askar_wallet")] return { info!("SetupProfile >> using indy 
wallet"); - use crate::devsetup::vdrtools_wallet::dev_setup_wallet_indy; - dev_setup_wallet_indy(key_seed).await + use crate::devsetup::askar_wallet::dev_setup_wallet_askar; + dev_setup_wallet_askar(key_seed).await }; - #[cfg(not(feature = "vdrtools_wallet"))] + #[cfg(not(feature = "askar_wallet"))] { use crate::{constants::INSTITUTION_DID, mock_wallet::MockWallet}; diff --git a/aries/misc/test_utils/src/devsetup/askar_wallet.rs b/aries/misc/test_utils/src/devsetup/askar_wallet.rs new file mode 100644 index 0000000000..33d6065eee --- /dev/null +++ b/aries/misc/test_utils/src/devsetup/askar_wallet.rs @@ -0,0 +1,35 @@ +use aries_vcx_wallet::wallet::{ + askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod, AskarWallet}, + base_wallet::{did_wallet::DidWallet, ManageWallet}, +}; +use log::info; +use uuid::Uuid; + + +pub async fn dev_setup_wallet_askar(key_seed: &str) -> (String, AskarWallet) { + info!("dev_setup_wallet_askar >>"); + // TODO - actually impl this + let config_wallet = AskarWalletConfig::new( + "sqlite://:memory:", + KeyMethod::Unprotected, + "", + &Uuid::new_v4().to_string(), + ); + // wallet_name: format!("wallet_{}", uuid::Uuid::new_v4()), + // wallet_key: DEFAULT_WALLET_KEY.into(), + // wallet_key_derivation: WALLET_KDF_RAW.into(), + // wallet_type: None, + // storage_config: None, + // storage_credentials: None, + // rekey: None, + // rekey_derivation_method: None, + + let wallet = config_wallet.create_wallet().await.unwrap(); + + let did_data = wallet + .create_and_store_my_did(Some(key_seed), None) + .await + .unwrap(); + + (did_data.did().to_owned(), wallet) +} diff --git a/aries/misc/test_utils/src/devsetup/vdrtools_wallet.rs b/aries/misc/test_utils/src/devsetup/vdrtools_wallet.rs deleted file mode 100644 index 3fcd7083ef..0000000000 --- a/aries/misc/test_utils/src/devsetup/vdrtools_wallet.rs +++ /dev/null @@ -1,30 +0,0 @@ -use aries_vcx_wallet::wallet::{ - base_wallet::{did_wallet::DidWallet, ManageWallet}, - indy::{indy_wallet_config::IndyWalletConfig, IndySdkWallet}, -}; -use log::info; - -use crate::settings::{DEFAULT_WALLET_KEY, WALLET_KDF_RAW}; - -pub async fn dev_setup_wallet_indy(key_seed: &str) -> (String, IndySdkWallet) { - info!("dev_setup_wallet_indy >>"); - let config_wallet = IndyWalletConfig { - wallet_name: format!("wallet_{}", uuid::Uuid::new_v4()), - wallet_key: DEFAULT_WALLET_KEY.into(), - wallet_key_derivation: WALLET_KDF_RAW.into(), - wallet_type: None, - storage_config: None, - storage_credentials: None, - rekey: None, - rekey_derivation_method: None, - }; - - let wallet = config_wallet.create_wallet().await.unwrap(); - - let did_data = wallet - .create_and_store_my_did(Some(key_seed), None) - .await - .unwrap(); - - (did_data.did().to_owned(), wallet) -} diff --git a/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml b/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml index 709720dd1b..c7ef7dcb10 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml +++ b/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml @@ -15,11 +15,6 @@ path = "uniffi-bindgen.rs" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -default = ["askar_wallet"] -askar_wallet = ["aries_vcx/askar_wallet"] -vdrtools_wallet = ["aries_vcx/vdrtools_wallet"] - [dependencies] uniffi = { version = "0.23.0", features = ["cli"] } aries_vcx = { path = "../../../aries_vcx", features = [ diff --git a/aries/wrappers/uniffi-aries-vcx/core/build.rs b/aries/wrappers/uniffi-aries-vcx/core/build.rs index 00be3d76ca..702c6cc8a2 
100644 --- a/aries/wrappers/uniffi-aries-vcx/core/build.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/build.rs @@ -1,10 +1,3 @@ fn main() { - #[cfg(all(feature = "vdrtools_wallet", feature = "askar_wallet"))] - compile_error!("features `vdrtools_wallet` and `askar_wallet` are mutually exclusive"); - - #[cfg(feature = "vdrtools_wallet")] - uniffi::generate_scaffolding("./src/vcx_indy.udl").unwrap(); - - #[cfg(feature = "askar_wallet")] uniffi::generate_scaffolding("./src/vcx.udl").unwrap(); } diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/indy.rs b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/indy.rs deleted file mode 100644 index e8dd0037df..0000000000 --- a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/indy.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::sync::Arc; - -use aries_vcx::aries_vcx_wallet::wallet::{ - base_wallet::ManageWallet, - indy::{indy_wallet_config::IndyWalletConfig, IndySdkWallet}, -}; -use aries_vcx_anoncreds::anoncreds::{ - base_anoncreds::BaseAnonCreds, credx_anoncreds::IndyCredxAnonCreds, -}; -use aries_vcx_ledger::ledger::{ - indy_vdr_ledger::{indyvdr_build_ledger_read, IndyVdrLedgerRead}, - request_submitter::vdr_ledger::{IndyVdrLedgerPool, IndyVdrSubmitter}, - response_cacher::in_memory::{InMemoryResponseCacher, InMemoryResponseCacherConfig}, -}; -use indy_vdr::config::PoolConfig; - -use crate::{ - core::logging::enable_logging, errors::error::VcxUniFFIResult, runtime::block_on, ProfileHolder, -}; - -#[derive(Debug)] -pub struct UniffiProfile { - pub wallet: IndySdkWallet, - pub anoncreds: IndyCredxAnonCreds, - pub ledger_read: IndyVdrLedgerRead, -} - -pub fn new_indy_profile( - wallet_config: IndyWalletConfig, - genesis_file_path: String, -) -> VcxUniFFIResult> { - // Enable android logging - enable_logging(); - - block_on(async { - let wallet = wallet_config.create_wallet().await?; - - let anoncreds = IndyCredxAnonCreds; - - anoncreds - .prover_create_link_secret(&wallet, &"main".to_string()) - .await - .ok(); - - let indy_vdr_config = PoolConfig::default(); - let cache_config = InMemoryResponseCacherConfig::builder() - .ttl(std::time::Duration::from_secs(60)) - .capacity(1000)? 
- .build(); - let ledger_pool = IndyVdrLedgerPool::new(genesis_file_path, indy_vdr_config, vec![])?; - let request_submitter = IndyVdrSubmitter::new(ledger_pool); - let ledger_read = indyvdr_build_ledger_read(request_submitter, cache_config)?; - let profile = UniffiProfile { - anoncreds: IndyCredxAnonCreds, - wallet, - ledger_read, - }; - - Ok(Arc::new(ProfileHolder { inner: profile })) - }) -} diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs index 7fda608038..44292a2e1d 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs @@ -6,18 +6,8 @@ use aries_vcx_ledger::ledger::{ response_cacher::in_memory::InMemoryResponseCacher, }; -#[cfg(feature = "vdrtools_wallet")] -pub mod indy; -#[cfg(feature = "vdrtools_wallet")] -use aries_vcx::aries_vcx_wallet::wallet::indy::IndySdkWallet; -#[cfg(feature = "vdrtools_wallet")] -pub use indy as profile; - -#[cfg(feature = "askar_wallet")] pub mod askar; -#[cfg(feature = "askar_wallet")] use aries_vcx::aries_vcx_wallet::wallet::askar::AskarWallet; -#[cfg(feature = "askar_wallet")] pub use askar as profile; use crate::profile::UniffiProfile; @@ -31,12 +21,6 @@ impl UniffiProfile { &self.anoncreds } - #[cfg(feature = "vdrtools_wallet")] - pub fn wallet(&self) -> &IndySdkWallet { - &self.wallet - } - - #[cfg(feature = "askar_wallet")] pub fn wallet(&self) -> &AskarWallet { &self.wallet } diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs b/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs index 94b5a50fd2..0899e03716 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs @@ -1,7 +1,3 @@ -#[cfg(feature = "vdrtools_wallet")] -uniffi::include_scaffolding!("vcx_indy"); - -#[cfg(feature = "askar_wallet")] uniffi::include_scaffolding!("vcx"); pub mod core; @@ -9,13 +5,10 @@ pub mod errors; pub mod handlers; pub mod runtime; -#[cfg(feature = "askar_wallet")] use aries_vcx::aries_vcx_wallet::wallet::askar::{ askar_wallet_config::AskarWalletConfig, key_method::{ArgonLevel, AskarKdfMethod, KeyMethod}, }; -#[cfg(feature = "vdrtools_wallet")] -use aries_vcx::aries_vcx_wallet::wallet::indy::indy_wallet_config::IndyWalletConfig; use aries_vcx::protocols::connection::pairwise_info::PairwiseInfo; use handlers::{connection::*, holder::*}; diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/vcx_indy.udl b/aries/wrappers/uniffi-aries-vcx/core/src/vcx_indy.udl deleted file mode 100644 index 767c148391..0000000000 --- a/aries/wrappers/uniffi-aries-vcx/core/src/vcx_indy.udl +++ /dev/null @@ -1,186 +0,0 @@ -interface ProfileHolder { - -}; - -dictionary IndyWalletConfig { - string wallet_name; - string wallet_key; - string wallet_key_derivation; - string? wallet_type; - string? storage_config; - string? storage_credentials; - string? rekey; - string? rekey_derivation_method; -}; - -dictionary ConnectionState { - ConnectionRole role; - ConnectionProtocolState protocol_state; -}; - -enum ConnectionRole { - "Invitee", - "Inviter", -}; - -enum ConnectionProtocolState { - "Initial", - "Invited", - "Requested", - "Responded", - "Completed", -}; - -dictionary PairwiseInfo { - string pw_did; - string pw_vk; -}; - -dictionary UnpackMessage { - string message; - string recipient_verkey; - string? 
sender_verkey; -}; - -interface Connection { - [Throws=VcxUniFFIError] - ConnectionState get_state(); - - [Throws=VcxUniFFIError] - PairwiseInfo pairwise_info(); - - [Throws=VcxUniFFIError] - void accept_invitation(ProfileHolder profile, string invitation); - - [Throws=VcxUniFFIError] - void handle_request(ProfileHolder profile, string request, string service_endpoint, sequence routing_keys); - - [Throws=VcxUniFFIError] - void handle_response(ProfileHolder profile, string response); - - [Throws=VcxUniFFIError] - void send_request(ProfileHolder profile, string service_endpoint, sequence routing_keys); - - [Throws=VcxUniFFIError] - void send_response(ProfileHolder profile); - - [Throws=VcxUniFFIError] - void send_ack(ProfileHolder profile); - - [Throws=VcxUniFFIError] - void send_message(ProfileHolder profile, string message); -}; - -interface Holder { - [Throws=VcxUniFFIError] - void set_proposal(string credential_proposal); - - [Throws=VcxUniFFIError] - void prepare_credential_request(ProfileHolder profile, string my_pw_did); - - [Throws=VcxUniFFIError] - string get_msg_credential_request(); - - [Throws=VcxUniFFIError] - string decline_offer(string? comment); - - [Throws=VcxUniFFIError] - void process_credential(ProfileHolder profile, string credential); - - [Throws=VcxUniFFIError] - boolean is_terminal_state(); - - [Throws=VcxUniFFIError] - HolderState get_state(); - - [Throws=VcxUniFFIError] - string get_source_id(); - - [Throws=VcxUniFFIError] - string get_credential(); - - [Throws=VcxUniFFIError] - string get_attributes(); - - [Throws=VcxUniFFIError] - string get_attachment(); - - [Throws=VcxUniFFIError] - string get_offer(); - - [Throws=VcxUniFFIError] - string get_tails_location(); - - [Throws=VcxUniFFIError] - string get_tails_hash(); - - [Throws=VcxUniFFIError] - string get_rev_reg_id(); - - [Throws=VcxUniFFIError] - string get_cred_id(); - - [Throws=VcxUniFFIError] - string get_thread_id(); - - [Throws=VcxUniFFIError] - boolean is_revokable(ProfileHolder profile); - - [Throws=VcxUniFFIError] - boolean is_revoked(ProfileHolder profile); - - [Throws=VcxUniFFIError] - u32 get_cred_rev_id(ProfileHolder profile); - - [Throws=VcxUniFFIError] - string get_problem_report(); - - [Throws=VcxUniFFIError] - string? get_final_message(); -}; - -enum HolderState { - "Initial", - "ProposalSet", - "OfferReceived", - "RequestSet", - "Finished", - "Failed" -}; - -[Error] -enum VcxUniFFIError { - "AriesVcxError", - "AriesVcxWalletError", - "AriesVcxLedgerError", - "AriesVcxAnoncredsError", - "SerializationError", - "InternalError", - "StringParseError" -}; - -namespace vcx { - [Throws=VcxUniFFIError] - ProfileHolder new_indy_profile(IndyWalletConfig wallet_config, string genesis_file_path); - - [Throws=VcxUniFFIError] - Connection create_inviter(ProfileHolder profile); - - [Throws=VcxUniFFIError] - Connection create_invitee(ProfileHolder profile); - - [Throws=VcxUniFFIError] - UnpackMessage unpack_message(ProfileHolder profile, string packed_msg); - - [Throws=VcxUniFFIError] - Holder create(string source_id); - - [Throws=VcxUniFFIError] - Holder create_from_offer(string source_id, string offer_message); - - [Throws=VcxUniFFIError] - Holder create_with_proposal(string source_id, string propose_credential); - - [Throws=VcxUniFFIError] - string get_credentials(ProfileHolder profile); -}; From 4aa09e674ec48aeda0bfe322bbe90f1090731d8a Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:20:52 +1000 Subject: [PATCH 02/15] remove migrator. 
somehow is compiling Signed-off-by: gmulhearn --- Cargo.lock | 140 +---- Cargo.toml | 1 - aries/aries_vcx/Cargo.toml | 1 - aries/misc/wallet_migrator/Cargo.toml | 25 - aries/misc/wallet_migrator/src/error.rs | 15 - aries/misc/wallet_migrator/src/lib.rs | 50 -- .../wallet_migrator/src/migrate2askar/mod.rs | 583 ------------------ 7 files changed, 3 insertions(+), 812 deletions(-) delete mode 100644 aries/misc/wallet_migrator/Cargo.toml delete mode 100644 aries/misc/wallet_migrator/src/error.rs delete mode 100644 aries/misc/wallet_migrator/src/lib.rs delete mode 100644 aries/misc/wallet_migrator/src/migrate2askar/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 60e9e295c3..5261fd1bd3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -576,7 +576,6 @@ dependencies = [ "tokio", "url", "uuid", - "wallet_migrator", ] [[package]] @@ -3551,15 +3550,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - [[package]] name = "matchit" version = "0.7.3" @@ -3788,16 +3778,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - [[package]] name = "num" version = "0.4.3" @@ -3987,12 +3967,6 @@ version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "owning_ref" version = "0.4.1" @@ -4464,17 +4438,8 @@ checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", + "regex-automata", + "regex-syntax", ] [[package]] @@ -4485,7 +4450,7 @@ checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax", ] [[package]] @@ -4494,12 +4459,6 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.8.5" @@ -4977,15 +4936,6 @@ dependencies = [ "keccak", ] -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - [[package]] name = "shared" version 
= "0.65.0" @@ -5542,28 +5492,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" -[[package]] -name = "test-log" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" -dependencies = [ - "env_logger 0.11.5", - "test-log-macros", - "tracing-subscriber", -] - -[[package]] -name = "test-log-macros" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.79", -] - [[package]] name = "test_utils" version = "0.65.0" @@ -5613,16 +5541,6 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if", - "once_cell", -] - [[package]] name = "time" version = "0.1.45" @@ -5856,35 +5774,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "sharded-slab", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", ] [[package]] @@ -6224,12 +6113,6 @@ dependencies = [ "serde", ] -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - [[package]] name = "value-bag" version = "1.9.0" @@ -6248,23 +6131,6 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" -[[package]] -name = "wallet_migrator" -version = "0.1.0" -dependencies = [ - "aries_vcx_wallet", - "indy-credx", - "libvdrtools", - "log", - "public_key", - "serde", - "serde_json", - "test-log", - "thiserror", - "tokio", - "uuid", -] - [[package]] name = "want" version = "0.3.1" diff --git a/Cargo.toml b/Cargo.toml index f0fe515e3a..eae8408f3f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,6 @@ members = [ "aries/aries_vcx_wallet", "aries/aries_vcx_ledger", "aries/misc/indy_ledger_response_parser", - "aries/misc/wallet_migrator", "aries/misc/test_utils", "did_core/did_doc", "did_core/did_methods/did_peer", diff --git a/aries/aries_vcx/Cargo.toml b/aries/aries_vcx/Cargo.toml index a3cabdeaf1..549b3dabe9 100644 --- a/aries/aries_vcx/Cargo.toml +++ b/aries/aries_vcx/Cargo.toml @@ -78,7 +78,6 @@ backtrace = { optional = true, version = "0.3" } [dev-dependencies] test_utils = { path = "../misc/test_utils" } -wallet_migrator = { path = "../misc/wallet_migrator" } async-channel = "2.3.1" tokio = { version = "1.38", features = ["rt", 
"macros", "rt-multi-thread"] } pretty_assertions = "1.4.0" diff --git a/aries/misc/wallet_migrator/Cargo.toml b/aries/misc/wallet_migrator/Cargo.toml deleted file mode 100644 index dbc5cbc209..0000000000 --- a/aries/misc/wallet_migrator/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "wallet_migrator" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -vdrtools_wallet = ["aries_vcx_wallet/vdrtools_wallet"] -askar_wallet = ["aries_vcx_wallet/askar_wallet"] - -[dependencies] -aries_vcx_wallet = { path = "../../aries_vcx_wallet" } -indy-credx.workspace = true -vdrtools = { package = "libvdrtools", path = "../legacy/libvdrtools" } -serde = { version = "1.0.159", features = ["derive"] } -serde_json = "1.0.96" -thiserror = "1.0.40" -log = "0.4.19" -public_key = { path = "../../../did_core/public_key"} - -[dev-dependencies] -tokio = { version = "1.38.0", features = ["macros", "rt"] } -uuid = { version = "1.3.0", default-features = false, features = ["v4"] } -test-log = "0.2.14" - diff --git a/aries/misc/wallet_migrator/src/error.rs b/aries/misc/wallet_migrator/src/error.rs deleted file mode 100644 index 4545fe47a6..0000000000 --- a/aries/misc/wallet_migrator/src/error.rs +++ /dev/null @@ -1,15 +0,0 @@ -use aries_vcx_wallet::errors::error::VcxWalletError; -use serde_json::Error as JsonError; -use thiserror::Error as ThisError; - -pub type MigrationResult = Result; - -#[derive(Debug, ThisError)] -pub enum MigrationError { - #[error("JSON error: {0}")] - Json(#[from] JsonError), - #[error("VcxWallet error: {0}")] - VcxWallet(#[from] VcxWalletError), - #[error("Unsupported wallet migration")] - Unsupported, -} diff --git a/aries/misc/wallet_migrator/src/lib.rs b/aries/misc/wallet_migrator/src/lib.rs deleted file mode 100644 index 72d3604d54..0000000000 --- a/aries/misc/wallet_migrator/src/lib.rs +++ /dev/null @@ -1,50 +0,0 @@ -pub mod error; -pub mod migrate2askar; - -use aries_vcx_wallet::wallet::base_wallet::{ - migrate::{migrate_records, MigrationStats}, - record::Record, - BaseWallet, -}; -use error::{MigrationError, MigrationResult}; -use log::info; -use migrate2askar::migrate_any_record_to_askar; -pub use vdrtools::types::domain::wallet::IndyRecord; - -pub enum MigrationKind { - ToAskar, - Unknown, -} - -impl MigrationKind { - pub fn select_migrate_fn( - &self, - ) -> MigrationResult Result, MigrationError>> { - match self { - MigrationKind::ToAskar => Ok(migrate_any_record_to_askar), - MigrationKind::Unknown => Err(error::MigrationError::Unsupported), - } - } -} - -/// Retrieves all records from the source wallet and migrates them -/// by applying the `migrate_fn` argument. The records are then -/// placed in the destination wallet. 
-pub async fn migrate_wallet( - src_wallet: &impl BaseWallet, - dest_wallet: &impl BaseWallet, - migration_kind: MigrationKind, -) -> MigrationResult { - info!("Starting wallet migration"); - - info!( - "Migrating records from wallet with handle {src_wallet:?} to wallet with handle \ - {dest_wallet:?}" - ); - let migrate_fn = migration_kind.select_migrate_fn()?; - let res = migrate_records(src_wallet, dest_wallet, migrate_fn).await?; - - info!("Migration completed"); - - Ok(res) -} diff --git a/aries/misc/wallet_migrator/src/migrate2askar/mod.rs b/aries/misc/wallet_migrator/src/migrate2askar/mod.rs deleted file mode 100644 index f00683fca5..0000000000 --- a/aries/misc/wallet_migrator/src/migrate2askar/mod.rs +++ /dev/null @@ -1,583 +0,0 @@ -use aries_vcx_wallet::wallet::base_wallet::{ - did_value::DidValue, record::Record, record_category::RecordCategory, -}; -use public_key::{Key, KeyType}; -use serde::Deserialize; - -use crate::error::MigrationResult; - -pub fn migrate_any_record_to_askar(record: Record) -> MigrationResult> { - Ok(match record.category() { - RecordCategory::Did => transform_did(record, RecordCategory::Did), - RecordCategory::TmpDid => transform_did(record, RecordCategory::TmpDid), - _ => Some(record), - }) -} - -#[derive(Deserialize, Debug)] -pub struct DidDataDeserializable { - pub did: String, - pub verkey: String, -} - -fn transform_did(record: Record, category: RecordCategory) -> Option { - let val = match serde_json::from_str::(record.value()) { - Ok(res) => res, - Err(_) => return None, - }; - - let key = match Key::from_base58(&val.verkey, KeyType::Ed25519) { - Ok(key_content) => key_content, - Err(_) => return None, - }; - - let value = match serde_json::to_string(&DidValue::new(&key)) { - Ok(res) => res, - Err(_) => return None, - }; - - Some( - Record::builder() - .category(category) - .name(record.name().to_owned()) - .tags(record.tags().clone()) - .value(value) - .build(), - ) -} - -#[cfg(test)] -#[cfg(all(feature = "vdrtools_wallet", feature = "askar_wallet"))] -mod compat_tests { - use std::collections::{HashMap, HashSet}; - - use aries_vcx_wallet::wallet::{ - askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod, AskarWallet}, - base_wallet::{ - did_wallet::DidWallet, record::Record, record_category::RecordCategory, - record_wallet::RecordWallet, - }, - indy::{indy_wallet_config::IndyWalletConfig, IndySdkWallet}, - }; - use serde_json::json; - use uuid::Uuid; - use vdrtools::Locator; - - use crate::{migrate_wallet, MigrationKind}; - - type TestDataVec = Vec<(RecordCategory, String, u32)>; - struct TestData { - data_vec: TestDataVec, - expected_count: u32, - } - - fn make_dummy_master_secret() -> String { - json!({ - "value": { - "ms": "1234567890" - } - }) - .to_string() - } - - fn make_dummy_cred() -> String { - let cred_sig_str = json!({ - "p_credential": { - "m_2": "1234567890", - "a": "1234567890", - "e": "1234567890", - "v": "1234567890" - }, - "r_credential": null - }) - .to_string(); - - let sig_cor_proof_str = json!({ - "se": "1234567890", - "c": "1234567890" - }) - .to_string(); - - serde_json::to_string(&vdrtools::Credential { - schema_id: vdrtools::SchemaId("test_schema_id".to_owned()), - cred_def_id: vdrtools::CredentialDefinitionId("test_cred_def_id".to_owned()), - rev_reg_id: Some(vdrtools::RevocationRegistryId("test_rev_reg_id".to_owned())), - values: vdrtools::CredentialValues(HashMap::new()), - signature: serde_json::from_str(&cred_sig_str).unwrap(), - signature_correctness_proof: 
serde_json::from_str(&sig_cor_proof_str).unwrap(), - rev_reg: None, - witness: None, - }) - .unwrap() - } - - fn make_dummy_cred_def() -> String { - let primary = json!({ - "n": "1234567890", - "s": "1234567890", - "r": {}, - "rctxt": "1234567890", - "z": "1234567890", - }) - .to_string(); - - serde_json::to_string(&vdrtools::CredentialDefinition::CredentialDefinitionV1( - vdrtools::CredentialDefinitionV1 { - id: vdrtools::CredentialDefinitionId("test_cred_def_id".to_owned()), - schema_id: vdrtools::SchemaId("test_schema_id".to_owned()), - signature_type: vdrtools::SignatureType::CL, - tag: "{}".to_owned(), - value: vdrtools::CredentialDefinitionData { - primary: serde_json::from_str(&primary).unwrap(), - revocation: None, - }, - }, - )) - .unwrap() - } - - fn make_dummy_cred_def_priv_key() -> String { - let priv_key = json!({ - "p_key": { - "p": "1234567890", - "q": "1234567890" - } - }) - .to_string(); - - serde_json::to_string(&vdrtools::CredentialDefinitionPrivateKey { - value: serde_json::from_str(&priv_key).unwrap(), - }) - .unwrap() - } - - fn make_dummy_cred_def_correctness_proof() -> String { - let cor_proof = json!({ - "c": "1234567890", - "xz_cap": "1234567890", - "xr_cap": [] - }) - .to_string(); - - serde_json::to_string(&vdrtools::CredentialDefinitionCorrectnessProof { - value: serde_json::from_str(&cor_proof).unwrap(), - }) - .unwrap() - } - - fn make_dummy_schema() -> String { - serde_json::to_string(&vdrtools::Schema::SchemaV1(vdrtools::SchemaV1 { - id: vdrtools::SchemaId("test_schema_id".to_owned()), - name: "test_schema_name".to_owned(), - version: "test_schema_version".to_owned(), - attr_names: vdrtools::AttributeNames(HashSet::new()), - seq_no: None, - })) - .unwrap() - } - - fn make_dummy_schema_id() -> String { - "test_schema_id".to_owned() - } - - fn make_dummy_rev_reg() -> String { - let rev_reg = json!({ - "accum": "21 11ED98357F9B9B3077E633D35A72CECEF107F85DA7BBFBF2873E2EE7E0F27D326 21 1371CDA6174D6F01A39157428768D328B4B80088EB14AA0AAB7F046B645E1A235 6 65BBFAC37012790BB8B283F164BE3C0585AB60CD7B72123E4DC43DDA7A6A4E6D 4 3BB64FAF922865095CD5AA4349C0437D04EA30FB7592D932531732F2DCB83DB8 6 77039B80A78AB4A2476373C6F8ECC5E2D94B8F37F924549AFA247E2D6EE86DEE 4 24E94FB6B5233B22BDF47745AA821A1797BC6504BC11D5B825B4F8137F1E307F" - }).to_string(); - - serde_json::to_string(&vdrtools::RevocationRegistry::RevocationRegistryV1( - vdrtools::RevocationRegistryV1 { - value: serde_json::from_str(&rev_reg).unwrap(), - }, - )) - .unwrap() - } - - fn make_dummy_rev_reg_delta() -> String { - let rev_reg = json!({ - "prevAccum": "21 11ED98357F9B9B3077E633D35A72CECEF107F85DA7BBFBF2873E2EE7E0F27D326 21 1371CDA6174D6F01A39157428768D328B4B80088EB14AA0AAB7F046B645E1A235 6 65BBFAC37012790BB8B283F164BE3C0585AB60CD7B72123E4DC43DDA7A6A4E6D 4 3BB64FAF922865095CD5AA4349C0437D04EA30FB7592D932531732F2DCB83DB8 6 77039B80A78AB4A2476373C6F8ECC5E2D94B8F37F924549AFA247E2D6EE86DEE 4 24E94FB6B5233B22BDF47745AA821A1797BC6504BC11D5B825B4F8137F1E307F", - "accum": "21 11ED98357F9B9B3077E633D35A72CECEF107F85DA7BBFBF2873E2EE7E0F27D326 21 1371CDA6174D6F01A39157428768D328B4B80088EB14AA0AAB7F046B645E1A235 6 65BBFAC37012790BB8B283F164BE3C0585AB60CD7B72123E4DC43DDA7A6A4E6D 4 3BB64FAF922865095CD5AA4349C0437D04EA30FB7592D932531732F2DCB83DB8 6 77039B80A78AB4A2476373C6F8ECC5E2D94B8F37F924549AFA247E2D6EE86DEE 4 24E94FB6B5233B22BDF47745AA821A1797BC6504BC11D5B825B4F8137F1E307F", - "issued": [], - "revoked": [] - }).to_string(); - - let rev_reg_delta = vdrtools::RevocationRegistryDelta::RevocationRegistryDeltaV1( - 
vdrtools::RevocationRegistryDeltaV1 { - value: serde_json::from_str(&rev_reg).unwrap(), - }, - ); - - json!(rev_reg_delta).to_string() - } - - fn make_dummy_rev_reg_info() -> String { - serde_json::to_string(&vdrtools::RevocationRegistryInfo { - id: vdrtools::RevocationRegistryId("test_rev_reg_id".to_owned()), - curr_id: 1, - used_ids: HashSet::new(), - }) - .unwrap() - } - - fn make_dummy_rev_reg_def() -> String { - let accum_key = json!({ - "z": "1 042CDA7AA76FFD05D0EA1C97F0F238A579AAE4348442298B7F8513277A21D671 1 04C49DDECC3731B11BC98A1495C39DF7F94A297EA6D691DADAF1493300D2977E 1 0D78B673DE9F1CE37FA98E0765B69D963BFF9973317722981943797EFEF1F628 1 1F4DFD2C1ED2BD80D9D92600AB7A1B2911180B4B44C6BC42962084AC4C042385 1 07724871AD4FFC1C30BCAEFE289FAF6F2F322203C34D8D2D3C36DFD816AF9430 1 050F4014E2AFD680A67C197B39D35CA4D03332D6C6922A4D991EC1402B7FF4E6 1 07C0DCAF303CF4B0741447A1A808C8C2BAE6CD30397AAF834428848FEE70FC3D 1 1C028C08BD426B053942A4409F71A5215B6B0B58FF651C72303F1B4C5DDB84C4 1 22DE20332A0E1B0C58F76CBADBF73D0B6875A5F3479AC0E3C4D27A605656BF6E 1 1F461563E404002F9AFE37D09FA98F34B4666D1A4424C89B3C8CE7E85DE23B8A 1 096DA55063F6ABA1B578471DEBDEACA5DE485994F99099BBBB6E326DDF8C3DD2 1 12FFCEFF31CE5781FF6BB9AB279BF8A100E97D43B0F6C31E6FCD6373227E34FD" - }).to_string(); - - serde_json::to_string( - &vdrtools::RevocationRegistryDefinition::RevocationRegistryDefinitionV1( - vdrtools::RevocationRegistryDefinitionV1 { - id: vdrtools::RevocationRegistryId("test_rev_reg_id".to_owned()), - revoc_def_type: vdrtools::RegistryType::CL_ACCUM, - tag: "{}".to_owned(), - cred_def_id: vdrtools::CredentialDefinitionId("test_cred_def_id".to_owned()), - value: vdrtools::RevocationRegistryDefinitionValue { - issuance_type: vdrtools::IssuanceType::ISSUANCE_BY_DEFAULT, - max_cred_num: 10, - public_keys: vdrtools::RevocationRegistryDefinitionValuePublicKeys { - accum_key: serde_json::from_str(&accum_key).unwrap(), - }, - tails_hash: "abc".to_owned(), - tails_location: "/dev/null".to_owned(), - }, - }, - ), - ) - .unwrap() - } - - fn make_dummy_rev_reg_def_priv() -> String { - let rev_key_priv = json!({ - "gamma": "12345" - }) - .to_string(); - - serde_json::to_string(&vdrtools::RevocationRegistryDefinitionPrivate { - value: serde_json::from_str(&rev_key_priv).unwrap(), - }) - .unwrap() - } - - fn generate_test_data() -> TestData { - let master_secret_count = 1; - let indy_cred_count = 1; - let indy_cred_def_count = 1; - let indy_cred_def_priv_count = 1; - let indy_cred_def_cr_proof_count = 1; - let indy_schema_count = 1; - let indy_schema_id_count = 1; - let indy_rev_reg_count = 1; - let indy_rev_reg_delta_count = 1; - let indy_rev_reg_info_count = 1; - let infy_rev_reg_def_count = 1; - let indy_rev_reg_def_priv_count = 1; - - let wallet_items = vec![ - ( - RecordCategory::LinkSecret, - make_dummy_master_secret(), - master_secret_count, - ), - (RecordCategory::Cred, make_dummy_cred(), indy_cred_count), - ( - RecordCategory::CredDef, - make_dummy_cred_def(), - indy_cred_def_count, - ), - ( - RecordCategory::CredDefPriv, - make_dummy_cred_def_priv_key(), - indy_cred_def_priv_count, - ), - ( - RecordCategory::CredKeyCorrectnessProof, - make_dummy_cred_def_correctness_proof(), - indy_cred_def_cr_proof_count, - ), - ( - RecordCategory::CredSchema, - make_dummy_schema(), - indy_schema_count, - ), - ( - RecordCategory::CredMapSchemaId, - make_dummy_schema_id(), - indy_schema_id_count, - ), - ( - RecordCategory::RevReg, - make_dummy_rev_reg(), - indy_rev_reg_count, - ), - ( - RecordCategory::RevRegDelta, - make_dummy_rev_reg_delta(), 
- indy_rev_reg_delta_count, - ), - ( - RecordCategory::RevRegInfo, - make_dummy_rev_reg_info(), - indy_rev_reg_info_count, - ), - ( - RecordCategory::RevRegDef, - make_dummy_rev_reg_def(), - infy_rev_reg_def_count, - ), - ( - RecordCategory::RevRegDefPriv, - make_dummy_rev_reg_def_priv(), - indy_rev_reg_def_priv_count, - ), - ]; - - let expected_count = indy_cred_count - + indy_cred_def_count - + indy_cred_def_priv_count - + indy_cred_def_cr_proof_count - + indy_schema_count - + indy_schema_id_count - + indy_rev_reg_count - + indy_rev_reg_delta_count - + indy_rev_reg_info_count - + infy_rev_reg_def_count - + indy_rev_reg_def_priv_count - + master_secret_count; - - TestData { - expected_count, - data_vec: wallet_items, - } - } - - async fn create_test_data(indy_wallet: &IndySdkWallet, data_vec: TestDataVec) { - for (category, value, count) in data_vec { - for _ in 0..count { - let record = Record::builder() - .category(category) - .name(Uuid::new_v4().to_string()) - .value(value.clone()) - .build(); - indy_wallet.add_record(record).await.unwrap() - } - } - } - - fn setup_indy_wallet_config() -> IndyWalletConfig { - IndyWalletConfig { - wallet_name: format!("wallet_{}", uuid::Uuid::new_v4()), - wallet_key: "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY".into(), - wallet_key_derivation: "RAW".into(), - wallet_type: None, - storage_config: None, - storage_credentials: None, - rekey: None, - rekey_derivation_method: None, - } - } - - async fn setup_indy_wallet(wallet_config: &IndyWalletConfig) -> IndySdkWallet { - IndySdkWallet::create(wallet_config).await.unwrap() - } - - async fn setup_askar_wallet() -> AskarWallet { - let config = AskarWalletConfig::new( - "sqlite://:memory:", - KeyMethod::Unprotected, - "", - &Uuid::new_v4().to_string(), - ); - - AskarWallet::create(&config, true).await.unwrap() - } - - async fn teardown_indy_wallet(wallet: &IndySdkWallet, wallet_config: &IndyWalletConfig) { - let (config, creds) = wallet_config.to_config_and_creds().unwrap(); - - Locator::instance() - .wallet_controller - .close(wallet.get_wallet_handle()) - .await - .unwrap(); - - Locator::instance() - .wallet_controller - .delete(config, creds) - .await - .unwrap(); - } - - #[test_log::test(tokio::test)] - async fn wallet_compatibility_migration_to_askar() { - let askar_wallet = setup_askar_wallet().await; - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - - let data = generate_test_data(); - create_test_data(&indy_wallet, data.data_vec).await; - - let res = migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - assert_eq!(data.expected_count, res.migrated); - } - - #[test_log::test(tokio::test)] - async fn wallet_compatibility_test_create_and_store_my_did() { - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - let askar_wallet = setup_askar_wallet().await; - - let did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - assert_eq!(0, askar_wallet.key_count().await.unwrap()); - migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - let res = askar_wallet.key_for_did(did_data.did()).await.unwrap(); - - assert_eq!(did_data.verkey().base58(), res.base58()); - assert_eq!(1, askar_wallet.key_count().await.unwrap()) - } - - #[test_log::test(tokio::test)] - async fn 
wallet_compatibility_test_replace_key() { - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - let askar_wallet = setup_askar_wallet().await; - - let did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - indy_wallet - .replace_did_key_start(did_data.did(), None) - .await - .unwrap(); - - migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - askar_wallet - .replace_did_key_apply(did_data.did()) - .await - .unwrap(); - - let res = askar_wallet.key_for_did(did_data.did()).await.unwrap(); - - assert_ne!(did_data.verkey().base58(), res.base58()); - } - - #[test_log::test(tokio::test)] - async fn wallet_compatibility_test_sign_and_verify() { - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - let askar_wallet = setup_askar_wallet().await; - - let did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - let msg = "sign this message"; - let sig = indy_wallet - .sign(did_data.verkey(), msg.as_bytes()) - .await - .unwrap(); - - migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - assert!(askar_wallet - .verify(did_data.verkey(), msg.as_bytes(), &sig) - .await - .unwrap()); - } - - #[test_log::test(tokio::test)] - async fn wallet_compatibility_test_pack_and_unpack_authcrypt() { - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - let askar_wallet = setup_askar_wallet().await; - - let sender_did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - let recipient_did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - let msg = "pack me"; - - let data = indy_wallet - .pack_message( - Some(sender_did_data.verkey().to_owned()), - vec![recipient_did_data.verkey().to_owned()], - msg.as_bytes(), - ) - .await - .unwrap(); - - migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - let res = askar_wallet.unpack_message(&data).await.unwrap(); - - assert_eq!(res.message, msg); - } - - #[test_log::test(tokio::test)] - async fn wallet_compatibility_test_pack_and_unpack_anoncrypt() { - let indy_config = setup_indy_wallet_config(); - let indy_wallet = setup_indy_wallet(&indy_config).await; - let askar_wallet = setup_askar_wallet().await; - - let recipient_did_data = indy_wallet - .create_and_store_my_did(None, None) - .await - .unwrap(); - - let msg = "pack me"; - - let data = indy_wallet - .pack_message( - None, - vec![recipient_did_data.verkey().to_owned()], - msg.as_bytes(), - ) - .await - .unwrap(); - - migrate_wallet(&indy_wallet, &askar_wallet, MigrationKind::ToAskar) - .await - .unwrap(); - - teardown_indy_wallet(&indy_wallet, &indy_config).await; - - let res = askar_wallet.unpack_message(&data).await.unwrap(); - - assert_eq!(res.message, msg); - } -} From fe2ea1f49864cdd6b0b21d812627d49c14c7d071 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:27:06 +1000 Subject: [PATCH 03/15] remove indy wallet and move everyone to use askar_wallet feature Signed-off-by: gmulhearn --- Cargo.lock | 2 - aries/agents/mediator/client-tui/Cargo.toml | 2 +- aries/aries_vcx/Cargo.toml | 11 +- aries/aries_vcx/src/errors/error.rs 
| 2 - aries/aries_vcx_wallet/Cargo.toml | 3 - .../src/wallet/indy/all_indy_records.rs | 30 --- .../src/wallet/indy/indy_did_wallet.rs | 114 --------- .../src/wallet/indy/indy_import_config.rs | 53 ----- .../src/wallet/indy/indy_record_wallet.rs | 113 --------- .../src/wallet/indy/indy_tags.rs | 33 --- .../src/wallet/indy/indy_utils.rs | 14 -- .../src/wallet/indy/indy_wallet_config.rs | 119 ---------- .../src/wallet/indy/indy_wallet_record.rs | 29 --- aries/aries_vcx_wallet/src/wallet/indy/mod.rs | 216 ------------------ .../src/wallet/indy/partial_record.rs | 21 -- aries/aries_vcx_wallet/src/wallet/mod.rs | 2 - aries/misc/test_utils/Cargo.toml | 2 +- .../did_methods/did_resolver_sov/Cargo.toml | 2 +- 18 files changed, 6 insertions(+), 762 deletions(-) delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/all_indy_records.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_did_wallet.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_import_config.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_record_wallet.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_tags.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_utils.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_config.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_record.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/mod.rs delete mode 100644 aries/aries_vcx_wallet/src/wallet/indy/partial_record.rs diff --git a/Cargo.lock b/Cargo.lock index 5261fd1bd3..8a5bcdb9e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -627,8 +627,6 @@ dependencies = [ "async-trait", "base64 0.22.1", "bs58", - "indy-api-types", - "libvdrtools", "log", "public_key", "rand 0.8.5", diff --git a/aries/agents/mediator/client-tui/Cargo.toml b/aries/agents/mediator/client-tui/Cargo.toml index 67e3d277d7..0735c525bb 100644 --- a/aries/agents/mediator/client-tui/Cargo.toml +++ b/aries/agents/mediator/client-tui/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] aries_vcx_wallet = { path = "../../../aries_vcx_wallet", features = [ - "vdrtools_wallet", + "askar_wallet", ] } axum = "0.7.5" cursive = { version = "0.20.0", features = ["crossterm-backend"] } diff --git a/aries/aries_vcx/Cargo.toml b/aries/aries_vcx/Cargo.toml index 549b3dabe9..985fa750d9 100644 --- a/aries/aries_vcx/Cargo.toml +++ b/aries/aries_vcx/Cargo.toml @@ -11,22 +11,17 @@ path = "src/lib.rs" doctest = false [features] -# Feature flag to include the 'modular library' dependencies (vdrtools alternatives; indy-vdr, indy-credx) credx = [ "aries_vcx_anoncreds/credx", - "test_utils/vdrtools_wallet", + "test_utils/askar_wallet", "test_utils/credx" ] -anoncreds = ["aries_vcx_anoncreds/anoncreds", "test_utils/anoncreds", "test_utils/vdrtools_wallet"] +anoncreds = ["aries_vcx_anoncreds/anoncreds", "test_utils/anoncreds", "test_utils/askar_wallet"] vdr_proxy_ledger = [ - "aries_vcx_wallet/vdrtools_wallet", + "aries_vcx_wallet/askar_wallet", "test_utils/vdr_proxy_ledger", "credx", ] -vdrtools_wallet = [ - "aries_vcx_wallet/vdrtools_wallet", - "test_utils/vdrtools_wallet", -] backtrace_errors = ["backtrace"] # Feature for allowing legacy proof verification diff --git a/aries/aries_vcx/src/errors/error.rs b/aries/aries_vcx/src/errors/error.rs index d03927827c..09049bb120 100644 --- a/aries/aries_vcx/src/errors/error.rs +++ b/aries/aries_vcx/src/errors/error.rs @@ -159,8 +159,6 @@ pub enum AriesVcxErrorKind { #[error("Error Retrieving messages from API")] 
InvalidMessages, - #[error("Libndy error {}", 0)] - VdrToolsError(u32), #[error("Ursa error")] UrsaError, #[error("No Agent pairwise information")] diff --git a/aries/aries_vcx_wallet/Cargo.toml b/aries/aries_vcx_wallet/Cargo.toml index 4647d80a5f..582537cbcb 100644 --- a/aries/aries_vcx_wallet/Cargo.toml +++ b/aries/aries_vcx_wallet/Cargo.toml @@ -8,7 +8,6 @@ edition.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -vdrtools_wallet = ["dep:libvdrtools", "dep:indy-api-types"] # TODO - remove askar_wallet = ["dep:aries-askar"] [dependencies] @@ -17,9 +16,7 @@ aries-askar = { workspace = true, optional = true } async-trait = "0.1.68" bs58 = { version = "0.5" } base64 = "0.22.1" -libvdrtools = { path = "../misc/legacy/libvdrtools", optional = true } log = "0.4.17" -indy-api-types = { path = "../misc/legacy/libvdrtools/indy-api-types", optional = true } serde = { version = "1.0.159", features = ["derive"] } serde_json = "1.0.95" public_key = { path = "../../did_core/public_key" } diff --git a/aries/aries_vcx_wallet/src/wallet/indy/all_indy_records.rs b/aries/aries_vcx_wallet/src/wallet/indy/all_indy_records.rs deleted file mode 100644 index 89940e0b1f..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/all_indy_records.rs +++ /dev/null @@ -1,30 +0,0 @@ -use async_trait::async_trait; -use vdrtools::indy_wallet::iterator::WalletIterator; - -use crate::{ - errors::error::VcxWalletResult, - wallet::base_wallet::record::{AllRecords, PartialRecord}, -}; - -pub struct AllIndyRecords { - iterator: WalletIterator, -} - -impl AllIndyRecords { - pub fn new(iterator: WalletIterator) -> Self { - Self { iterator } - } -} - -#[async_trait] -impl AllRecords for AllIndyRecords { - fn total_count(&self) -> VcxWalletResult> { - Ok(self.iterator.get_total_count()?) - } - - async fn next(&mut self) -> VcxWalletResult> { - let item = self.iterator.next().await?; - - Ok(item.map(PartialRecord::from_wallet_record)) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_did_wallet.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_did_wallet.rs deleted file mode 100644 index 28cbc4f54e..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_did_wallet.rs +++ /dev/null @@ -1,114 +0,0 @@ -use async_trait::async_trait; -use public_key::{Key, KeyType}; -use vdrtools::{DidMethod, DidValue, KeyInfo, Locator, MyDidInfo}; - -use crate::{ - errors::error::VcxWalletResult, - wallet::{ - base_wallet::{did_data::DidData, did_wallet::DidWallet, record_category::RecordCategory}, - indy::IndySdkWallet, - structs_io::UnpackMessageOutput, - }, -}; - -#[async_trait] -impl DidWallet for IndySdkWallet { - async fn key_count(&self) -> VcxWalletResult { - Ok(self.search(RecordCategory::Did, None).await?.len()) - } - - async fn create_and_store_my_did( - &self, - seed: Option<&str>, - did_method_name: Option<&str>, - ) -> VcxWalletResult { - let (did, vk) = Locator::instance() - .did_controller - .create_and_store_my_did( - self.wallet_handle, - MyDidInfo { - method_name: did_method_name.map(|m| DidMethod(m.into())), - seed: seed.map(Into::into), - ..MyDidInfo::default() - }, - ) - .await?; - - let verkey = Key::from_base58(&vk, KeyType::Ed25519)?; - Ok(DidData::new(&did, &verkey)) - } - - async fn key_for_did(&self, did: &str) -> VcxWalletResult { - let res = Locator::instance() - .did_controller - .key_for_local_did(self.wallet_handle, DidValue(did.into())) - .await?; - - Ok(Key::from_base58(&res, KeyType::Ed25519)?) 
- } - - async fn replace_did_key_start(&self, did: &str, seed: Option<&str>) -> VcxWalletResult { - let key_info = KeyInfo { - seed: seed.map(Into::into), - ..Default::default() - }; - - let key_string = Locator::instance() - .did_controller - .replace_keys_start(self.wallet_handle, key_info, DidValue(did.into())) - .await?; - - Ok(Key::from_base58(&key_string, KeyType::Ed25519)?) - } - - async fn replace_did_key_apply(&self, did: &str) -> VcxWalletResult<()> { - Ok(Locator::instance() - .did_controller - .replace_keys_apply(self.wallet_handle, DidValue(did.into())) - .await?) - } - - async fn sign(&self, key: &Key, msg: &[u8]) -> VcxWalletResult> { - Locator::instance() - .crypto_controller - .crypto_sign(self.wallet_handle, &key.base58(), msg) - .await - .map_err(From::from) - } - - async fn verify(&self, key: &Key, msg: &[u8], signature: &[u8]) -> VcxWalletResult { - Locator::instance() - .crypto_controller - .crypto_verify(&key.base58(), msg, signature) - .await - .map_err(From::from) - } - - async fn pack_message( - &self, - sender_vk: Option, - receiver_keys: Vec, - msg: &[u8], - ) -> VcxWalletResult> { - let receiver_keys_str = receiver_keys.into_iter().map(|key| key.base58()).collect(); - - Ok(Locator::instance() - .crypto_controller - .pack_msg( - msg.into(), - receiver_keys_str, - sender_vk.map(|key| key.base58()), - self.wallet_handle, - ) - .await?) - } - - async fn unpack_message(&self, msg: &[u8]) -> VcxWalletResult { - let unpacked_bytes = Locator::instance() - .crypto_controller - .unpack_msg(serde_json::from_slice(msg)?, self.wallet_handle) - .await?; - - Ok(serde_json::from_slice(&unpacked_bytes[..])?) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_import_config.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_import_config.rs deleted file mode 100644 index 165a292c4e..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_import_config.rs +++ /dev/null @@ -1,53 +0,0 @@ -use async_trait::async_trait; -use serde::{Deserialize, Serialize}; -use vdrtools::{types::domain::wallet::default_key_derivation_method, Locator}; - -use super::indy_utils::parse_key_derivation_method; -use crate::{errors::error::VcxWalletResult, wallet::base_wallet::ImportWallet}; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct IndyImportConfig { - pub wallet_name: String, - pub wallet_key: String, - pub exported_wallet_path: String, - pub backup_key: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub wallet_key_derivation: Option, -} - -#[async_trait] -impl ImportWallet for IndyImportConfig { - async fn import_wallet(&self) -> VcxWalletResult<()> { - Locator::instance() - .wallet_controller - .import( - vdrtools::types::domain::wallet::Config { - id: self.wallet_name.clone(), - ..Default::default() - }, - vdrtools::types::domain::wallet::Credentials { - key: self.wallet_key.clone(), - key_derivation_method: self - .wallet_key_derivation - .as_deref() - .map(parse_key_derivation_method) - .transpose()? 
- .unwrap_or_else(default_key_derivation_method), - - rekey: None, - rekey_derivation_method: default_key_derivation_method(), // default value - - storage_credentials: None, // default value - }, - vdrtools::types::domain::wallet::ExportConfig { - key: self.backup_key.clone(), - path: self.exported_wallet_path.clone(), - - key_derivation_method: default_key_derivation_method(), - }, - ) - .await?; - - Ok(()) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_record_wallet.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_record_wallet.rs deleted file mode 100644 index b6fd08b147..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_record_wallet.rs +++ /dev/null @@ -1,113 +0,0 @@ -use async_trait::async_trait; -use indy_api_types::domain::wallet::IndyRecord; -use vdrtools::Locator; - -use super::{all_indy_records::AllIndyRecords, indy_tags::IndyTags, WALLET_OPTIONS}; -use crate::{ - errors::error::VcxWalletResult, - wallet::{ - base_wallet::{ - record::{AllRecords, Record}, - record_category::RecordCategory, - record_wallet::RecordWallet, - }, - indy::IndySdkWallet, - record_tags::RecordTags, - }, -}; - -#[async_trait] -impl RecordWallet for IndySdkWallet { - async fn all_records(&self) -> VcxWalletResult> { - let all = Locator::instance() - .wallet_controller - .get_all(self.get_wallet_handle()) - .await?; - - Ok(Box::new(AllIndyRecords::new(all))) - } - - async fn add_record(&self, record: Record) -> VcxWalletResult<()> { - let tags_map = if record.tags().is_empty() { - None - } else { - Some(IndyTags::from_record_tags(record.tags().clone()).into_inner()) - }; - - Ok(Locator::instance() - .non_secret_controller - .add_record( - self.wallet_handle, - record.category().to_string(), - record.name().into(), - record.value().into(), - tags_map, - ) - .await?) - } - - async fn get_record(&self, category: RecordCategory, name: &str) -> VcxWalletResult { - let res = Locator::instance() - .non_secret_controller - .get_record( - self.wallet_handle, - category.to_string(), - name.into(), - WALLET_OPTIONS.into(), - ) - .await?; - - let indy_record: IndyRecord = serde_json::from_str(&res)?; - - Ok(Record::try_from_indy_record(indy_record)?) - } - - async fn update_record_tags( - &self, - category: RecordCategory, - name: &str, - new_tags: RecordTags, - ) -> VcxWalletResult<()> { - Ok(Locator::instance() - .non_secret_controller - .update_record_tags( - self.wallet_handle, - category.to_string(), - name.into(), - IndyTags::from_record_tags(new_tags).into_inner(), - ) - .await?) - } - - async fn update_record_value( - &self, - category: RecordCategory, - name: &str, - new_value: &str, - ) -> VcxWalletResult<()> { - Ok(Locator::instance() - .non_secret_controller - .update_record_value( - self.wallet_handle, - category.to_string(), - name.into(), - new_value.into(), - ) - .await?) - } - - async fn delete_record(&self, category: RecordCategory, name: &str) -> VcxWalletResult<()> { - Ok(Locator::instance() - .non_secret_controller - .delete_record(self.wallet_handle, category.to_string(), name.into()) - .await?) 
- } - - async fn search_record( - &self, - category: RecordCategory, - search_filter: Option, - ) -> VcxWalletResult> { - self.search(category, search_filter).await - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_tags.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_tags.rs deleted file mode 100644 index ed159686be..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_tags.rs +++ /dev/null @@ -1,33 +0,0 @@ -use std::collections::HashMap; - -use crate::wallet::record_tags::{RecordTag, RecordTags}; - -pub struct IndyTags(HashMap); - -impl IndyTags { - pub fn new(map: HashMap) -> Self { - Self(map) - } - - pub fn into_inner(self) -> HashMap { - self.0 - } - - pub fn from_record_tags(tags: RecordTags) -> Self { - let mut map = HashMap::new(); - let tags_vec: Vec<_> = tags.into_iter().map(|tag| tag.into_pair()).collect(); - map.extend(tags_vec); - Self(map) - } - - pub fn into_record_tags(self) -> RecordTags { - let mut items: Vec<_> = self - .0 - .into_iter() - .map(|(key, val)| RecordTag::new(&key, &val)) - .collect(); - items.sort(); - - RecordTags::new(items) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_utils.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_utils.rs deleted file mode 100644 index a893d0da9d..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_utils.rs +++ /dev/null @@ -1,14 +0,0 @@ -use indy_api_types::domain::wallet::KeyDerivationMethod; - -use crate::errors::error::{VcxWalletError, VcxWalletResult}; - -pub fn parse_key_derivation_method(method: &str) -> VcxWalletResult { - match method { - "RAW" => Ok(KeyDerivationMethod::RAW), - "ARGON2I_MOD" => Ok(KeyDerivationMethod::ARGON2I_MOD), - "ARGON2I_INT" => Ok(KeyDerivationMethod::ARGON2I_INT), - _ => Err(VcxWalletError::InvalidInput(format!( - "Unknown derivation method {method}" - ))), - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_config.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_config.rs deleted file mode 100644 index fc5f8d9109..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_config.rs +++ /dev/null @@ -1,119 +0,0 @@ -use async_trait::async_trait; -use indy_api_types::{ - domain::wallet::{default_key_derivation_method, Config, Credentials}, - errors::IndyErrorKind, -}; -use log::warn; -use serde::{Deserialize, Serialize}; -use typed_builder::TypedBuilder; -use vdrtools::Locator; - -use super::indy_utils::parse_key_derivation_method; -use crate::{ - errors::error::{VcxWalletError, VcxWalletResult}, - wallet::{base_wallet::ManageWallet, indy::IndySdkWallet}, -}; - -#[derive(Clone, Debug, TypedBuilder, Serialize, Deserialize)] -#[builder(field_defaults(default))] -pub struct IndyWalletConfig { - pub wallet_name: String, - pub wallet_key: String, - pub wallet_key_derivation: String, - #[serde(skip_serializing_if = "Option::is_none")] - #[builder(setter(strip_option))] - pub wallet_type: Option, - #[serde(skip_serializing_if = "Option::is_none")] - #[builder(setter(strip_option))] - pub storage_config: Option, - #[serde(skip_serializing_if = "Option::is_none")] - #[builder(setter(strip_option))] - pub storage_credentials: Option, - #[serde(skip_serializing_if = "Option::is_none")] - #[builder(setter(strip_option))] - pub rekey: Option, - #[serde(skip_serializing_if = "Option::is_none")] - #[builder(setter(strip_option))] - pub rekey_derivation_method: Option, -} - -impl IndyWalletConfig { - pub fn to_config_and_creds(&self) -> VcxWalletResult<(Config, Credentials)> { - let creds = Credentials { - key: 
self.wallet_key.clone(), - key_derivation_method: parse_key_derivation_method(&self.wallet_key_derivation)?, - - rekey: None, - rekey_derivation_method: default_key_derivation_method(), - - storage_credentials: self - .storage_credentials - .as_deref() - .map(serde_json::from_str) - .transpose()?, - }; - - let config = Config { - id: self.wallet_name.clone(), - storage_type: self.wallet_type.clone(), - storage_config: self - .storage_config - .as_deref() - .map(serde_json::from_str) - .transpose()?, - cache: None, - }; - - Ok((config, creds)) - } -} - -#[async_trait] -impl ManageWallet for IndyWalletConfig { - type ManagedWalletType = IndySdkWallet; - - async fn create_wallet(&self) -> VcxWalletResult { - let (config, creds) = self.to_config_and_creds()?; - - let res = Locator::instance() - .wallet_controller - .create(config, creds) - .await; - - match res { - Ok(()) => self.open_wallet().await, - - Err(err) if err.kind() == IndyErrorKind::WalletAlreadyExists => { - warn!( - "wallet \"{}\" already exists. skipping creation", - self.wallet_name - ); - self.open_wallet().await - } - - Err(err) => Err(VcxWalletError::create_wallet_error(err)), - } - } - - async fn open_wallet(&self) -> VcxWalletResult { - let (config, creds) = self.to_config_and_creds()?; - - let handle = Locator::instance() - .wallet_controller - .open(config, creds) - .await?; - - Ok(IndySdkWallet::new(handle)) - } - - async fn delete_wallet(&self) -> VcxWalletResult<()> { - let (config, creds) = self.to_config_and_creds()?; - - let res = Locator::instance() - .wallet_controller - .delete(config, creds) - .await; - - Ok(res.map(|_| ())?) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_record.rs b/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_record.rs deleted file mode 100644 index 6e398572ab..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/indy_wallet_record.rs +++ /dev/null @@ -1,29 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::{errors::error::VcxWalletResult, wallet::base_wallet::record::Record}; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct IndyWalletRecord { - id: Option, - #[serde(rename = "type")] - record_type: Option, - pub value: Option, - tags: Option, -} - -impl IndyWalletRecord { - pub fn from_record(record: Record) -> VcxWalletResult { - let tags = if record.tags().is_empty() { - None - } else { - Some(serde_json::to_string(&record.tags())?) 
- }; - - Ok(Self { - id: Some(record.name().into()), - record_type: Some(record.category().to_string()), - value: Some(record.value().into()), - tags, - }) - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/mod.rs b/aries/aries_vcx_wallet/src/wallet/indy/mod.rs deleted file mode 100644 index bd4e6455fc..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/mod.rs +++ /dev/null @@ -1,216 +0,0 @@ -use std::str::FromStr; - -use async_trait::async_trait; -use indy_api_types::{ - domain::wallet::{default_key_derivation_method, IndyRecord}, - errors::IndyErrorKind, -}; -use log::warn; -use serde::Deserialize; -use serde_json::Value; -use vdrtools::{Locator, WalletHandle}; - -use self::{indy_tags::IndyTags, indy_wallet_config::IndyWalletConfig}; -use super::{ - base_wallet::{ - key_value::KeyValue, record::Record, record_category::RecordCategory, - record_wallet::RecordWallet, BaseWallet, - }, - record_tags::RecordTags, -}; -use crate::errors::error::{VcxWalletError, VcxWalletResult}; - -mod all_indy_records; -mod indy_did_wallet; -pub mod indy_import_config; -mod indy_record_wallet; -mod indy_tags; -mod indy_utils; -pub mod indy_wallet_config; -pub mod indy_wallet_record; -mod partial_record; - -impl Record { - pub fn try_from_indy_record(indy_record: IndyRecord) -> VcxWalletResult { - Ok(Record::builder() - .name(indy_record.id) - .category(RecordCategory::from_str(&indy_record.type_)?) - .value(indy_record.value) - .tags(IndyTags::new(indy_record.tags).into_record_tags()) - .build()) - } -} - -impl From for IndyRecord { - fn from(record: Record) -> Self { - Self { - id: record.name().into(), - type_: record.category().to_string(), - value: record.value().into(), - tags: IndyTags::from_record_tags(record.tags().to_owned()).into_inner(), - } - } -} - -#[derive(Debug, Clone, Copy)] -pub struct IndySdkWallet { - wallet_handle: WalletHandle, -} - -impl IndySdkWallet { - pub fn new(wallet_handle: WalletHandle) -> Self { - IndySdkWallet { wallet_handle } - } - - pub async fn create(wallet_config: &IndyWalletConfig) -> VcxWalletResult { - let (config, creds) = wallet_config.to_config_and_creds()?; - - let res = Locator::instance() - .wallet_controller - .create(config, creds) - .await; - - match res { - Ok(()) => Self::open(wallet_config).await, - - Err(err) if err.kind() == IndyErrorKind::WalletAlreadyExists => { - warn!( - "wallet \"{}\" already exists. 
skipping creation", - wallet_config.wallet_name - ); - Self::open(wallet_config).await - } - - Err(err) => Err(VcxWalletError::create_wallet_error(err)), - } - } - - async fn open(wallet_config: &IndyWalletConfig) -> VcxWalletResult { - let (config, creds) = wallet_config.to_config_and_creds()?; - - let handle = Locator::instance() - .wallet_controller - .open(config, creds) - .await?; - - Ok(Self::new(handle)) - } - - pub fn get_wallet_handle(&self) -> WalletHandle { - self.wallet_handle - } - - #[allow(unreachable_patterns)] - async fn search( - &self, - category: RecordCategory, - search_filter: Option, - ) -> VcxWalletResult> { - let query_json = search_filter.unwrap_or("{}".into()); - - let search_handle = Locator::instance() - .non_secret_controller - .open_search( - self.wallet_handle, - category.to_string(), - query_json, - SEARCH_OPTIONS.into(), - ) - .await?; - - let next = || async { - let record = Locator::instance() - .non_secret_controller - .fetch_search_next_records(self.wallet_handle, search_handle, 1) - .await?; - - let indy_res: Value = serde_json::from_str(&record)?; - - indy_res - .get("records") - .and_then(|v| v.as_array()) - .and_then(|arr| arr.first()) - .map(|item| IndyRecord::deserialize(item).map_err(VcxWalletError::from)) - .transpose() - }; - - let mut records = Vec::new(); - while let Some(indy_record) = next().await? { - records.push(Record::try_from_indy_record(indy_record)?); - } - - Ok(records) - } -} - -const WALLET_OPTIONS: &str = - r#"{"retrieveType": true, "retrieveValue": true, "retrieveTags": true}"#; - -const SEARCH_OPTIONS: &str = r#"{"retrieveType": true, "retrieveValue": true, "retrieveTags": true, "retrieveRecords": true}"#; - -#[async_trait] -impl BaseWallet for IndySdkWallet { - async fn export_wallet(&self, path: &str, backup_key: &str) -> VcxWalletResult<()> { - Locator::instance() - .wallet_controller - .export( - self.wallet_handle, - vdrtools::types::domain::wallet::ExportConfig { - key: backup_key.into(), - path: path.into(), - - key_derivation_method: default_key_derivation_method(), - }, - ) - .await?; - - Ok(()) - } - - async fn close_wallet(&self) -> VcxWalletResult<()> { - Locator::instance() - .wallet_controller - .close(self.wallet_handle) - .await?; - - Ok(()) - } - - async fn create_key( - &self, - name: &str, - value: KeyValue, - tags: &RecordTags, - ) -> VcxWalletResult<()> { - let value = serde_json::to_string(&value)?; - let record = Record::builder() - .name(name.into()) - .value(value) - .category(RecordCategory::Key) - .tags(tags.clone()) - .build(); - - Ok(self.add_record(record).await?) 
- } -} - -#[cfg(test)] -pub mod tests { - use super::IndySdkWallet; - use crate::wallet::{base_wallet::ManageWallet, indy::indy_wallet_config::IndyWalletConfig}; - - pub async fn dev_setup_indy_wallet() -> IndySdkWallet { - let config_wallet = IndyWalletConfig { - wallet_name: format!("wallet_{}", uuid::Uuid::new_v4()), - wallet_key: "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY".into(), - wallet_key_derivation: "RAW".into(), - wallet_type: None, - storage_config: None, - storage_credentials: None, - rekey: None, - rekey_derivation_method: None, - }; - - config_wallet.create_wallet().await.unwrap() - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/indy/partial_record.rs b/aries/aries_vcx_wallet/src/wallet/indy/partial_record.rs deleted file mode 100644 index 5c81dd4dda..0000000000 --- a/aries/aries_vcx_wallet/src/wallet/indy/partial_record.rs +++ /dev/null @@ -1,21 +0,0 @@ -use vdrtools::WalletRecord; - -use super::indy_tags::IndyTags; -use crate::wallet::base_wallet::record::PartialRecord; - -impl PartialRecord { - pub fn from_wallet_record(wallet_record: WalletRecord) -> Self { - let name = wallet_record.get_id().into(); - let category = wallet_record.get_type(); - let value = wallet_record.get_value(); - - let found_tags = wallet_record.get_tags(); - - Self::builder() - .name(name) - .category(category.map(Into::into)) - .value(value.map(Into::into)) - .tags(found_tags.map(|tags| IndyTags::new(tags.clone()).into_record_tags())) - .build() - } -} diff --git a/aries/aries_vcx_wallet/src/wallet/mod.rs b/aries/aries_vcx_wallet/src/wallet/mod.rs index b8829bd610..1b489ed5f3 100644 --- a/aries/aries_vcx_wallet/src/wallet/mod.rs +++ b/aries/aries_vcx_wallet/src/wallet/mod.rs @@ -1,8 +1,6 @@ #[cfg(feature = "askar_wallet")] pub mod askar; pub mod base_wallet; -// #[cfg(feature = "vdrtools_wallet")] -// pub mod indy; pub mod record_tags; pub mod structs_io; mod utils; diff --git a/aries/misc/test_utils/Cargo.toml b/aries/misc/test_utils/Cargo.toml index 07a2880123..518b9ea0aa 100644 --- a/aries/misc/test_utils/Cargo.toml +++ b/aries/misc/test_utils/Cargo.toml @@ -9,7 +9,7 @@ edition.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -vdrtools_wallet = ["aries_vcx_wallet/vdrtools_wallet"] +askar_wallet = ["aries_vcx_wallet/askar_wallet"] vdr_proxy_ledger = [ "aries_vcx_ledger/vdr_proxy_ledger", "credx", diff --git a/did_core/did_methods/did_resolver_sov/Cargo.toml b/did_core/did_methods/did_resolver_sov/Cargo.toml index a34a921f58..4384056e1f 100644 --- a/did_core/did_methods/did_resolver_sov/Cargo.toml +++ b/did_core/did_methods/did_resolver_sov/Cargo.toml @@ -20,5 +20,5 @@ mockall = "0.13.0" aries_vcx = { path = "../../../aries/aries_vcx" } tokio = { version = "1.38.0", default-features = false, features = ["macros", "rt"] } uuid = "1.3.1" -test_utils = {path = "../../../aries/misc/test_utils", features = ["vdrtools_wallet"] } +test_utils = {path = "../../../aries/misc/test_utils", features = ["askar_wallet"] } aries_vcx_wallet = { path = "../../../aries/aries_vcx_wallet" } From 7c74697a8e8592e94aeb4e09bdf3d20448bc532e Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:29:36 +1000 Subject: [PATCH 04/15] remove more vdrtools features Signed-off-by: gmulhearn --- aries/aries_vcx/src/errors/mapping_wallet.rs | 11 ----------- aries/aries_vcx_wallet/src/errors/error.rs | 8 -------- .../src/errors/mapping_indy_error.rs | 15 --------------- aries/aries_vcx_wallet/src/errors/mod.rs | 2 -- 
.../src/wallet/base_wallet/migrate.rs | 9 --------- 5 files changed, 45 deletions(-) delete mode 100644 aries/aries_vcx_wallet/src/errors/mapping_indy_error.rs diff --git a/aries/aries_vcx/src/errors/mapping_wallet.rs b/aries/aries_vcx/src/errors/mapping_wallet.rs index d483caa77b..8890d1ee8f 100644 --- a/aries/aries_vcx/src/errors/mapping_wallet.rs +++ b/aries/aries_vcx/src/errors/mapping_wallet.rs @@ -45,17 +45,6 @@ impl From for AriesVcxError { VcxWalletError::NotBase64(_) => { Self::from_msg(AriesVcxErrorKind::ParsingError, value.to_string()) } - // imperfect solution: - // ideally we want this to be conditionally compiled if the - // aries_vcx_wallet/vdrtools_wallet feature is enabled, rather than the - // aries_vcx/vdrtools_wallet feature. however that is not possible currently - // with cargo. as such, if we have a case where - // aries_vcx_wallet/vdrtool_wallet is enabled, but aries_vcx/vdrtools_wallet - // is not, then the error will fall thru to the catch all case `_ =>` - #[cfg(feature = "vdrtools_wallet")] - VcxWalletError::IndyApiError(_) => { - Self::from_msg(AriesVcxErrorKind::InvalidLedgerResponse, value.to_string()) - } // can be #[allow(unreachable_patterns)] _ => Self::from_msg(AriesVcxErrorKind::UnknownError, value.to_string()), diff --git a/aries/aries_vcx_wallet/src/errors/error.rs b/aries/aries_vcx_wallet/src/errors/error.rs index a9a4cf5563..3e30e9d9a7 100644 --- a/aries/aries_vcx_wallet/src/errors/error.rs +++ b/aries/aries_vcx_wallet/src/errors/error.rs @@ -4,8 +4,6 @@ use std::{ }; use thiserror::Error as ThisError; -#[cfg(feature = "vdrtools_wallet")] -use vdrtools::IndyError; use crate::wallet::base_wallet::record_category::RecordCategory; @@ -53,8 +51,6 @@ pub enum VcxWalletError { NotBase64(base64::DecodeError), RecordNotFound(NotFoundInfo), UnknownRecordCategory(String), - #[cfg(feature = "vdrtools_wallet")] - IndyApiError(IndyError), InvalidInput(String), NoRecipientKeyFound, InvalidJson(serde_json::Error), @@ -84,8 +80,6 @@ impl Display for VcxWalletError { VcxWalletError::UnknownRecordCategory(inner) => { write!(f, "Unknown RecordCategory: {}", inner) } - #[cfg(feature = "vdrtools_wallet")] - VcxWalletError::IndyApiError(inner) => write!(f, "Indy API error: {}", inner), VcxWalletError::InvalidInput(inner) => write!(f, "Invalid input: {}", inner), VcxWalletError::NoRecipientKeyFound => write!(f, "No recipient key found"), VcxWalletError::InvalidJson(inner) => write!(f, "Invalid JSON: {}", inner), @@ -106,8 +100,6 @@ impl std::error::Error for VcxWalletError { VcxWalletError::NotBase64(inner) => Some(inner), VcxWalletError::RecordNotFound(_) => None, VcxWalletError::UnknownRecordCategory(_) => None, - #[cfg(feature = "vdrtools_wallet")] - VcxWalletError::IndyApiError(inner) => Some(inner), VcxWalletError::InvalidInput(_) => None, VcxWalletError::NoRecipientKeyFound => None, VcxWalletError::InvalidJson(inner) => Some(inner), diff --git a/aries/aries_vcx_wallet/src/errors/mapping_indy_error.rs b/aries/aries_vcx_wallet/src/errors/mapping_indy_error.rs deleted file mode 100644 index 3a1559f0d8..0000000000 --- a/aries/aries_vcx_wallet/src/errors/mapping_indy_error.rs +++ /dev/null @@ -1,15 +0,0 @@ -use indy_api_types::errors::IndyErrorKind; -use vdrtools::IndyError; - -use super::error::VcxWalletError; - -impl From for VcxWalletError { - fn from(value: IndyError) -> Self { - match value.kind() { - IndyErrorKind::WalletItemNotFound => { - Self::record_not_found_from_str(&value.to_string()) - } - _ => Self::IndyApiError(value), - } - } -} diff --git 
a/aries/aries_vcx_wallet/src/errors/mod.rs b/aries/aries_vcx_wallet/src/errors/mod.rs index cba47761c3..e7b99715ab 100644 --- a/aries/aries_vcx_wallet/src/errors/mod.rs +++ b/aries/aries_vcx_wallet/src/errors/mod.rs @@ -1,6 +1,4 @@ pub mod error; #[cfg(feature = "askar_wallet")] mod mapping_askar; -#[cfg(feature = "vdrtools_wallet")] -mod mapping_indy_error; mod mapping_others; diff --git a/aries/aries_vcx_wallet/src/wallet/base_wallet/migrate.rs b/aries/aries_vcx_wallet/src/wallet/base_wallet/migrate.rs index e000bdfb1f..758997ff36 100644 --- a/aries/aries_vcx_wallet/src/wallet/base_wallet/migrate.rs +++ b/aries/aries_vcx_wallet/src/wallet/base_wallet/migrate.rs @@ -1,7 +1,5 @@ use std::str::FromStr; -#[cfg(feature = "vdrtools_wallet")] -use indy_api_types::errors::IndyErrorKind; use log::{error, info, trace, warn}; use super::{ @@ -172,13 +170,6 @@ async fn add_record( trace!("Record type: {record:?} already exists in destination wallet, skipping"); migration_stats.duplicated += 1; } - #[cfg(feature = "vdrtools_wallet")] - VcxWalletError::IndyApiError(indy_err) - if indy_err.kind() == IndyErrorKind::CredDefAlreadyExists => - { - trace!("Cred def: {record:?} already exists in destination wallet, skipping"); - migration_stats.duplicated += 1; - } _ => { error!("Error adding record {record:?} to destination wallet: {err:?}"); migration_stats.failed += 1; From d9aeb42fa7bd7bb2b9b4452ec4bb2e8c281dcb12 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:37:42 +1000 Subject: [PATCH 05/15] =?UTF-8?q?remove=20vdrtools=20crate=20=F0=9F=92=A5?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: gmulhearn --- Cargo.lock | 657 +--- Cargo.toml | 1 - aries/agents/mediator/Cargo.toml | 2 +- aries/misc/legacy/libvdrtools/Cargo.toml | 50 - .../libvdrtools/indy-api-types/Cargo.toml | 20 - .../indy-api-types/src/domain/mod.rs | 1 - .../indy-api-types/src/domain/wallet/mod.rs | 101 - .../libvdrtools/indy-api-types/src/errors.rs | 639 ---- .../libvdrtools/indy-api-types/src/lib.rs | 280 -- .../legacy/libvdrtools/indy-utils/Cargo.toml | 38 - .../src/crypto/base64/rust_base64.rs | 78 - .../crypto/chacha20poly1305_ietf/sodium.rs | 428 --- .../src/crypto/ed25519_box/sodium.rs | 73 - .../src/crypto/ed25519_sign/sodium.rs | 123 - .../indy-utils/src/crypto/hash/openssl.rs | 84 - .../src/crypto/hmacsha256/sodium.rs | 17 - .../libvdrtools/indy-utils/src/crypto/mod.rs | 44 - .../src/crypto/pwhash_argon2i13/sodium.rs | 133 - .../src/crypto/randombytes/sodium.rs | 66 - .../indy-utils/src/crypto/sealedbox/sodium.rs | 45 - .../indy-utils/src/crypto/sodium_type.rs | 94 - .../indy-utils/src/crypto/xsalsa20/sodium.rs | 72 - .../libvdrtools/indy-utils/src/environment.rs | 142 - .../legacy/libvdrtools/indy-utils/src/lib.rs | 44 - .../libvdrtools/indy-utils/src/sequence.rs | 11 - .../legacy/libvdrtools/indy-utils/src/wql.rs | 2791 ----------------- .../legacy/libvdrtools/indy-wallet/Cargo.toml | 32 - .../libvdrtools/indy-wallet/src/cache/lru.rs | 50 - .../libvdrtools/indy-wallet/src/cache/mod.rs | 17 - .../indy-wallet/src/cache/wallet_cache.rs | 338 -- .../libvdrtools/indy-wallet/src/encryption.rs | 505 --- .../indy-wallet/src/export_import.rs | 329 -- .../libvdrtools/indy-wallet/src/iterator.rs | 38 - .../libvdrtools/indy-wallet/src/language.rs | 152 - .../legacy/libvdrtools/indy-wallet/src/lib.rs | 1206 ------- .../indy-wallet/src/query_encryption.rs | 120 - .../indy-wallet/src/storage/default/mod.rs | 939 ------ .../indy-wallet/src/storage/default/query.rs | 
376 --- .../indy-wallet/src/storage/mod.rs | 112 - .../indy-wallet/src/storage/mysql/mod.rs | 2246 ------------- .../indy-wallet/src/storage/mysql/query.rs | 315 -- .../libvdrtools/indy-wallet/src/wallet.rs | 487 --- .../libvdrtools/src/controllers/crypto.rs | 594 ---- .../legacy/libvdrtools/src/controllers/did.rs | 678 ---- .../legacy/libvdrtools/src/controllers/mod.rs | 9 - .../src/controllers/non_secrets.rs | 504 --- .../libvdrtools/src/controllers/wallet.rs | 436 --- .../src/domain/anoncreds/credential.rs | 56 - .../domain/anoncreds/credential_definition.rs | 437 --- .../src/domain/anoncreds/credential_offer.rs | 30 - .../domain/anoncreds/credential_request.rs | 34 - .../src/domain/anoncreds/indy_identifiers.rs | 155 - .../src/domain/anoncreds/master_secret.rs | 6 - .../libvdrtools/src/domain/anoncreds/mod.rs | 15 - .../libvdrtools/src/domain/anoncreds/proof.rs | 86 - .../src/domain/anoncreds/proof_request.rs | 476 --- .../domain/anoncreds/requested_credential.rs | 21 - .../domain/anoncreds/revocation_registry.rs | 42 - .../revocation_registry_definition.rs | 323 -- .../anoncreds/revocation_registry_delta.rs | 22 - .../src/domain/anoncreds/schema.rs | 255 -- .../legacy/libvdrtools/src/domain/cache.rs | 15 - .../libvdrtools/src/domain/crypto/did.rs | 128 - .../libvdrtools/src/domain/crypto/key.rs | 38 - .../libvdrtools/src/domain/crypto/mod.rs | 11 - .../libvdrtools/src/domain/crypto/pack.rs | 40 - .../misc/legacy/libvdrtools/src/domain/mod.rs | 9 - aries/misc/legacy/libvdrtools/src/lib.rs | 109 - .../src/services/crypto/ed25519.rs | 98 - .../libvdrtools/src/services/crypto/mod.rs | 1183 ------- .../legacy/libvdrtools/src/services/mod.rs | 5 - .../legacy/libvdrtools/src/services/wallet.rs | 1 - .../libvdrtools/src/utils/crypto/base58.rs | 26 - .../libvdrtools/src/utils/crypto/mod.rs | 2 - .../src/utils/crypto/verkey_builder.rs | 132 - .../misc/legacy/libvdrtools/src/utils/mod.rs | 4 - .../legacy/libvdrtools/src/utils/qualifier.rs | 94 - 77 files changed, 56 insertions(+), 19314 deletions(-) delete mode 100644 aries/misc/legacy/libvdrtools/Cargo.toml delete mode 100644 aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml delete mode 100644 aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs delete mode 100644 
aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs delete mode 100755 aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs delete mode 100644 aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/controllers/crypto.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/controllers/did.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/controllers/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/controllers/wallet.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/cache.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs delete mode 100644 
aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/domain/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/lib.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/services/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/services/wallet.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs delete mode 100755 aries/misc/legacy/libvdrtools/src/utils/mod.rs delete mode 100644 aries/misc/legacy/libvdrtools/src/utils/qualifier.rs diff --git a/Cargo.lock b/Cargo.lock index 8a5bcdb9e0..48049a63ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -49,7 +49,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rand 0.8.5", + "rand", "sha1", "smallvec", "tokio", @@ -169,7 +169,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "socket2", - "time 0.3.20", + "time", "url", ] @@ -242,7 +242,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.15", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -330,7 +330,7 @@ dependencies = [ "ffi-support", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "rmp-serde", "serde", @@ -354,7 +354,7 @@ dependencies = [ "num-traits", "once_cell", "openssl", - "rand 0.8.5", + "rand", "serde", "sha2", ] @@ -373,7 +373,7 @@ dependencies = [ "num-traits", "once_cell", "openssl", - "rand 0.8.5", + "rand", "serde", "sha2", ] @@ -386,7 +386,7 @@ dependencies = [ "bitvec", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -522,7 +522,7 @@ dependencies = [ "display_as_json", "env_logger 0.11.5", "log", - "rand 0.8.5", + "rand", "reqwest 0.12.8", "serde", "serde_derive", @@ -538,7 +538,7 @@ dependencies = [ "aries_vcx_anoncreds", "aries_vcx_ledger", "aries_vcx_wallet", - "async-channel 2.3.1", + "async-channel", "async-trait", "backtrace", "base64 0.22.1", @@ -561,7 +561,7 @@ dependencies = [ "num-bigint", "pretty_assertions", "public_key", - "rand 0.8.5", + "rand", "regex", "serde", "serde_derive", @@ -572,7 +572,7 @@ dependencies = [ "strum_macros", "test_utils", "thiserror", - "time 0.3.20", + "time", "tokio", "url", "uuid", @@ -593,7 +593,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", + "time", "uuid", ] @@ -614,7 +614,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", + "time", "tokio", ] @@ -629,7 +629,7 @@ dependencies = [ "bs58", "log", "public_key", - "rand 0.8.5", + "rand", "serde", "serde_json", "thiserror", @@ -712,7 +712,7 @@ dependencies = [ "k256", "p256", "p384", - "rand 0.8.5", + "rand", "serde", "serde-json-core", "sha2", @@ -753,27 +753,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "async-attributes" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = 
"async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - [[package]] name = "async-channel" version = "2.3.1" @@ -786,53 +765,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-executor" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand", - "futures-lite", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io", - "async-lock", - "blocking", - "futures-lite", - "once_cell", -] - -[[package]] -name = "async-io" -version = "2.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" -dependencies = [ - "async-lock", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite", - "parking", - "polling", - "rustix", - "slab", - "tracing", - "windows-sys 0.59.0", -] - [[package]] name = "async-lock" version = "3.4.0" @@ -844,33 +776,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-std" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" -dependencies = [ - "async-attributes", - "async-channel 1.9.0", - "async-global-executor", - "async-io", - "async-lock", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-stream" version = "0.3.6" @@ -893,12 +798,6 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - [[package]] name = "async-trait" version = "0.1.83" @@ -1132,19 +1031,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite", - "piper", -] - [[package]] name = "bls12_381" version = "0.8.0" @@ -1153,7 +1039,7 @@ checksum = "d7bc6d6292be3a19e6379786dac800f551e5865a5bb51ebbe3064ab80433f403" dependencies = [ "ff", "group", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1442,16 +1328,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - [[package]] name = "const-oid" version = "0.9.6" @@ -1471,7 +1347,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ "percent-encoding", - "time 0.3.20", + "time", "version_check", ] @@ -1598,7 +1474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1610,7 +1486,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "typenum", ] @@ -1688,7 +1564,7 @@ dependencies = [ "log", "num", "owning_ref", - "time 0.3.20", + "time", "unicode-segmentation", "unicode-width", "xi-unicode", @@ -1721,38 +1597,14 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "darling" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d706e75d87e35569db781a9b5e2416cff1236a47ed380831f959382ccd5f858" -dependencies = [ - "darling_core 0.10.2", - "darling_macro 0.10.2", -] - [[package]] name = "darling" version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ - "darling_core 0.20.10", - "darling_macro 0.20.10", -] - -[[package]] -name = "darling_core" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c960ae2da4de88a91b2d920c2a7233b400bc33cb28453a2987822d8392519b" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.9.3", - "syn 1.0.109", + "darling_core", + "darling_macro", ] [[package]] @@ -1769,24 +1621,13 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "darling_macro" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5a2f4ac4969822c62224815d069952656cadc7084fdca9751e6d959189b72" -dependencies = [ - "darling_core 0.10.2", - "quote", - "syn 1.0.109", -] - [[package]] name = "darling_macro" version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ - "darling_core 0.20.10", + "darling_core", "quote", "syn 2.0.79", ] @@ -1843,7 +1684,7 @@ version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "syn 2.0.79", @@ -2037,27 +1878,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "dirs" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - [[package]] name = "display_as_json" version = "0.1.0" @@ -2144,7 +1964,7 @@ dependencies = [ "generic-array", "group", "hkdf", - "rand_core 0.6.4", + "rand_core", "sec1", "subtle", "zeroize", @@ -2194,7 +2014,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"59c3b24c345d8c314966bdc1832f6c2635bfcce8e7cf363bd115987bba2ee242" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "syn 2.0.79", @@ -2327,28 +2147,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "failure" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" -dependencies = [ - "backtrace", - "failure_derive", -] - -[[package]] -name = "failure_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "fastrand" version = "2.1.1" @@ -2361,7 +2159,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2503,7 +2301,6 @@ dependencies = [ "futures-core", "futures-task", "futures-util", - "num_cpus", ] [[package]] @@ -2597,19 +2394,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -2618,7 +2402,7 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", ] [[package]] @@ -2648,7 +2432,7 @@ dependencies = [ "num-integer", "num-traits", "once_cell", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -2657,18 +2441,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "gloo-timers" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "goblin" version = "0.6.1" @@ -2687,7 +2459,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -3084,23 +2856,6 @@ dependencies = [ "hashbrown 0.15.0", ] -[[package]] -name = "indy-api-types" -version = "0.1.0" -dependencies = [ - "bs58", - "futures", - "libc", - "log", - "openssl", - "serde", - "serde_derive", - "serde_json", - "sqlx", - "thiserror", - "ursa", -] - [[package]] name = "indy-blssignatures" version = "0.1.0" @@ -3108,7 +2863,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ea4f5684155eae13be942fc61ff57706254adc0c6e99316c98805a0985200db" dependencies = [ "amcl", - "rand 0.8.5", + "rand", "serde", "sha2", "sha3", @@ -3124,7 +2879,7 @@ dependencies = [ "indy-data-types 0.7.0", "log", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3143,7 +2898,7 @@ dependencies = [ "ed25519-dalek", "hex", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3165,7 +2920,7 @@ 
dependencies = [ "ed25519-dalek", "hex", "once_cell", - "rand 0.8.5", + "rand", "regex", "serde", "serde_json", @@ -3184,28 +2939,7 @@ dependencies = [ "serde", "serde_json", "thiserror", - "time 0.3.20", -] - -[[package]] -name = "indy-utils" -version = "0.1.0" -dependencies = [ - "base64 0.21.7", - "dirs", - "failure", - "indy-api-types", - "lazy_static", - "libc", - "log", - "openssl", - "rand 0.8.5", - "rmp-serde", - "serde", - "serde_derive", - "serde_json", - "sodiumoxide", - "zeroize", + "time", ] [[package]] @@ -3228,7 +2962,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-utils", - "rand 0.8.5", + "rand", "regex", "rmp-serde", "serde", @@ -3237,7 +2971,7 @@ dependencies = [ "sha3", "sled", "thiserror", - "time 0.3.20", + "time", "url", "zmq", ] @@ -3253,28 +2987,6 @@ dependencies = [ "url", ] -[[package]] -name = "indy-wallet" -version = "0.1.0" -dependencies = [ - "async-std", - "async-trait", - "bs58", - "byteorder", - "futures", - "indy-api-types", - "indy-utils", - "libc", - "log", - "lru", - "rmp-serde", - "serde", - "serde_derive", - "serde_json", - "sqlx", - "zeroize", -] - [[package]] name = "inout" version = "0.1.3" @@ -3294,12 +3006,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "int_traits" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b33c9a5c599d67d051c4dc25eb1b6b4ef715d1763c20c85c688717a1734f204e" - [[package]] name = "ipnet" version = "2.10.0" @@ -3386,15 +3092,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "language-tags" version = "0.3.2" @@ -3422,26 +3119,6 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", -] - -[[package]] -name = "libsodium-sys" -version = "0.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcbd1beeed8d44caa8a669ebaa697c313976e242c03cc9fb23d88bf1656f5542" -dependencies = [ - "libc", - "pkg-config", -] - [[package]] name = "libsqlite3-sys" version = "0.27.0" @@ -3453,32 +3130,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "libvdrtools" -version = "0.8.6" -dependencies = [ - "async-std", - "async-trait", - "bs58", - "dirs", - "failure", - "futures", - "hex", - "indy-api-types", - "indy-utils", - "indy-wallet", - "lazy_static", - "libc", - "log", - "log-derive", - "regex", - "serde", - "serde_derive", - "serde_json", - "ursa", - "zeroize", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3517,21 +3168,6 @@ name = "log" version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -dependencies = [ - "value-bag", -] - -[[package]] -name = "log-derive" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a42526bb432bcd1b43571d5f163984effa25409a29f1a3242a54d0577d55bcf" -dependencies = [ - "darling 0.10.2", - "proc-macro2", - "quote", - "syn 1.0.109", -] [[package]] name = "lru" @@ -3627,7 
+3263,7 @@ dependencies = [ name = "messages_macros" version = "0.1.0" dependencies = [ - "darling 0.20.10", + "darling", "proc-macro2", "quote", "shared", @@ -3684,7 +3320,7 @@ checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.48.0", ] @@ -3697,7 +3333,7 @@ dependencies = [ "hermit-abi 0.3.9", "libc", "log", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.52.0", ] @@ -3797,7 +3433,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -3812,7 +3448,7 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand 0.8.5", + "rand", "smallvec", "zeroize", ] @@ -3866,16 +3502,6 @@ dependencies = [ "libm", ] -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi 0.3.9", - "libc", -] - [[package]] name = "num_threads" version = "0.1.7" @@ -3953,12 +3579,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - [[package]] name = "os_str_bytes" version = "6.6.1" @@ -4059,7 +3679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -4124,17 +3744,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "piper" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" -dependencies = [ - "atomic-waker", - "fastrand", - "futures-io", -] - [[package]] name = "pkcs1" version = "0.7.5" @@ -4168,21 +3777,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" -[[package]] -name = "polling" -version = "3.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix", - "tracing", - "windows-sys 0.59.0", -] - [[package]] name = "poly1305" version = "0.8.0" @@ -4328,19 +3922,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -4348,18 +3929,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 
0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -4369,16 +3940,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -4387,16 +3949,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "getrandom", ] [[package]] @@ -4417,17 +3970,6 @@ dependencies = [ "bitflags 2.6.0", ] -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom 0.2.15", - "libredox", - "thiserror", -] - [[package]] name = "regex" version = "1.11.0" @@ -4562,7 +4104,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom", "libc", "spin", "untrusted", @@ -4604,7 +4146,7 @@ dependencies = [ "num-traits", "pkcs1", "pkcs8", - "rand_core 0.6.4", + "rand_core", "signature", "spki", "subtle", @@ -4991,7 +4533,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -5048,17 +4590,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "sodiumoxide" -version = "0.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5cb2f14f9a51352ad65e59257a0a9459d5a36a3615f3d53a974c82fdaaa00a" -dependencies = [ - "libc", - "libsodium-sys", - "serde", -] - [[package]] name = "spin" version = "0.9.8" @@ -5213,7 +4744,7 @@ dependencies = [ "memchr", "once_cell", "percent-encoding", - "rand 0.8.5", + "rand", "rsa", "serde", "sha1", @@ -5253,7 +4784,7 @@ dependencies = [ "md-5", "memchr", "once_cell", - "rand 0.8.5", + "rand", "serde", "serde_json", "sha2", @@ -5312,12 +4843,6 @@ dependencies = [ "unicode-properties", ] -[[package]] -name = "strsim" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" - [[package]] name = "strsim" version = "0.10.0" @@ -5392,18 +4917,6 @@ dependencies = [ "futures-core", ] -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - [[package]] name = 
"system-configuration" version = "0.5.1" @@ -5507,7 +5020,7 @@ dependencies = [ "lazy_static", "log", "public_key", - "rand 0.8.5", + "rand", "serde_json", "thiserror", "uuid", @@ -5539,17 +5052,6 @@ dependencies = [ "syn 2.0.79", ] -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - [[package]] name = "time" version = "0.3.20" @@ -5865,12 +5367,6 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - [[package]] name = "unicode_categories" version = "0.1.1" @@ -6072,29 +5568,6 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" -[[package]] -name = "ursa" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8760a62e18e4d3e3f599e15c09a9f9567fd9d4a90594d45166162be8d232e63b" -dependencies = [ - "amcl", - "console_error_panic_hook", - "failure", - "hex", - "int_traits", - "js-sys", - "lazy_static", - "log", - "openssl", - "rand 0.7.3", - "serde", - "serde_json", - "time 0.1.45", - "wasm-bindgen", - "zeroize", -] - [[package]] name = "utf8parse" version = "0.2.2" @@ -6107,16 +5580,10 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" dependencies = [ - "getrandom 0.2.15", + "getrandom", "serde", ] -[[package]] -name = "value-bag" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" - [[package]] name = "vcpkg" version = "0.2.15" @@ -6138,18 +5605,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6170,8 +5625,6 @@ checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", "once_cell", - "serde", - "serde_json", "wasm-bindgen-macro", ] @@ -6510,7 +5963,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" dependencies = [ "curve25519-dalek", - "rand_core 0.6.4", + "rand_core", "zeroize", ] diff --git a/Cargo.toml b/Cargo.toml index eae8408f3f..715a48e0e6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,6 @@ resolver = "2" members = [ "aries/aries_vcx", - "aries/misc/legacy/libvdrtools", "aries/messages_macros", "aries/messages", "aries/misc/shared", diff --git a/aries/agents/mediator/Cargo.toml b/aries/agents/mediator/Cargo.toml index 92aa550752..7358766c3a 100644 --- 
a/aries/agents/mediator/Cargo.toml +++ b/aries/agents/mediator/Cargo.toml @@ -27,7 +27,7 @@ messages = { path = "../../messages" } reqwest = { version = "0.12.5", features = ["json"] } serde = "1.0.188" serde_json = "1.0.106" -sqlx = "0.7" +sqlx = { version = "0.7", features = ["mysql"] } thiserror = "1.0.49" tokio = { version = "1", features = ["rt-multi-thread", "macros"] } tower-http = { version = "0.5.2", features = ["catch-panic"] } diff --git a/aries/misc/legacy/libvdrtools/Cargo.toml b/aries/misc/legacy/libvdrtools/Cargo.toml deleted file mode 100644 index ee932f628e..0000000000 --- a/aries/misc/legacy/libvdrtools/Cargo.toml +++ /dev/null @@ -1,50 +0,0 @@ -[package] -name = "libvdrtools" -version = "0.8.6" -authors = ["Evernym"] -edition = "2018" - -description = "A library that facilitates building standards compliant and interoperable solutions for self-sovereign identity by abstracting the operations for interacting with a verifiable data registry as defined by Hyperledger Aries." -license = "Apache-2.0" - -[lib] -name = "vdrtools" -path = "src/lib.rs" - -[features] -default = ["base58_bs58", "pair_amcl", "local_nodes_pool", "revocation_tests"] -base58_bs58 = ["bs58"] -pair_amcl = ["ursa"] -local_nodes_pool = [] -revocation_tests = [] -force_full_interaction_tests = [] -sodium_static = [] -only_high_cases = [] -mysql_storage = [] - -# Causes the build to fail on all warnings -fatal_warnings = [] - -[dependencies] -async-std = { version = "1", features = ["attributes"] } -async-trait = "0.1" -failure = { version = "0.1", features = ["backtrace"] } -hex = "0.4" -libc = "0.2" -log = "0.4" -log-derive = "0.4" -bs58 = { version = "0.5", optional = true } -serde = "1" -serde_json = "1" -serde_derive = "1" -lazy_static = "1" -zeroize = "1" -regex = "1" -indy-api-types = { path = "indy-api-types" } -indy-utils = { path = "indy-utils" } -indy-wallet = { path = "indy-wallet" } -futures = { version = "0.3", default-features = false, features = [ "executor", "alloc", "thread-pool" ] } -ursa = { version = "0.3.7", optional = true, default-features = false, features = ["cl_native"] } - -[dev-dependencies] -dirs = "5" diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml deleted file mode 100644 index 1cde54b95c..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "indy-api-types" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -thiserror = "1" -futures = { version = "0.3", default-features = false, features = ["std"] } -log = { version = "0.4", features = ["std"] } -libc = "0.2" -serde = "1" -serde_json = "1" -serde_derive = "1" -openssl = { version = "0.10" } -bs58 = "0.5" -sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } -ursa = { version = "0.3.7", default-features = false, features = ["wasm"] } \ No newline at end of file diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs deleted file mode 100644 index 2fff25cab2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod wallet; diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs 
b/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs deleted file mode 100644 index 7d1fa5271f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/domain/wallet/mod.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::{collections::HashMap, fmt}; - -use serde_json::value::Value; - -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct Config { - pub id: String, - pub storage_type: Option, - pub storage_config: Option, - pub cache: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum CachingAlgorithm { - #[serde(rename = "lru")] - LRU, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct CacheConfig { - #[serde(default = "default_cache_size")] - pub size: usize, - pub entities: Vec, - #[serde(default = "default_caching_algorithm")] - pub algorithm: CachingAlgorithm, -} - -pub const DEFAULT_CACHE_SIZE: usize = 10; - -fn default_cache_size() -> usize { - DEFAULT_CACHE_SIZE -} - -fn default_caching_algorithm() -> CachingAlgorithm { - CachingAlgorithm::LRU -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Credentials { - pub key: String, - #[serde(default = "default_key_derivation_method")] - pub key_derivation_method: KeyDerivationMethod, - - pub rekey: Option, - #[serde(default = "default_key_derivation_method")] - pub rekey_derivation_method: KeyDerivationMethod, - - pub storage_credentials: Option, -} - -#[allow(non_camel_case_types)] -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum KeyDerivationMethod { - RAW, - ARGON2I_MOD, - ARGON2I_INT, -} - -pub fn default_key_derivation_method() -> KeyDerivationMethod { - KeyDerivationMethod::ARGON2I_MOD -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct ExportConfig { - pub key: String, - pub path: String, - #[serde(default = "default_key_derivation_method")] - pub key_derivation_method: KeyDerivationMethod, -} - -#[derive(Debug, Deserialize)] -pub struct KeyConfig { - pub seed: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct IndyRecord { - // Wallet record type - #[serde(rename = "type")] - pub type_: String, - // Wallet record id - pub id: String, - // Wallet record value - pub value: String, - // Wallet record tags - pub tags: HashMap, -} - -impl fmt::Debug for IndyRecord { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Record") - .field("type_", &self.type_) - .field("id", &self.id) - // Censor the value - .field("value", &"******".to_string()) - .field("tags", &self.tags) - .finish() - } -} - -pub type Tags = HashMap; diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs deleted file mode 100644 index cfa8677fe0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/errors.rs +++ /dev/null @@ -1,639 +0,0 @@ -use std::{ - cell, - cell::RefCell, - error::Error, - ffi::{CString, NulError}, - fmt, io, ptr, - sync::Arc, -}; - -use libc::c_char; -use log; -#[cfg(feature = "casting_errors_wallet")] -use sqlx; -use thiserror::Error as ThisError; -use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; -#[cfg(feature = "casting_errors_misc")] -use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; - -use crate::ErrorCode; - -pub mod prelude { - pub use super::{ - err_msg, get_current_error_c_json, set_current_error, IndyError, IndyErrorExt, - IndyErrorKind, IndyResult, IndyResultExt, - }; -} - -#[derive(Copy, Clone, Eq, PartialEq, Debug, ThisError)] -pub enum IndyErrorKind { - // Common errors - 
#[error("Invalid library state")] - InvalidState, - #[error("Invalid structure")] - InvalidStructure, - #[error("Invalid parameter {0}")] - InvalidParam(u32), - #[error("IO error")] - IOError, - // Anoncreds errors - #[error("Duplicated master secret")] - MasterSecretDuplicateName, - #[error("Proof rejected")] - ProofRejected, - #[error("Revocation registry is full")] - RevocationRegistryFull, - #[error("Invalid revocation id")] - InvalidUserRevocId, - #[error("Credential revoked")] - CredentialRevoked, - #[error("Credential definition already exists")] - CredDefAlreadyExists, - // Ledger errors - #[error("No consensus")] - NoConsensus, - #[error("Invalid transaction")] - InvalidTransaction, - #[error("Item not found on ledger")] - LedgerItemNotFound, - // Pool errors - #[error("Pool not created")] - PoolNotCreated, - #[error("Invalid pool handle")] - InvalidPoolHandle, - #[error("Pool work terminated")] - PoolTerminated, - #[error("Pool timeout")] - PoolTimeout, - #[error("Pool ledger config already exists")] - PoolConfigAlreadyExists, - #[error("Pool Genesis Transactions are not compatible with Protocol version")] - PoolIncompatibleProtocolVersion, - // Crypto errors - #[error("Unknown crypto")] - UnknownCrypto, - // Wallet errors - #[error("Invalid wallet handle was passed")] - InvalidWalletHandle, - #[error("Unknown wallet storage type")] - UnknownWalletStorageType, - #[error("Wallet storage type already registered")] - WalletStorageTypeAlreadyRegistered, - #[error("Wallet with this name already exists")] - WalletAlreadyExists, - #[error("Wallet not found")] - WalletNotFound, - #[error("Wallet already opened")] - WalletAlreadyOpened, - #[error("Wallet security error")] - WalletAccessFailed, - #[error("Wallet encoding error")] - WalletEncodingError, - #[error("Wallet storage error occurred")] - WalletStorageError, - #[error("Wallet encryption error")] - WalletEncryptionError, - #[error("Wallet item not found")] - WalletItemNotFound, - #[error("Wallet item already exists")] - WalletItemAlreadyExists, - #[error("Wallet query error")] - WalletQueryError, - // DID errors - #[error("DID already exists")] - DIDAlreadyExists, - // Payments errors - #[error("Unknown payment method type")] - UnknownPaymentMethodType, - #[error("No method were scraped from inputs/outputs or more than one were scraped")] - IncompatiblePaymentMethods, - #[error("Payment insufficient funds on inputs")] - PaymentInsufficientFunds, - #[error("Payment Source does not exist")] - PaymentSourceDoesNotExist, - #[error("Payment operation not supported")] - PaymentOperationNotSupported, - #[error("Payment extra funds")] - PaymentExtraFunds, - #[error("The transaction is not allowed to a requester")] - TransactionNotAllowed, - #[error("Query account does not exist")] - QueryAccountDoesNotExist, - - #[error("Invalid VDR handle")] - InvalidVDRHandle, - #[error("Failed to get ledger for VDR Namespace")] - InvalidVDRNamespace, - #[error("Registered Ledger type does not match to the network of id")] - IncompatibleLedger, -} - -#[derive(Debug, Clone, ThisError)] -pub struct IndyError { - // FIXME: We have to use Arc as for now we clone messages in pool service - // FIXME: In theory we can avoid sync by refactoring of pool service - #[source] - kind: IndyErrorKind, - msg: Arc, -} - -impl fmt::Display for IndyError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - writeln!(f, "Error: {}", self.kind())?; - - if let Some(src) = self.kind.source() { - writeln!(f, " Caused by: {}", src)?; - } - - Ok(()) - } -} - -impl 
IndyError { - pub fn from_msg(kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError { - kind, - msg: Arc::new(msg.to_string()), - } - } - - pub fn kind(&self) -> IndyErrorKind { - self.kind - } - - pub fn extend(self, msg: D) -> IndyError - where - D: fmt::Display + fmt::Debug + Send + Sync + 'static, - { - IndyError { - kind: self.kind, - msg: Arc::new(format!("{}\n Caused by: {msg}", self.msg)), - } - } - - pub fn map(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError { - kind, - msg: Arc::new(format!("{}\n Caused by: {msg}", self.msg)), - } - } -} - -pub fn err_msg(kind: IndyErrorKind, msg: D) -> IndyError -where - D: fmt::Display + fmt::Debug + Send + Sync + 'static, -{ - IndyError::from_msg(kind, msg) -} - -impl From for IndyError { - fn from(kind: IndyErrorKind) -> IndyError { - IndyError { - kind, - msg: Arc::new(String::new()), - } - } -} - -impl From for IndyError { - fn from(err: io::Error) -> Self { - IndyError { - kind: IndyErrorKind::IOError, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: cell::BorrowError) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: cell::BorrowMutError) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: futures::channel::oneshot::Canceled) -> Self { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: log::SetLoggerError) -> IndyError { - IndyError { - kind: IndyErrorKind::InvalidState, - msg: Arc::new(err.to_string()), - } - } -} - -impl From for IndyError { - fn from(err: UrsaCryptoError) -> Self { - match err.kind() { - UrsaCryptoErrorKind::InvalidState => { - IndyError::from_msg(IndyErrorKind::InvalidState, err) - } - UrsaCryptoErrorKind::InvalidStructure => { - IndyError::from_msg(IndyErrorKind::InvalidStructure, err) - } - UrsaCryptoErrorKind::IOError => IndyError::from_msg(IndyErrorKind::IOError, err), - UrsaCryptoErrorKind::InvalidRevocationAccumulatorIndex => { - IndyError::from_msg(IndyErrorKind::InvalidUserRevocId, err) - } - UrsaCryptoErrorKind::RevocationAccumulatorIsFull => { - IndyError::from_msg(IndyErrorKind::RevocationRegistryFull, err) - } - UrsaCryptoErrorKind::ProofRejected => { - IndyError::from_msg(IndyErrorKind::ProofRejected, err) - } - UrsaCryptoErrorKind::CredentialRevoked => { - IndyError::from_msg(IndyErrorKind::CredentialRevoked, err) - } - UrsaCryptoErrorKind::InvalidParam(_) => { - IndyError::from_msg(IndyErrorKind::InvalidStructure, err) - } - } - } -} - -impl From for IndyError { - fn from(_err: bs58::decode::Error) -> Self { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "The base58 input contained a character not part of the base58 alphabet", - ) - } -} - -impl From for IndyError { - fn from(err: openssl::error::ErrorStack) -> IndyError { - // TODO: FIXME: Analyze ErrorStack and split invalid structure errors from other errors - err.to_indy(IndyErrorKind::InvalidState, "Internal OpenSSL error") - } -} - -impl From for IndyError { - fn from(err: sqlx::Error) -> IndyError { - match &err { - sqlx::Error::RowNotFound => { - err.to_indy(IndyErrorKind::WalletItemNotFound, "Item not found") - } - sqlx::Error::Database(e) => match e.code() { - Some(code) => match 
code.as_ref() { - // Constraint unuque - sqlite (2067) - "2067" => err.to_indy( - IndyErrorKind::WalletItemAlreadyExists, - "Wallet item already exists", - ), - // Integrity constraint violation (23000) - "23000" => err.to_indy( - IndyErrorKind::WalletItemAlreadyExists, - "Wallet item already exists", - ), - _ => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - }, - None => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - }, - sqlx::Error::Io(_) => err.to_indy( - IndyErrorKind::IOError, - "IO error during access sqlite database", - ), - sqlx::Error::Tls(_) => err.to_indy( - IndyErrorKind::IOError, - "IO error during access sqlite database", - ), - _ => err.to_indy(IndyErrorKind::InvalidState, "Unexpected database error"), - } - } -} - -impl From for IndyError { - fn from(err: NulError) -> IndyError { - err.to_indy( - IndyErrorKind::InvalidState, - "Null symbols in payments strings", - ) // TODO: Review kind - } -} - -impl From> for ErrorCode { - fn from(r: Result) -> ErrorCode { - match r { - Ok(_) => ErrorCode::Success, - Err(err) => err.into(), - } - } -} - -impl From for ErrorCode { - fn from(err: IndyError) -> ErrorCode { - set_current_error(&err); - err.kind().into() - } -} - -impl From for ErrorCode { - fn from(code: IndyErrorKind) -> ErrorCode { - match code { - IndyErrorKind::InvalidState => ErrorCode::CommonInvalidState, - IndyErrorKind::InvalidStructure => ErrorCode::CommonInvalidStructure, - IndyErrorKind::InvalidParam(num) => match num { - 1 => ErrorCode::CommonInvalidParam1, - 2 => ErrorCode::CommonInvalidParam2, - 3 => ErrorCode::CommonInvalidParam3, - 4 => ErrorCode::CommonInvalidParam4, - 5 => ErrorCode::CommonInvalidParam5, - 6 => ErrorCode::CommonInvalidParam6, - 7 => ErrorCode::CommonInvalidParam7, - 8 => ErrorCode::CommonInvalidParam8, - 9 => ErrorCode::CommonInvalidParam9, - 10 => ErrorCode::CommonInvalidParam10, - 11 => ErrorCode::CommonInvalidParam11, - 12 => ErrorCode::CommonInvalidParam12, - 13 => ErrorCode::CommonInvalidParam13, - 14 => ErrorCode::CommonInvalidParam14, - 15 => ErrorCode::CommonInvalidParam15, - 16 => ErrorCode::CommonInvalidParam16, - 17 => ErrorCode::CommonInvalidParam17, - 18 => ErrorCode::CommonInvalidParam18, - 19 => ErrorCode::CommonInvalidParam19, - 20 => ErrorCode::CommonInvalidParam20, - 21 => ErrorCode::CommonInvalidParam21, - 22 => ErrorCode::CommonInvalidParam22, - 23 => ErrorCode::CommonInvalidParam23, - 24 => ErrorCode::CommonInvalidParam24, - 25 => ErrorCode::CommonInvalidParam25, - 26 => ErrorCode::CommonInvalidParam26, - 27 => ErrorCode::CommonInvalidParam27, - _ => ErrorCode::CommonInvalidState, - }, - IndyErrorKind::IOError => ErrorCode::CommonIOError, - IndyErrorKind::MasterSecretDuplicateName => { - ErrorCode::AnoncredsMasterSecretDuplicateNameError - } - IndyErrorKind::ProofRejected => ErrorCode::AnoncredsProofRejected, - IndyErrorKind::RevocationRegistryFull => { - ErrorCode::AnoncredsRevocationRegistryFullError - } - IndyErrorKind::InvalidUserRevocId => ErrorCode::AnoncredsInvalidUserRevocId, - IndyErrorKind::CredentialRevoked => ErrorCode::AnoncredsCredentialRevoked, - IndyErrorKind::CredDefAlreadyExists => ErrorCode::AnoncredsCredDefAlreadyExistsError, - IndyErrorKind::NoConsensus => ErrorCode::LedgerNoConsensusError, - IndyErrorKind::InvalidTransaction => ErrorCode::LedgerInvalidTransaction, - IndyErrorKind::LedgerItemNotFound => ErrorCode::LedgerNotFound, - IndyErrorKind::PoolNotCreated => ErrorCode::PoolLedgerNotCreatedError, - IndyErrorKind::InvalidPoolHandle => 
ErrorCode::PoolLedgerInvalidPoolHandle, - IndyErrorKind::PoolTerminated => ErrorCode::PoolLedgerTerminated, - IndyErrorKind::PoolTimeout => ErrorCode::PoolLedgerTimeout, - IndyErrorKind::PoolConfigAlreadyExists => ErrorCode::PoolLedgerConfigAlreadyExistsError, - IndyErrorKind::PoolIncompatibleProtocolVersion => { - ErrorCode::PoolIncompatibleProtocolVersion - } - IndyErrorKind::UnknownCrypto => ErrorCode::UnknownCryptoTypeError, - IndyErrorKind::InvalidWalletHandle => ErrorCode::WalletInvalidHandle, - IndyErrorKind::UnknownWalletStorageType => ErrorCode::WalletUnknownTypeError, - IndyErrorKind::WalletStorageTypeAlreadyRegistered => { - ErrorCode::WalletTypeAlreadyRegisteredError - } - IndyErrorKind::WalletAlreadyExists => ErrorCode::WalletAlreadyExistsError, - IndyErrorKind::WalletNotFound => ErrorCode::WalletNotFoundError, - IndyErrorKind::WalletAlreadyOpened => ErrorCode::WalletAlreadyOpenedError, - IndyErrorKind::WalletAccessFailed => ErrorCode::WalletAccessFailed, - IndyErrorKind::WalletEncodingError => ErrorCode::WalletDecodingError, - IndyErrorKind::WalletStorageError => ErrorCode::WalletStorageError, - IndyErrorKind::WalletEncryptionError => ErrorCode::WalletEncryptionError, - IndyErrorKind::WalletItemNotFound => ErrorCode::WalletItemNotFound, - IndyErrorKind::WalletItemAlreadyExists => ErrorCode::WalletItemAlreadyExists, - IndyErrorKind::WalletQueryError => ErrorCode::WalletQueryError, - IndyErrorKind::DIDAlreadyExists => ErrorCode::DidAlreadyExistsError, - IndyErrorKind::UnknownPaymentMethodType => ErrorCode::PaymentUnknownMethodError, - IndyErrorKind::IncompatiblePaymentMethods => ErrorCode::PaymentIncompatibleMethodsError, - IndyErrorKind::PaymentInsufficientFunds => ErrorCode::PaymentInsufficientFundsError, - IndyErrorKind::PaymentSourceDoesNotExist => ErrorCode::PaymentSourceDoesNotExistError, - IndyErrorKind::PaymentOperationNotSupported => { - ErrorCode::PaymentOperationNotSupportedError - } - IndyErrorKind::PaymentExtraFunds => ErrorCode::PaymentExtraFundsError, - IndyErrorKind::TransactionNotAllowed => ErrorCode::TransactionNotAllowedError, - IndyErrorKind::QueryAccountDoesNotExist => ErrorCode::QueryAccountDoesNotexistError, - IndyErrorKind::InvalidVDRHandle => ErrorCode::InvalidVDRHandle, - IndyErrorKind::InvalidVDRNamespace => ErrorCode::InvalidVDRNamespace, - IndyErrorKind::IncompatibleLedger => ErrorCode::IncompatibleLedger, - } - } -} - -impl From for IndyResult<()> { - fn from(err: ErrorCode) -> IndyResult<()> { - if err == ErrorCode::Success { - Ok(()) - } else { - Err(err.into()) - } - } -} - -impl From for IndyError { - fn from(err: ErrorCode) -> IndyError { - err_msg(err.into(), "Plugin returned error".to_string()) - } -} - -impl From for IndyErrorKind { - fn from(err: ErrorCode) -> IndyErrorKind { - match err { - ErrorCode::CommonInvalidState => IndyErrorKind::InvalidState, - ErrorCode::CommonInvalidStructure => IndyErrorKind::InvalidStructure, - ErrorCode::CommonInvalidParam1 => IndyErrorKind::InvalidParam(1), - ErrorCode::CommonInvalidParam2 => IndyErrorKind::InvalidParam(2), - ErrorCode::CommonInvalidParam3 => IndyErrorKind::InvalidParam(3), - ErrorCode::CommonInvalidParam4 => IndyErrorKind::InvalidParam(4), - ErrorCode::CommonInvalidParam5 => IndyErrorKind::InvalidParam(5), - ErrorCode::CommonInvalidParam6 => IndyErrorKind::InvalidParam(6), - ErrorCode::CommonInvalidParam7 => IndyErrorKind::InvalidParam(7), - ErrorCode::CommonInvalidParam8 => IndyErrorKind::InvalidParam(8), - ErrorCode::CommonInvalidParam9 => IndyErrorKind::InvalidParam(9), - 
ErrorCode::CommonInvalidParam10 => IndyErrorKind::InvalidParam(10), - ErrorCode::CommonInvalidParam11 => IndyErrorKind::InvalidParam(11), - ErrorCode::CommonInvalidParam12 => IndyErrorKind::InvalidParam(12), - ErrorCode::CommonInvalidParam13 => IndyErrorKind::InvalidParam(13), - ErrorCode::CommonInvalidParam14 => IndyErrorKind::InvalidParam(14), - ErrorCode::CommonInvalidParam15 => IndyErrorKind::InvalidParam(15), - ErrorCode::CommonInvalidParam16 => IndyErrorKind::InvalidParam(16), - ErrorCode::CommonInvalidParam17 => IndyErrorKind::InvalidParam(17), - ErrorCode::CommonInvalidParam18 => IndyErrorKind::InvalidParam(18), - ErrorCode::CommonInvalidParam19 => IndyErrorKind::InvalidParam(19), - ErrorCode::CommonInvalidParam20 => IndyErrorKind::InvalidParam(20), - ErrorCode::CommonInvalidParam21 => IndyErrorKind::InvalidParam(21), - ErrorCode::CommonInvalidParam22 => IndyErrorKind::InvalidParam(22), - ErrorCode::CommonInvalidParam23 => IndyErrorKind::InvalidParam(23), - ErrorCode::CommonInvalidParam24 => IndyErrorKind::InvalidParam(24), - ErrorCode::CommonInvalidParam25 => IndyErrorKind::InvalidParam(25), - ErrorCode::CommonInvalidParam26 => IndyErrorKind::InvalidParam(26), - ErrorCode::CommonInvalidParam27 => IndyErrorKind::InvalidParam(27), - ErrorCode::CommonIOError => IndyErrorKind::IOError, - ErrorCode::AnoncredsMasterSecretDuplicateNameError => { - IndyErrorKind::MasterSecretDuplicateName - } - ErrorCode::AnoncredsProofRejected => IndyErrorKind::ProofRejected, - ErrorCode::AnoncredsRevocationRegistryFullError => { - IndyErrorKind::RevocationRegistryFull - } - ErrorCode::AnoncredsInvalidUserRevocId => IndyErrorKind::InvalidUserRevocId, - ErrorCode::AnoncredsCredentialRevoked => IndyErrorKind::CredentialRevoked, - ErrorCode::AnoncredsCredDefAlreadyExistsError => IndyErrorKind::CredDefAlreadyExists, - ErrorCode::LedgerNoConsensusError => IndyErrorKind::NoConsensus, - ErrorCode::LedgerInvalidTransaction => IndyErrorKind::InvalidTransaction, - ErrorCode::LedgerNotFound => IndyErrorKind::LedgerItemNotFound, - ErrorCode::PoolLedgerNotCreatedError => IndyErrorKind::PoolNotCreated, - ErrorCode::PoolLedgerInvalidPoolHandle => IndyErrorKind::InvalidPoolHandle, - ErrorCode::PoolLedgerTerminated => IndyErrorKind::PoolTerminated, - ErrorCode::PoolLedgerTimeout => IndyErrorKind::PoolTimeout, - ErrorCode::PoolLedgerConfigAlreadyExistsError => IndyErrorKind::PoolConfigAlreadyExists, - ErrorCode::PoolIncompatibleProtocolVersion => { - IndyErrorKind::PoolIncompatibleProtocolVersion - } - ErrorCode::UnknownCryptoTypeError => IndyErrorKind::UnknownCrypto, - ErrorCode::WalletInvalidHandle => IndyErrorKind::InvalidWalletHandle, - ErrorCode::WalletUnknownTypeError => IndyErrorKind::UnknownWalletStorageType, - ErrorCode::WalletTypeAlreadyRegisteredError => { - IndyErrorKind::WalletStorageTypeAlreadyRegistered - } - ErrorCode::WalletAlreadyExistsError => IndyErrorKind::WalletAlreadyExists, - ErrorCode::WalletNotFoundError => IndyErrorKind::WalletNotFound, - ErrorCode::WalletAlreadyOpenedError => IndyErrorKind::WalletAlreadyOpened, - ErrorCode::WalletAccessFailed => IndyErrorKind::WalletAccessFailed, - ErrorCode::WalletDecodingError => IndyErrorKind::WalletEncodingError, - ErrorCode::WalletStorageError => IndyErrorKind::WalletStorageError, - ErrorCode::WalletEncryptionError => IndyErrorKind::WalletEncryptionError, - ErrorCode::WalletItemNotFound => IndyErrorKind::WalletItemNotFound, - ErrorCode::WalletItemAlreadyExists => IndyErrorKind::WalletItemAlreadyExists, - ErrorCode::WalletQueryError => 
IndyErrorKind::WalletQueryError, - ErrorCode::DidAlreadyExistsError => IndyErrorKind::DIDAlreadyExists, - ErrorCode::PaymentUnknownMethodError => IndyErrorKind::UnknownPaymentMethodType, - ErrorCode::PaymentIncompatibleMethodsError => IndyErrorKind::IncompatiblePaymentMethods, - ErrorCode::PaymentInsufficientFundsError => IndyErrorKind::PaymentInsufficientFunds, - ErrorCode::PaymentSourceDoesNotExistError => IndyErrorKind::PaymentSourceDoesNotExist, - ErrorCode::PaymentOperationNotSupportedError => { - IndyErrorKind::PaymentOperationNotSupported - } - ErrorCode::PaymentExtraFundsError => IndyErrorKind::PaymentExtraFunds, - ErrorCode::TransactionNotAllowedError => IndyErrorKind::TransactionNotAllowed, - ErrorCode::InvalidVDRHandle => IndyErrorKind::InvalidVDRHandle, - ErrorCode::InvalidVDRNamespace => IndyErrorKind::InvalidVDRNamespace, - ErrorCode::IncompatibleLedger => IndyErrorKind::IncompatibleLedger, - _code => IndyErrorKind::InvalidState, - } - } -} - -pub type IndyResult = Result; - -/// Extension methods for `Result`. -pub trait IndyResultExt { - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyResult - where - D: fmt::Display + Send + Sync + 'static; -} - -impl IndyResultExt for Result -where - E: fmt::Display, -{ - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyResult - where - D: fmt::Display + Send + Sync + 'static, - { - self.map_err(|err| err.to_indy(kind, msg)) - } -} - -/// Extension methods for `Error`. -pub trait IndyErrorExt { - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static; -} - -impl IndyErrorExt for E -where - E: fmt::Display, -{ - fn to_indy(self, kind: IndyErrorKind, msg: D) -> IndyError - where - D: fmt::Display + Send + Sync + 'static, - { - IndyError::from_msg(kind, format!("{msg}\n Caused by: {self}")) - } -} - -thread_local! { - pub static CURRENT_ERROR_C_JSON: RefCell> = const { RefCell::new(None) }; -} - -pub fn set_current_error(err: &IndyError) { - CURRENT_ERROR_C_JSON - .try_with(|error| { - let error_json = json!({ - "message": err.to_string(), - "backtrace": err.source().map(|bt| bt.to_string()) - }) - .to_string(); - error.replace(Some(string_to_cstring(error_json))); - }) - .map_err(|err| error!("Thread local variable access failed with: {:?}", err)) - .ok(); -} - -/// Get details for last occurred error. -/// -/// This function should be called in two places to handle both cases of error occurrence: -/// 1) synchronous - in the same application thread -/// 2) asynchronous - inside of function callback -/// -/// NOTE: Error is stored until the next one occurs in the same execution thread or until -/// asynchronous callback finished. Returning pointer has the same lifetime. -/// -/// #Params -/// * `error_json_p` - Reference that will contain error details (if any error has occurred before) -/// in the format: -/// { -/// "backtrace": Optional - error backtrace. 
-/// Collecting of backtrace can be enabled by: -/// 1) setting environment variable `RUST_BACKTRACE=1` -/// 2) calling `indy_set_runtime_config` API function with `collect_backtrace: true` -/// "message": str - human-readable error description -/// } -pub fn get_current_error_c_json() -> *const c_char { - let mut value = ptr::null(); - - CURRENT_ERROR_C_JSON - .try_with(|err| err.borrow().as_ref().map(|err| value = err.as_ptr())) - .map_err(|err| error!("Thread local variable access failed with: {:?}", err)) - .ok(); - - value -} - -pub fn string_to_cstring(s: String) -> CString { - CString::new(s).unwrap() -} diff --git a/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs deleted file mode 100644 index 8e9f98c1b0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-api-types/src/lib.rs +++ /dev/null @@ -1,280 +0,0 @@ -#[macro_use] -extern crate log; - -extern crate serde; - -#[macro_use] -extern crate serde_derive; - -#[macro_use] -extern crate serde_json; - -pub type IndyHandle = i32; - -#[repr(transparent)] -#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone, Serialize, Deserialize)] -pub struct WalletHandle(pub i32); -pub const INVALID_WALLET_HANDLE: WalletHandle = WalletHandle(0); - -impl From for WalletHandle { - fn from(value: i32) -> Self { - Self(value) - } -} - -pub type CallbackHandle = i32; - -pub type CommandHandle = i32; -pub const INVALID_COMMAND_HANDLE: CommandHandle = 0; - -pub type StorageHandle = i32; - -pub type VdrHandle = i32; -pub const INVALID_VDR_HANDLE: VdrHandle = 0; - -#[repr(transparent)] -#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone, Serialize, Deserialize)] -pub struct SearchHandle(pub i32); -pub const INVALID_SEARCH_HANDLE: SearchHandle = SearchHandle(0); - -/* -pub type SearchHandle = i32; -pub const INVALID_SEARCH_HANDLE : SearchHandle = 0; -*/ - -pub mod domain; - -pub mod errors; -pub use errors::IndyError; - -#[derive(Debug, PartialEq, Eq, Copy, Clone)] -#[repr(i32)] -pub enum ErrorCode { - Success = 0, - - // Common errors - - // Caller passed invalid value as param 1 (null, invalid json and etc..) - CommonInvalidParam1 = 100, - - // Caller passed invalid value as param 2 (null, invalid json and etc..) - CommonInvalidParam2 = 101, - - // Caller passed invalid value as param 3 (null, invalid json and etc..) - CommonInvalidParam3 = 102, - - // Caller passed invalid value as param 4 (null, invalid json and etc..) - CommonInvalidParam4 = 103, - - // Caller passed invalid value as param 5 (null, invalid json and etc..) - CommonInvalidParam5 = 104, - - // Caller passed invalid value as param 6 (null, invalid json and etc..) - CommonInvalidParam6 = 105, - - // Caller passed invalid value as param 7 (null, invalid json and etc..) - CommonInvalidParam7 = 106, - - // Caller passed invalid value as param 8 (null, invalid json and etc..) - CommonInvalidParam8 = 107, - - // Caller passed invalid value as param 9 (null, invalid json and etc..) - CommonInvalidParam9 = 108, - - // Caller passed invalid value as param 10 (null, invalid json and etc..) - CommonInvalidParam10 = 109, - - // Caller passed invalid value as param 11 (null, invalid json and etc..) - CommonInvalidParam11 = 110, - - // Caller passed invalid value as param 12 (null, invalid json and etc..) - CommonInvalidParam12 = 111, - - // Invalid library state was detected in runtime. It signals library bug - CommonInvalidState = 112, - - // Object (json, config, key, credential and etc...) 
passed by library caller has invalid - // structure - CommonInvalidStructure = 113, - - // IO Error - CommonIOError = 114, - - // Caller passed invalid value as param 13 (null, invalid json and etc..) - CommonInvalidParam13 = 115, - - // Caller passed invalid value as param 14 (null, invalid json and etc..) - CommonInvalidParam14 = 116, - - // Caller passed invalid value as param 15 (null, invalid json and etc..) - CommonInvalidParam15 = 117, - - // Caller passed invalid value as param 16 (null, invalid json and etc..) - CommonInvalidParam16 = 118, - - // Caller passed invalid value as param 17 (null, invalid json and etc..) - CommonInvalidParam17 = 119, - - // Caller passed invalid value as param 18 (null, invalid json and etc..) - CommonInvalidParam18 = 120, - - // Caller passed invalid value as param 19 (null, invalid json and etc..) - CommonInvalidParam19 = 121, - - // Caller passed invalid value as param 20 (null, invalid json and etc..) - CommonInvalidParam20 = 122, - - // Caller passed invalid value as param 21 (null, invalid json and etc..) - CommonInvalidParam21 = 123, - - // Caller passed invalid value as param 22 (null, invalid json and etc..) - CommonInvalidParam22 = 124, - - // Caller passed invalid value as param 23 (null, invalid json and etc..) - CommonInvalidParam23 = 125, - - // Caller passed invalid value as param 24 (null, invalid json and etc..) - CommonInvalidParam24 = 126, - - // Caller passed invalid value as param 25 (null, invalid json and etc..) - CommonInvalidParam25 = 127, - - // Caller passed invalid value as param 26 (null, invalid json and etc..) - CommonInvalidParam26 = 128, - - // Caller passed invalid value as param 27 (null, invalid json and etc..) - CommonInvalidParam27 = 129, - - // Wallet errors - // Caller passed invalid wallet handle - WalletInvalidHandle = 200, - - // Unknown type of wallet was passed on create_wallet - WalletUnknownTypeError = 201, - - // Attempt to register already existing wallet type - WalletTypeAlreadyRegisteredError = 202, - - // Attempt to create wallet with name used for another exists wallet - WalletAlreadyExistsError = 203, - - // Requested entity id isn't present in wallet - WalletNotFoundError = 204, - - // Trying to use wallet with pool that has different name - WalletIncompatiblePoolError = 205, - - // Trying to open wallet that was opened already - WalletAlreadyOpenedError = 206, - - // Attempt to open encrypted wallet with invalid credentials - WalletAccessFailed = 207, - - // Input provided to wallet operations is considered not valid - WalletInputError = 208, - - // Decoding of wallet data during input/output failed - WalletDecodingError = 209, - - // Storage error occurred during wallet operation - WalletStorageError = 210, - - // Error during encryption-related operations - WalletEncryptionError = 211, - - // Requested wallet item not found - WalletItemNotFound = 212, - - // Returned if wallet's add_record operation is used with record name that already exists - WalletItemAlreadyExists = 213, - - // Returned if provided wallet query is invalid - WalletQueryError = 214, - - // Ledger errors - // Trying to open pool ledger that wasn't created before - PoolLedgerNotCreatedError = 300, - - // Caller passed invalid pool ledger handle - PoolLedgerInvalidPoolHandle = 301, - - // Pool ledger terminated - PoolLedgerTerminated = 302, - - // No consensus during ledger operation - LedgerNoConsensusError = 303, - - // Attempt to parse invalid transaction response - LedgerInvalidTransaction = 304, - - // Attempt to send 
transaction without the necessary privileges - LedgerSecurityError = 305, - - // Attempt to create pool ledger config with name used for another existing pool - PoolLedgerConfigAlreadyExistsError = 306, - - // Timeout for action - PoolLedgerTimeout = 307, - - // Attempt to open Pool for witch Genesis Transactions are not compatible with set Protocol - // version. Call pool.indy_set_protocol_version to set correct Protocol version. - PoolIncompatibleProtocolVersion = 308, - - // Item not found on ledger. - LedgerNotFound = 309, - - // Revocation registry is full and creation of new registry is necessary - AnoncredsRevocationRegistryFullError = 400, - - AnoncredsInvalidUserRevocId = 401, - - // Attempt to generate master secret with duplicated name - AnoncredsMasterSecretDuplicateNameError = 404, - - AnoncredsProofRejected = 405, - - AnoncredsCredentialRevoked = 406, - - // Attempt to create credential definition with duplicated id - AnoncredsCredDefAlreadyExistsError = 407, - - // Crypto errors - // Unknown format of DID entity keys - UnknownCryptoTypeError = 500, - - // Attempt to create duplicate did - DidAlreadyExistsError = 600, - - // Unknown payment method was given - PaymentUnknownMethodError = 700, - - //No method were scraped from inputs/outputs or more than one were scraped - PaymentIncompatibleMethodsError = 701, - - // Insufficient funds on inputs - PaymentInsufficientFundsError = 702, - - // No such source on a ledger - PaymentSourceDoesNotExistError = 703, - - // Operation is not supported for payment method - PaymentOperationNotSupportedError = 704, - - // Extra funds on inputs - PaymentExtraFundsError = 705, - - // The transaction is not allowed to a requester - TransactionNotAllowedError = 706, - - // Query Account does not exist in the pool - QueryAccountDoesNotexistError = 808, - - // Caller passed invalid wallet handle - InvalidVDRHandle = 810, - - // Unable to get register Ledger for specified namespace and VDR - InvalidVDRNamespace = 811, - - // Registered Ledger type does not match to the network of id - IncompatibleLedger = 812, -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml deleted file mode 100644 index a502e61f0e..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "indy-utils" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -default = ["base64_rust_base64", "ed25519_sign_sodium", "ed25519_box_sodium", "sealedbox_sodium", "base64_rust_base64", "xsalsa20_sodium", "chacha20poly1305_ietf_sodium", "hash_openssl", "pwhash_argon2i13_sodium", "hmacsha256_sodium", "randombytes_sodium"] -base64_rust_base64 = [] -ed25519_sign_sodium = [] -ed25519_box_sodium = [] -sealedbox_sodium = [] -xsalsa20_sodium = [] -chacha20poly1305_ietf_sodium = [] -pwhash_argon2i13_sodium = [] -hmacsha256_sodium = [] -hash_openssl = [] -randombytes_sodium = [] - -[dependencies] -base64 = {version = "0.21.2"} -dirs = "5" -failure = "0.1" -indy-api-types = { path = "../indy-api-types" } -lazy_static = "1" -libc = "0.2" -log = "0.4" -openssl = { version = "0.10" } -serde = "1" -serde_json = "1" -serde_derive = "1" -sodiumoxide = {version = "0.0.16"} # touching this makes things go boom -zeroize = "1" - -[dev-dependencies] -rmp-serde = "1" -rand = "0.8" diff --git 
a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs deleted file mode 100644 index 204d91fc60..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs +++ /dev/null @@ -1,78 +0,0 @@ -use base64::{ - alphabet, - engine::{general_purpose, DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig}, - Engine, -}; -use indy_api_types::errors::prelude::*; - -/// Default general purpose configuration, but padding decode mode of 'indifferent' (will decode -/// either) -const ANY_PADDING: GeneralPurposeConfig = - GeneralPurposeConfig::new().with_decode_padding_mode(DecodePaddingMode::Indifferent); -/// Standard Base64 URL Safe decoding and encoding, with indifference for padding mode when decoding -const URL_SAFE_ANY_PADDING: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, ANY_PADDING); - -pub fn encode(doc: &[u8]) -> String { - general_purpose::STANDARD.encode(doc) -} - -pub fn decode(doc: &str) -> Result, IndyError> { - general_purpose::STANDARD - .decode(doc) - .map_err(|e| e.to_indy(IndyErrorKind::InvalidStructure, "Invalid base64 sequence")) -} - -pub fn encode_urlsafe(doc: &[u8]) -> String { - general_purpose::URL_SAFE.encode(doc) -} - -pub fn decode_urlsafe(doc: &str) -> Result, IndyError> { - URL_SAFE_ANY_PADDING.decode(doc).map_err(|e| { - e.to_indy( - IndyErrorKind::InvalidStructure, - "Invalid base64URL_SAFE sequence", - ) - }) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn encode_works() { - let result = encode(&[1, 2, 3]); - assert_eq!("AQID", &result); - } - - #[test] - fn decode_works() { - let result = decode("AQID"); - - assert!(result.is_ok(), "Got error"); - assert_eq!(&[1, 2, 3], &result.unwrap()[..]); - } - - #[test] - fn encode_urlsafe_works() { - let result = encode_urlsafe(&[1, 2, 3]); - assert_eq!("AQID", &result); - } - - #[test] - fn decode_urlsafe_works() { - let result = decode_urlsafe("AQID"); - - assert!(result.is_ok(), "Got error"); - assert_eq!(&[1, 2, 3], &result.unwrap()[..]); - } - - #[test] - fn decode_urlsafe_works_with_or_without_padding() { - let result = decode_urlsafe("YWJjZA=="); - assert_eq!(vec![97, 98, 99, 100], result.unwrap()); - - let result = decode_urlsafe("YWJjZA"); - assert_eq!(vec![97, 98, 99, 100], result.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs deleted file mode 100644 index 5b913ec5da..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs +++ /dev/null @@ -1,428 +0,0 @@ -extern crate sodiumoxide; - -use std::{ - cmp, io, - io::{Read, Write}, -}; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; - -use self::sodiumoxide::{crypto::aead::chacha20poly1305_ietf, utils}; -use super::pwhash_argon2i13; - -pub const KEYBYTES: usize = chacha20poly1305_ietf::KEYBYTES; -pub const NONCEBYTES: usize = chacha20poly1305_ietf::NONCEBYTES; -pub const TAGBYTES: usize = chacha20poly1305_ietf::TAGBYTES; - -sodium_type!(Key, chacha20poly1305_ietf::Key, KEYBYTES); -sodium_type!(Nonce, chacha20poly1305_ietf::Nonce, NONCEBYTES); -sodium_type!(Tag, chacha20poly1305_ietf::Tag, TAGBYTES); - -impl Nonce { - pub fn increment(&mut self) { - utils::increment_le(&mut (self.0).0); - } -} - -pub fn gen_key() -> Key { - Key(chacha20poly1305_ietf::gen_key()) -} - -pub fn derive_key( - 
passphrase: &str, - salt: &pwhash_argon2i13::Salt, - key_derivation_method: &KeyDerivationMethod, -) -> Result { - let mut key_bytes = [0u8; chacha20poly1305_ietf::KEYBYTES]; - - pwhash_argon2i13::pwhash( - &mut key_bytes, - passphrase.as_bytes(), - salt, - key_derivation_method, - ) - .map_err(|err| err.extend("Can't derive key"))?; - - Ok(Key::new(key_bytes)) -} - -pub fn gen_nonce() -> Nonce { - Nonce(chacha20poly1305_ietf::gen_nonce()) -} - -pub fn gen_nonce_and_encrypt(data: &[u8], key: &Key) -> (Vec, Nonce) { - let nonce = gen_nonce(); - - let encrypted_data = chacha20poly1305_ietf::seal(data, None, &nonce.0, &key.0); - - (encrypted_data, nonce) -} - -pub fn gen_nonce_and_encrypt_detached(data: &[u8], aad: &[u8], key: &Key) -> (Vec, Nonce, Tag) { - let nonce = gen_nonce(); - - let mut plain = data.to_vec(); - let tag = - chacha20poly1305_ietf::seal_detached(plain.as_mut_slice(), Some(aad), &nonce.0, &key.0); - - (plain.to_vec(), nonce, Tag(tag)) -} - -pub fn decrypt_detached( - data: &[u8], - key: &Key, - nonce: &Nonce, - tag: &Tag, - ad: Option<&[u8]>, -) -> Result, IndyError> { - let mut plain = data.to_vec(); - chacha20poly1305_ietf::open_detached(plain.as_mut_slice(), ad, &tag.0, &nonce.0, &key.0) - .map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to decrypt data: {:?}", - ) - }) - .map(|()| plain) -} - -pub fn encrypt(data: &[u8], key: &Key, nonce: &Nonce) -> Vec { - chacha20poly1305_ietf::seal(data, None, &nonce.0, &key.0) -} - -pub fn decrypt(data: &[u8], key: &Key, nonce: &Nonce) -> Result, IndyError> { - chacha20poly1305_ietf::open(data, None, &nonce.0, &key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium chacha20poly1305_ietf", - ) - }) -} - -pub struct Writer { - buffer: Vec, - chunk_size: usize, - key: Key, - nonce: Nonce, - inner: W, -} - -impl Writer { - pub fn new(inner: W, key: Key, nonce: Nonce, chunk_size: usize) -> Self { - Writer { - buffer: Vec::new(), - chunk_size, - key, - nonce, - inner, - } - } - - #[allow(unused)] - pub fn into_inner(self) -> W { - self.inner - } -} - -impl Write for Writer { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.buffer.write_all(buf)?; // TODO: Small optimizations are possible - - let mut chunk_start = 0; - - while self.buffer.len() >= chunk_start + self.chunk_size { - let chunk = &self.buffer[chunk_start..chunk_start + self.chunk_size]; - self.inner - .write_all(&encrypt(chunk, &self.key, &self.nonce))?; - self.nonce.increment(); - chunk_start += self.chunk_size; - } - - if chunk_start > 0 { - self.buffer.drain(..chunk_start); - } - - Ok(buf.len()) - } - - fn flush(&mut self) -> io::Result<()> { - if !self.buffer.is_empty() { - self.inner - .write_all(&encrypt(&self.buffer, &self.key, &self.nonce))?; - self.nonce.increment(); - } - - self.buffer.flush() - } -} - -pub struct Reader { - rest_buffer: Vec, - chunk_buffer: Vec, - key: Key, - nonce: Nonce, - inner: R, -} - -impl Reader { - pub fn new(inner: R, key: Key, nonce: Nonce, chunk_size: usize) -> Self { - Reader { - rest_buffer: Vec::new(), - chunk_buffer: vec![0; chunk_size + TAGBYTES], - key, - nonce, - inner, - } - } - - #[allow(unused)] - pub fn into_inner(self) -> R { - self.inner - } - - fn _read_chunk(&mut self) -> io::Result { - let mut read = 0; - - while read < self.chunk_buffer.len() { - match self.inner.read(&mut self.chunk_buffer[read..]) { - Ok(0) => break, - Ok(n) => read += n, - Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, - Err(e) => return 
Err(e), - } - } - - if read == 0 { - Err(io::Error::new( - io::ErrorKind::UnexpectedEof, - "No more crypto chucks to consume", - )) - } else { - Ok(read) - } - } -} - -impl Read for Reader { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let mut pos = 0; - - // Consume from rest buffer - if !self.rest_buffer.is_empty() { - let to_copy = cmp::min(self.rest_buffer.len(), buf.len() - pos); - buf[pos..pos + to_copy].copy_from_slice(&self.rest_buffer[..to_copy]); - pos += to_copy; - self.rest_buffer.drain(..to_copy); - } - - // Consume from chunks - while pos < buf.len() { - let chunk_size = self._read_chunk()?; - - let chunk = decrypt(&self.chunk_buffer[..chunk_size], &self.key, &self.nonce).map_err( - |_| io::Error::new(io::ErrorKind::InvalidData, "Invalid data in crypto chunk"), - )?; - - self.nonce.increment(); - - let to_copy = cmp::min(chunk.len(), buf.len() - pos); - buf[pos..pos + to_copy].copy_from_slice(&chunk[..to_copy]); - pos += to_copy; - - // Save rest in rest buffer - if pos == buf.len() && to_copy < chunk.len() { - self.rest_buffer.extend(&chunk[to_copy..]); - } - } - - Ok(buf.len()) - } -} - -#[cfg(test)] -mod tests { - extern crate rmp_serde; - - use super::*; - use crate::crypto::randombytes::randombytes; - - #[test] - fn derivation_argon2i_mod_produces_expected_result() { - let passphrase = "passphrase"; - let salt_bytes: [u8; 32] = [ - 24, 62, 35, 31, 123, 241, 94, 24, 192, 110, 199, 143, 173, 20, 23, 102, 184, 99, 221, - 64, 247, 230, 11, 253, 10, 7, 80, 236, 185, 249, 110, 187, - ]; - let key_bytes: [u8; 32] = [ - 148, 89, 76, 239, 127, 103, 13, 86, 84, 217, 216, 13, 223, 141, 225, 41, 223, 126, 145, - 138, 174, 31, 142, 199, 81, 12, 40, 201, 67, 8, 6, 251, - ]; - - let res = derive_key( - passphrase, - &pwhash_argon2i13::Salt::from_slice(&salt_bytes).unwrap(), - &KeyDerivationMethod::ARGON2I_MOD, - ) - .unwrap(); - - assert_eq!(res, Key::new(key_bytes)) - } - - #[test] - fn derivation_argon2i_int_produces_expected_result() { - let passphrase = "passphrase"; - let salt_bytes: [u8; 32] = [ - 24, 62, 35, 31, 123, 241, 94, 24, 192, 110, 199, 143, 173, 20, 23, 102, 184, 99, 221, - 64, 247, 230, 11, 253, 10, 7, 80, 236, 185, 249, 110, 187, - ]; - let key_bytes: [u8; 32] = [ - 247, 55, 177, 252, 244, 130, 218, 129, 113, 206, 72, 44, 29, 68, 134, 215, 249, 233, - 131, 199, 38, 87, 69, 217, 156, 217, 10, 160, 30, 148, 80, 160, - ]; - - let res = derive_key( - passphrase, - &pwhash_argon2i13::Salt::from_slice(&salt_bytes).unwrap(), - &KeyDerivationMethod::ARGON2I_INT, - ) - .unwrap(); - - assert_eq!(res, Key::new(key_bytes)) - } - - #[test] - fn gen_nonce_and_encrypt_decrypt_works() { - let data = randombytes(100); - let key = gen_key(); - - let (c, nonce) = gen_nonce_and_encrypt(&data, &key); - let u = decrypt(&c, &key, &nonce).unwrap(); - - assert_eq!(data, u); - } - - #[test] - pub fn gen_nonce_and_encrypt_detached_decrypt_detached_works() { - let data = randombytes(100); - let key = gen_key(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = b"some protocol data input to the encryption"; - - let (c, nonce, tag) = gen_nonce_and_encrypt_detached(&data, aad, &key); - let u = decrypt_detached(&c, &key, &nonce, &tag, Some(aad)).unwrap(); - assert_eq!(data, u); - } - - #[test] - fn encrypt_decrypt_works_for_nonce() { - let data = randombytes(16); - - let key = gen_key(); - let nonce = gen_nonce(); - let c = encrypt(&data, &key, &nonce); - let u = decrypt(&c, &key, &nonce).unwrap(); - - assert_eq!(data, u) - } - - #[test] - fn nonce_serialize_deserialize_works() { - let nonce = gen_nonce(); - let serialized = rmp_serde::to_vec(&nonce).unwrap(); - let deserialized: Nonce = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), NONCEBYTES + 2); - assert_eq!(nonce, deserialized) - } - - #[test] - fn key_serialize_deserialize_works() { - let key = gen_key(); - let serialized = rmp_serde::to_vec(&key).unwrap(); - let deserialized: Key = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), KEYBYTES + 2); - assert_eq!(key, deserialized) - } - - #[test] - fn writer_reader_works_for_less_than_one_chunk() { - let plain = randombytes(7); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 7 + TAGBYTES); - - let mut decrypted = vec![0u8; 7]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_exact_one_chunk() { - let plain = randombytes(10); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 10 + TAGBYTES); - - let mut decrypted = vec![0u8; 10]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_one_to_two_chunks() { - let plain = randombytes(13); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 13 + 2 * TAGBYTES); - - let mut decrypted = vec![0u8; 13]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } - - #[test] - fn writer_reader_works_for_exact_two_chunks() { - let plain = randombytes(20); - let key = gen_key(); - let nonce = gen_nonce(); - - let mut writer = Writer::new(Vec::::new(), key.clone(), nonce.clone(), 10); - writer.write_all(&plain).unwrap(); - writer.flush().unwrap(); - - let encrypted = writer.into_inner(); - assert_eq!(encrypted.len(), 20 + 2 * TAGBYTES); - - let mut decrypted = vec![0u8; 20]; - let mut reader = Reader::new(&encrypted[..], key, nonce, 10); - reader.read_exact(&mut decrypted).unwrap(); - - assert_eq!(plain, decrypted); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs 
b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs deleted file mode 100644 index d51f75d92f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs +++ /dev/null @@ -1,73 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::box_; - -pub const NONCEBYTES: usize = box_::curve25519xsalsa20poly1305::NONCEBYTES; -pub const PUBLICKEYBYTES: usize = box_::curve25519xsalsa20poly1305::PUBLICKEYBYTES; -pub const SECRETKEYBYTES: usize = box_::curve25519xsalsa20poly1305::SECRETKEYBYTES; - -sodium_type!(Nonce, box_::Nonce, NONCEBYTES); -sodium_type!(PublicKey, box_::PublicKey, PUBLICKEYBYTES); -sodium_type!(SecretKey, box_::SecretKey, SECRETKEYBYTES); - -pub fn encrypt( - secret_key: &SecretKey, - public_key: &PublicKey, - doc: &[u8], - nonce: &Nonce, -) -> Result, IndyError> { - Ok(box_::seal(doc, &nonce.0, &public_key.0, &secret_key.0)) -} - -pub fn decrypt( - secret_key: &SecretKey, - public_key: &PublicKey, - doc: &[u8], - nonce: &Nonce, -) -> Result, IndyError> { - box_::open(doc, &nonce.0, &public_key.0, &secret_key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium _box", - ) - }) -} - -pub fn gen_nonce() -> Nonce { - Nonce(box_::gen_nonce()) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::crypto::{ed25519_sign, randombytes::randombytes}; - - #[test] - fn encrypt_decrypt_works() { - let text = randombytes(16); - let nonce = gen_nonce(); - let seed = ed25519_sign::Seed::from_slice(&randombytes(32)).unwrap(); - - let (alice_ver_key, alice_sign_key) = - ed25519_sign::create_key_pair_for_signature(Some(&seed)).unwrap(); - let alice_pk = ed25519_sign::vk_to_curve25519(&alice_ver_key).unwrap(); - let alice_sk = ed25519_sign::sk_to_curve25519(&alice_sign_key).unwrap(); - - let (bob_ver_key, bob_sign_key) = - ed25519_sign::create_key_pair_for_signature(Some(&seed)).unwrap(); - let bob_pk = ed25519_sign::vk_to_curve25519(&bob_ver_key).unwrap(); - let bob_sk = ed25519_sign::sk_to_curve25519(&bob_sign_key).unwrap(); - - let bob_encrypted_text = encrypt(&bob_sk, &alice_pk, &text, &nonce).unwrap(); - let bob_decrypt_result = decrypt(&alice_sk, &bob_pk, &bob_encrypted_text, &nonce); - assert!(bob_decrypt_result.is_ok()); - assert_eq!(text, bob_decrypt_result.unwrap()); - - let alice_encrypted_text = encrypt(&alice_sk, &bob_pk, &text, &nonce).unwrap(); - let alice_decrypted_text = decrypt(&bob_sk, &alice_pk, &alice_encrypted_text, &nonce); - assert!(alice_decrypted_text.is_ok()); - assert_eq!(text, alice_decrypted_text.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs deleted file mode 100644 index c89f8abeab..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs +++ /dev/null @@ -1,123 +0,0 @@ -use indy_api_types::errors::prelude::*; -use libc::c_int; -use sodiumoxide::crypto::{box_, sign}; - -use super::{ed25519_box, randombytes::randombytes}; - -pub const SEEDBYTES: usize = sign::SEEDBYTES; -pub const SIG_PUBLICKEYBYTES: usize = sign::PUBLICKEYBYTES; -pub const ENC_PUBLICKEYBYTES: usize = box_::PUBLICKEYBYTES; -pub const SIG_SECRETKEYBYTES: usize = sign::SECRETKEYBYTES; -pub const ENC_SECRETKEYBYTES: usize = box_::SECRETKEYBYTES; -pub const SIGNATUREBYTES: usize = sign::SIGNATUREBYTES; - -sodium_type!(Seed, sign::Seed, SEEDBYTES); -sodium_type!(PublicKey, 
sign::PublicKey, SIG_PUBLICKEYBYTES); -sodium_type!(SecretKey, sign::SecretKey, SIG_SECRETKEYBYTES); -sodium_type!(Signature, sign::Signature, SIGNATUREBYTES); - -extern "C" { - // TODO: fix hack: - // this functions isn't included to sodiumoxide rust wrappers, - // temporary local binding is used to call libsodium-sys function - pub fn crypto_sign_ed25519_pk_to_curve25519( - curve25519_pk: *mut [u8; ENC_PUBLICKEYBYTES], - ed25519_pk: *const [u8; SIG_PUBLICKEYBYTES], - ) -> c_int; - pub fn crypto_sign_ed25519_sk_to_curve25519( - curve25519_sk: *mut [u8; ENC_SECRETKEYBYTES], - ed25519_sk: *const [u8; SIG_SECRETKEYBYTES], - ) -> c_int; -} - -pub fn create_key_pair_for_signature( - seed: Option<&Seed>, -) -> Result<(PublicKey, SecretKey), IndyError> { - let (public_key, secret_key) = sign::keypair_from_seed( - &seed - .unwrap_or(&Seed::from_slice(&randombytes(SEEDBYTES)).unwrap()) - .0, - ); - - Ok((PublicKey(public_key), SecretKey(secret_key))) -} - -pub fn sign(secret_key: &SecretKey, doc: &[u8]) -> Result { - Ok(Signature(sign::sign_detached(doc, &secret_key.0))) -} - -pub fn verify( - public_key: &PublicKey, - doc: &[u8], - signature: &Signature, -) -> Result { - Ok(sign::verify_detached(&signature.0, doc, &public_key.0)) -} - -pub fn sk_to_curve25519(sk: &SecretKey) -> Result { - let mut to: [u8; ENC_SECRETKEYBYTES] = [0; ENC_SECRETKEYBYTES]; - unsafe { - crypto_sign_ed25519_sk_to_curve25519(&mut to, &(sk.0).0); - } - ed25519_box::SecretKey::from_slice(&to) -} - -pub fn vk_to_curve25519(pk: &PublicKey) -> Result { - let mut to: [u8; ENC_PUBLICKEYBYTES] = [0; ENC_PUBLICKEYBYTES]; - unsafe { - crypto_sign_ed25519_pk_to_curve25519(&mut to, &(pk.0).0); - } - ed25519_box::PublicKey::from_slice(&to) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::crypto::ed25519_box; - - #[test] - fn signin_verify_works() { - let seed = Seed::from_slice(&randombytes(SEEDBYTES)).unwrap(); - let text = randombytes(16); - - let (public_key, secret_key) = create_key_pair_for_signature(Some(&seed)).unwrap(); - let alice_signed_text = sign(&secret_key, &text).unwrap(); - let verified = verify(&public_key, &text, &alice_signed_text).unwrap(); - - assert!(verified); - } - - #[test] - fn pk_to_curve25519_works() { - let pk = vec![ - 236, 191, 114, 144, 108, 87, 211, 244, 148, 23, 20, 175, 122, 6, 159, 254, 85, 99, 145, - 152, 178, 133, 230, 236, 192, 69, 35, 136, 141, 194, 243, 134, - ]; - let pk = PublicKey::from_slice(&pk).unwrap(); - let pkc_test = vk_to_curve25519(&pk).unwrap(); - let pkc_exp = vec![ - 8, 45, 124, 147, 248, 201, 112, 171, 11, 51, 29, 248, 34, 127, 197, 241, 60, 158, 84, - 47, 4, 176, 238, 166, 110, 39, 207, 58, 127, 110, 76, 42, - ]; - let pkc_exp = ed25519_box::PublicKey::from_slice(&pkc_exp).unwrap(); - assert_eq!(pkc_exp, pkc_test); - } - - #[test] - fn sk_to_curve25519_works() { - let sk = vec![ - 78, 67, 205, 99, 150, 131, 75, 110, 56, 154, 76, 61, 27, 142, 36, 141, 44, 223, 122, - 199, 14, 230, 12, 163, 4, 255, 94, 230, 21, 242, 97, 200, 236, 191, 114, 144, 108, 87, - 211, 244, 148, 23, 20, 175, 122, 6, 159, 254, 85, 99, 145, 152, 178, 133, 230, 236, - 192, 69, 35, 136, 141, 194, 243, 134, - ]; - let sk = SecretKey::from_slice(&sk).unwrap(); - let skc_test = sk_to_curve25519(&sk).unwrap(); - let skc_exp = vec![ - 144, 112, 64, 101, 69, 167, 61, 44, 220, 148, 58, 187, 108, 73, 11, 247, 130, 161, 158, - 40, 100, 1, 40, 27, 76, 148, 209, 240, 195, 35, 153, 121, - ]; - let skc_exp = ed25519_box::SecretKey::from_slice(&skc_exp).unwrap(); - assert_eq!(skc_exp, skc_test); - } -} diff 
--git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs deleted file mode 100644 index 237ac7e01f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hash/openssl.rs +++ /dev/null @@ -1,84 +0,0 @@ -extern crate openssl; - -use indy_api_types::errors::prelude::*; - -use self::openssl::hash::{Hasher, MessageDigest}; - -pub const HASHBYTES: usize = 32; - -// these bytes are the same as openssl_hash(MessageDigest::sha256(), &[]) so we do not have to -// actually call the hash function -pub const EMPTY_HASH_BYTES: [u8; HASHBYTES] = [ - 227, 176, 196, 66, 152, 252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39, 174, 65, 228, - 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85, -]; - -pub fn hash(input: &[u8]) -> Result, IndyError> { - let mut hasher = Hash::new_context()?; - hasher.update(input)?; - Ok(hasher.finish().map(|b| b.to_vec())?) -} - -pub struct Hash {} - -impl Hash { - pub fn new_context() -> Result { - Ok(Hasher::new(MessageDigest::sha256())?) - } - - pub fn hash_leaf(leaf: &T) -> Result, IndyError> - where - T: Hashable, - { - let mut ctx = Hash::new_context()?; - ctx.update(&[0x00])?; - leaf.update_context(&mut ctx)?; - Ok(ctx.finish().map(|b| b.to_vec())?) - } - - pub fn hash_nodes(left: &T, right: &T) -> Result, IndyError> - where - T: Hashable, - { - let mut ctx = Hash::new_context()?; - ctx.update(&[0x01])?; - left.update_context(&mut ctx)?; - right.update_context(&mut ctx)?; - Ok(ctx.finish().map(|b| b.to_vec())?) - } -} - -/// The type of values stored in a `MerkleTree` must implement -/// this trait, in order for them to be able to be fed -/// to a Ring `Context` when computing the hash of a leaf. -/// -/// A default instance for types that already implements -/// `AsRef<[u8]>` is provided. -/// -/// ## Example -/// -/// Here is an example of how to implement `Hashable` for a type -/// that does not (or cannot) implement `AsRef<[u8]>`: -/// -/// ```ignore -/// impl Hashable for PublicKey { -/// fn update_context(&self, context: &mut Hasher) -> Result<(), CommonError> { -/// let bytes: Vec = self.to_bytes(); -/// Ok(context.update(&bytes)?) -/// } -/// } -/// ``` -pub trait Hashable { - /// Update the given `context` with `self`. - /// - /// See `openssl::hash::Hasher::update` for more information. 
- fn update_context(&self, context: &mut Hasher) -> Result<(), IndyError>; -} - -impl> Hashable for T { - fn update_context(&self, context: &mut Hasher) -> Result<(), IndyError> { - context - .update(self.as_ref()) - .to_indy(IndyErrorKind::InvalidState, "Internal OpenSSL error") - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs deleted file mode 100644 index ec9dbe1e31..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/hmacsha256/sodium.rs +++ /dev/null @@ -1,17 +0,0 @@ -extern crate sodiumoxide; - -use self::sodiumoxide::crypto::auth::hmacsha256; - -pub const KEYBYTES: usize = hmacsha256::KEYBYTES; -pub const TAGBYTES: usize = hmacsha256::TAGBYTES; - -sodium_type!(Key, hmacsha256::Key, KEYBYTES); -sodium_type!(Tag, hmacsha256::Tag, TAGBYTES); - -pub fn gen_key() -> Key { - Key(hmacsha256::gen_key()) -} - -pub fn authenticate(data: &[u8], key: &Key) -> Tag { - Tag(hmacsha256::authenticate(data, &key.0)) -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs deleted file mode 100644 index d8f1ecc87b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/mod.rs +++ /dev/null @@ -1,44 +0,0 @@ -#[macro_use] -pub mod sodium_type; - -#[cfg(feature = "base64_rust_base64")] -#[path = "base64/rust_base64.rs"] -pub mod base64; - -#[cfg(feature = "chacha20poly1305_ietf_sodium")] -#[path = "chacha20poly1305_ietf/sodium.rs"] -pub mod chacha20poly1305_ietf; - -#[cfg(feature = "hash_openssl")] -#[path = "hash/openssl.rs"] -pub mod hash; - -#[cfg(feature = "hmacsha256_sodium")] -#[path = "hmacsha256/sodium.rs"] -pub mod hmacsha256; - -#[cfg(feature = "pwhash_argon2i13_sodium")] -#[path = "pwhash_argon2i13/sodium.rs"] -pub mod pwhash_argon2i13; - -#[cfg(feature = "randombytes_sodium")] -#[path = "randombytes/sodium.rs"] -pub mod randombytes; - -#[cfg(feature = "sealedbox_sodium")] -#[path = "sealedbox/sodium.rs"] -pub mod sealedbox; - -#[allow(dead_code)] /* FIXME Do we really need this module? 
*/ -#[cfg(feature = "xsalsa20_sodium")] -#[path = "xsalsa20/sodium.rs"] -pub mod xsalsa20; - -#[cfg(feature = "ed25519_sign_sodium")] -#[path = "ed25519_sign/sodium.rs"] -pub mod ed25519_sign; - -#[cfg(feature = "ed25519_box_sodium")] -#[path = "ed25519_box/sodium.rs"] -// TODO: The name is misleading as the operations do not happen over ed25519 curve -pub mod ed25519_box; diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs deleted file mode 100644 index b629c34c1f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs +++ /dev/null @@ -1,133 +0,0 @@ -extern crate serde; -extern crate sodiumoxide; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; -use libc::{c_int, c_ulonglong, size_t}; - -use self::sodiumoxide::crypto::pwhash; - -pub const SALTBYTES: usize = pwhash::SALTBYTES; - -sodium_type!(Salt, pwhash::Salt, SALTBYTES); - -pub fn gen_salt() -> Salt { - Salt(pwhash::gen_salt()) -} - -pub fn pwhash<'a>( - key: &'a mut [u8], - passwd: &[u8], - salt: &Salt, - key_derivation_method: &KeyDerivationMethod, -) -> Result<&'a [u8], IndyError> { - let (opslimit, memlimit) = unsafe { - match key_derivation_method { - KeyDerivationMethod::ARGON2I_MOD => ( - crypto_pwhash_argon2i_opslimit_moderate(), - crypto_pwhash_argon2i_memlimit_moderate(), - ), - KeyDerivationMethod::ARGON2I_INT => ( - crypto_pwhash_argon2i_opslimit_interactive(), - crypto_pwhash_argon2i_memlimit_interactive(), - ), - KeyDerivationMethod::RAW => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "RAW key derivation method is not acceptable", - )) - } - } - }; - - let alg = unsafe { crypto_pwhash_alg_argon2i13() }; - - let res = unsafe { - crypto_pwhash( - key.as_mut_ptr(), - key.len() as c_ulonglong, - passwd.as_ptr(), - passwd.len() as c_ulonglong, - (salt.0).0.as_ptr(), - opslimit as c_ulonglong, - memlimit, - alg, - ) - }; - - if res == 0 { - Ok(key) - } else { - Err(IndyError::from_msg( - IndyErrorKind::InvalidState, - "Sodium pwhash failed", - )) - } -} - -extern "C" { - fn crypto_pwhash_alg_argon2i13() -> c_int; - fn crypto_pwhash_argon2i_opslimit_moderate() -> size_t; - fn crypto_pwhash_argon2i_memlimit_moderate() -> size_t; - fn crypto_pwhash_argon2i_opslimit_interactive() -> size_t; - fn crypto_pwhash_argon2i_memlimit_interactive() -> size_t; - - fn crypto_pwhash( - out: *mut u8, - outlen: c_ulonglong, - passwd: *const u8, - passwdlen: c_ulonglong, - salt: *const u8, // SODIUM_CRYPTO_PWHASH_SALTBYTES - opslimit: c_ulonglong, - memlimit: size_t, - alg: c_int, - ) -> c_int; -} - -#[cfg(test)] -mod tests { - use rmp_serde; - - use super::*; - - #[test] - fn get_salt_works() { - let salt = gen_salt(); - assert_eq!(salt[..].len(), SALTBYTES) - } - - #[test] - fn salt_serialize_deserialize_works() { - let salt = gen_salt(); - let serialized = rmp_serde::to_vec(&salt).unwrap(); - let deserialized: Salt = rmp_serde::from_slice(&serialized).unwrap(); - - assert_eq!(serialized.len(), SALTBYTES + 2); - assert_eq!(salt, deserialized) - } - - #[test] - fn pwhash_works() { - let passwd = b"Correct Horse Battery Staple"; - let mut key = [0u8; 64]; - - let salt = gen_salt(); - let _key = pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_MOD).unwrap(); - } - - #[test] - fn pwhash_works_for_interactive_method() { - let passwd = b"Correct Horse Battery Staple"; - - let salt = gen_salt(); - - let mut key = [0u8; 
64]; - let key_moderate = - pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_MOD).unwrap(); - - let mut key = [0u8; 64]; - let key_interactive = - pwhash(&mut key, passwd, &salt, &KeyDerivationMethod::ARGON2I_INT).unwrap(); - - assert_ne!(key_moderate, key_interactive); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs deleted file mode 100644 index ef70300668..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/randombytes/sodium.rs +++ /dev/null @@ -1,66 +0,0 @@ -use indy_api_types::errors::prelude::*; -use libc::size_t; -use zeroize::Zeroize; - -pub const SEEDBYTES: usize = 32; // randombytes_seedbytes - -#[derive(Zeroize)] -#[zeroize(drop)] -pub struct Seed([u8; SEEDBYTES]); - -impl Seed { - pub fn from_slice(bytes: &[u8]) -> Result { - if bytes.len() != SEEDBYTES { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!( - "Invalid seed length, expected: {:}, provided: {}", - SEEDBYTES, - bytes.len() - ), - )); - } - - let mut seed = Seed([0; SEEDBYTES]); - - for (ni, &bsi) in seed.0.iter_mut().zip(bytes.iter()) { - *ni = bsi - } - - Ok(seed) - } -} - -pub fn randombytes(size: usize) -> Vec { - sodiumoxide::randombytes::randombytes(size) -} - -pub fn randombytes_deterministic(size: usize, seed: &Seed) -> Vec { - let mut out = vec![0u8; size]; - unsafe { randombytes_buf_deterministic(out.as_mut_ptr(), size, &seed.0) }; - out -} - -extern "C" { - fn randombytes_buf_deterministic(out: *mut u8, size: size_t, seed: *const [u8; SEEDBYTES]); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn randombytes_deterministic_works() { - let seed = Seed::from_slice(&[ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, - 3, 4, 5, - ]) - .unwrap(); - let res = randombytes_deterministic(32, &seed); - let expected_bytes = vec![ - 7, 183, 0, 143, 100, 203, 87, 27, 32, 132, 126, 172, 180, 123, 39, 26, 18, 243, 64, 60, - 92, 43, 111, 227, 54, 129, 201, 185, 53, 73, 93, 93, - ]; - assert_eq!(expected_bytes, res); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs deleted file mode 100644 index 0f00f525fb..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs +++ /dev/null @@ -1,45 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::sealedbox; -use super::ed25519_box; - -pub fn encrypt(pk: &ed25519_box::PublicKey, doc: &[u8]) -> Result, IndyError> { - Ok(sealedbox::seal(doc, &pk.0)) -} - -pub fn decrypt( - pk: &ed25519_box::PublicKey, - sk: &ed25519_box::SecretKey, - doc: &[u8], -) -> Result, IndyError> { - sealedbox::open(doc, &pk.0, &sk.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium sealedbox", - ) - }) -} - -#[cfg(test)] -mod tests { - use self::sodiumoxide::crypto::box_; - use super::*; - use crate::crypto::{ - ed25519_box::{PublicKey, SecretKey}, - randombytes::randombytes, - }; - - #[test] - fn encrypt_decrypt_works() { - let (pk, sk) = box_::gen_keypair(); - let (pk, sk) = (PublicKey(pk), SecretKey(sk)); - let doc = randombytes(16); - - let encrypted_data = encrypt(&pk, &doc).unwrap(); - let decrypt_result = decrypt(&pk, &sk, &encrypted_data).unwrap(); - - assert_eq!(doc, decrypt_result); - } -} diff --git 
a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs deleted file mode 100644 index 2b6ef226f7..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/sodium_type.rs +++ /dev/null @@ -1,94 +0,0 @@ -// This macro allows to wrap Sodimoxide type to libvdrtools type keeping the same behaviour -#[macro_export] -macro_rules! sodium_type (($newtype:ident, $sodiumtype:path, $len:ident) => ( - pub struct $newtype(pub(super) $sodiumtype); - - impl $newtype { - - #[allow(dead_code)] - pub fn new(bytes: [u8; $len]) -> $newtype { - $newtype($sodiumtype(bytes)) - } - - #[allow(dead_code)] - pub fn from_slice(bs: &[u8]) -> Result<$newtype, indy_api_types::errors::IndyError> { - let inner = <$sodiumtype>::from_slice(bs) - .ok_or(indy_api_types::errors::err_msg(indy_api_types::errors::IndyErrorKind::InvalidStructure, format!("Invalid bytes for {:?}", stringify!($newtype))))?; - - Ok($newtype(inner)) - } - } - - impl Clone for $newtype { - fn clone(&self) -> $newtype { - $newtype(self.0.clone()) - } - } - - impl ::std::fmt::Debug for $newtype { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - self.0.fmt(f) - } - } - - impl ::std::cmp::PartialEq for $newtype { - fn eq(&self, other: &$newtype) -> bool { - self.0.eq(&other.0) - } - } - - impl ::std::cmp::Eq for $newtype {} - - impl ::serde::Serialize for $newtype { - fn serialize(&self, serializer: S) -> Result where S: ::serde::Serializer - { - serializer.serialize_bytes(&self.0[..]) - } - } - - impl<'de> ::serde::Deserialize<'de> for $newtype { - fn deserialize(deserializer: D) -> Result<$newtype, D::Error> where D: ::serde::Deserializer<'de> - { - <$sodiumtype>::deserialize(deserializer).map($newtype) - } - } - - impl ::std::ops::Index<::std::ops::Range> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::Range) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeTo> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeTo) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeFrom> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeFrom) -> &[u8] { - self.0.index(_index) - } - } - - impl ::std::ops::Index<::std::ops::RangeFull> for $newtype { - type Output = [u8]; - - fn index(&self, _index: ::std::ops::RangeFull) -> &[u8] { - self.0.index(_index) - } - } - - impl AsRef<[u8]> for $newtype { - #[inline] - fn as_ref(&self) -> &[u8] { - &self[..] 
- } - } -)); diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs deleted file mode 100644 index 64cb09cb6a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs +++ /dev/null @@ -1,72 +0,0 @@ -extern crate sodiumoxide; - -use indy_api_types::errors::prelude::*; - -use self::sodiumoxide::crypto::{secretbox, secretbox::xsalsa20poly1305}; - -pub const KEYBYTES: usize = xsalsa20poly1305::KEYBYTES; -pub const NONCEBYTES: usize = xsalsa20poly1305::NONCEBYTES; -pub const MACBYTES: usize = xsalsa20poly1305::MACBYTES; - -sodium_type!(Key, xsalsa20poly1305::Key, KEYBYTES); -sodium_type!(Nonce, xsalsa20poly1305::Nonce, NONCEBYTES); -sodium_type!(Tag, xsalsa20poly1305::Tag, MACBYTES); - -pub fn create_key() -> Key { - Key(secretbox::gen_key()) -} - -pub fn gen_nonce() -> Nonce { - Nonce(secretbox::gen_nonce()) -} - -pub fn encrypt(key: &Key, nonce: &Nonce, doc: &[u8]) -> Vec { - secretbox::seal(doc, &nonce.0, &key.0) -} - -pub fn decrypt(key: &Key, nonce: &Nonce, doc: &[u8]) -> Result, IndyError> { - secretbox::open(doc, &nonce.0, &key.0).map_err(|_| { - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unable to open sodium secretbox", - ) - }) -} - -pub fn encrypt_detached(key: &Key, nonce: &Nonce, doc: &[u8]) -> (Vec, Tag) { - let mut cipher = doc.to_vec(); - let tag = secretbox::seal_detached(cipher.as_mut_slice(), &nonce.0, &key.0); - - (cipher, Tag(tag)) -} - -pub fn decrypt_detached( - key: &Key, - nonce: &Nonce, - tag: &Tag, - doc: &[u8], -) -> Result, IndyError> { - let mut plain = doc.to_vec(); - secretbox::open_detached(plain.as_mut_slice(), &tag.0, &nonce.0, &key.0) - .map_err(|_| IndyError::from_msg(IndyErrorKind::InvalidStructure, "Unable to decrypt data")) - .map(|_| plain) -} - -#[cfg(test)] -mod tests { - use self::sodiumoxide::randombytes; - use super::*; - - #[test] - fn encrypt_decrypt_works() { - let nonce = gen_nonce(); - let key = create_key(); - let data = randombytes::randombytes(16); - - let encrypted_data = encrypt(&key, &nonce, &data); - let decrypt_result = decrypt(&key, &nonce, &encrypted_data); - - assert!(decrypt_result.is_ok()); - assert_eq!(data, decrypt_result.unwrap()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs deleted file mode 100755 index e15e5d5a0a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/environment.rs +++ /dev/null @@ -1,142 +0,0 @@ -use std::{env, path::PathBuf}; - -pub fn indy_home_path() -> PathBuf { - // TODO: FIXME: Provide better handling for the unknown home path case!!! 
- let mut path = dirs::home_dir().unwrap_or_else(|| PathBuf::from("/home/indy")); - let mut indy_client_dir = ".indy_client"; - - if cfg!(target_os = "ios") { - indy_client_dir = "Documents/.indy_client"; - } - - path.push(indy_client_dir); - - if cfg!(target_os = "android") { - path = android_indy_client_dir_path(); - } - - path -} - -pub fn android_indy_client_dir_path() -> PathBuf { - let external_storage = env::var("EXTERNAL_STORAGE"); - let android_dir: String; - - match external_storage { - Ok(val) => android_dir = val + "/.indy_client", - Err(err) => { - panic!("Failed to find external storage path {:?}", err) - } - } - - PathBuf::from(android_dir) -} - -pub fn wallet_home_path() -> PathBuf { - let mut path = indy_home_path(); - path.push("wallet"); - path -} - -pub fn pool_home_path() -> PathBuf { - let mut path = indy_home_path(); - path.push("pool"); - path -} - -pub fn pool_path(pool_name: &str) -> PathBuf { - let mut path = pool_home_path(); - path.push(pool_name); - path -} - -pub fn tmp_path() -> PathBuf { - let mut path = env::temp_dir(); - path.push("indy_client"); - path -} - -pub fn tmp_file_path(file_name: &str) -> PathBuf { - let mut path = tmp_path(); - path.push(file_name); - path -} - -pub fn test_pool_ip() -> String { - env::var("TEST_POOL_IP").unwrap_or("127.0.0.1".to_string()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn indy_home_path_works() { - let path = indy_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - } - - #[test] - fn indy_home_path_works_twice() { - indy_home_path(); - indy_home_path(); - } - - #[test] - fn wallet_home_path_works() { - let path = wallet_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("wallet")); - } - - #[test] - fn pool_home_path_works() { - let path = pool_home_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("pool")); - } - - #[test] - fn pool_path_works() { - let path = pool_path("pool1"); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains(".indy_client")); - assert!(path.to_string_lossy().contains("pool1")); - } - - #[test] - fn tmp_path_works() { - let path = tmp_path(); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains("indy_client")); - } - - #[test] - fn tmp_file_path_works() { - let path = tmp_file_path("test.txt"); - - assert!(path.is_absolute()); - assert!(path.has_root()); - assert!(path.to_string_lossy().contains("indy_client")); - assert!(path.to_string_lossy().contains("test.txt")); - } - - #[test] - fn test_pool_ip_works() { - let pool_ip = test_pool_ip(); - assert!(!pool_ip.is_empty()); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs deleted file mode 100644 index 92e2aaccf2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/lib.rs +++ /dev/null @@ -1,44 +0,0 @@ -// allow all clippy warnings, given this is legacy to be removed soon -#![allow(clippy::all)] -#[macro_use] -extern crate serde_json; - -#[cfg(debug_assertions)] -#[macro_export] -macro_rules! secret { - ($val:expr) => {{ - $val - }}; -} - -#[cfg(not(debug_assertions))] -#[macro_export] -macro_rules! 
secret { - ($val:expr) => {{ - "_" - }}; -} - -#[macro_use] -pub mod crypto; -pub mod environment; -pub mod sequence; -pub mod wql; - -use indy_api_types::{CommandHandle, SearchHandle, VdrHandle, WalletHandle}; - -pub fn next_wallet_handle() -> WalletHandle { - WalletHandle(sequence::get_next_id()) -} - -pub fn next_command_handle() -> CommandHandle { - sequence::get_next_id() -} - -pub fn next_search_handle() -> SearchHandle { - SearchHandle(sequence::get_next_id()) -} - -pub fn next_vdr_handle() -> VdrHandle { - sequence::get_next_id() -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs deleted file mode 100644 index 5032b8ecad..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/sequence.rs +++ /dev/null @@ -1,11 +0,0 @@ -use std::sync::atomic::{AtomicUsize, Ordering}; - -use lazy_static::lazy_static; - -lazy_static! { - static ref IDS_COUNTER: AtomicUsize = AtomicUsize::new(1); -} - -pub fn get_next_id() -> i32 { - (IDS_COUNTER.fetch_add(1, Ordering::SeqCst) + 1) as i32 -} diff --git a/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs b/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs deleted file mode 100644 index d9cabdd8d6..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-utils/src/wql.rs +++ /dev/null @@ -1,2791 +0,0 @@ -use std::string; - -use serde::{ - de, - ser::{Serialize, Serializer}, - Deserialize, Deserializer, -}; -use serde_json::{self, Value}; - -#[derive(Debug, Hash, Clone, PartialEq, Eq)] -pub enum Query { - And(Vec), - Or(Vec), - Not(Box), - Eq(String, String), - Neq(String, String), - Gt(String, String), - Gte(String, String), - Lt(String, String), - Lte(String, String), - Like(String, String), - In(String, Vec), -} - -impl Serialize for Query { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.to_value().serialize(serializer) - } -} - -impl<'de> Deserialize<'de> for Query { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let v = Value::deserialize(deserializer)?; - - match v { - serde_json::Value::Object(map) => parse_query(map).map_err(de::Error::missing_field), - serde_json::Value::Array(array) => { - // cast old restrictions format to wql - let mut res: Vec = Vec::new(); - for sub_query in array { - let sub_query: serde_json::Map = sub_query - .as_object() - .ok_or_else(|| de::Error::custom("Restriction is invalid"))? 
- .clone() - .into_iter() - .filter(|(_, v)| !v.is_null()) - .collect(); - - if !sub_query.is_empty() { - res.push(serde_json::Value::Object(sub_query)); - } - } - - let mut map = serde_json::Map::new(); - map.insert("$or".to_string(), serde_json::Value::Array(res)); - - parse_query(map).map_err(de::Error::custom) - } - _ => Err(de::Error::missing_field( - "Restriction must be either object or array", - )), - } - } -} - -impl Query { - pub fn optimise(self) -> Option { - match self { - Query::Not(boxed_operator) => { - if let Query::Not(nested_operator) = *boxed_operator { - Some(*nested_operator) - } else { - Some(Query::Not(boxed_operator)) - } - } - Query::And(suboperators) if suboperators.is_empty() => None, - Query::And(mut suboperators) if suboperators.len() == 1 => { - suboperators.remove(0).optimise() - } - Query::And(suboperators) => { - let mut suboperators: Vec = suboperators - .into_iter() - .flat_map(|operator| operator.optimise()) - .collect(); - - match suboperators.len() { - 0 => None, - 1 => Some(suboperators.remove(0)), - _ => Some(Query::And(suboperators)), - } - } - Query::Or(suboperators) if suboperators.is_empty() => None, - Query::Or(mut suboperators) if suboperators.len() == 1 => { - suboperators.remove(0).optimise() - } - Query::Or(suboperators) => { - let mut suboperators: Vec = suboperators - .into_iter() - .flat_map(|operator| operator.optimise()) - .collect(); - - match suboperators.len() { - 0 => None, - 1 => Some(suboperators.remove(0)), - _ => Some(Query::Or(suboperators)), - } - } - Query::In(key, mut targets) if targets.len() == 1 => { - Some(Query::Eq(key, targets.remove(0))) - } - Query::In(key, targets) => Some(Query::In(key, targets)), - _ => Some(self), - } - } - - fn to_value(&self) -> serde_json::Value { - match *self { - Query::Eq(ref tag_name, ref tag_value) => json!({ tag_name: tag_value }), - Query::Neq(ref tag_name, ref tag_value) => json!({tag_name: {"$neq": tag_value}}), - Query::Gt(ref tag_name, ref tag_value) => json!({tag_name: {"$gt": tag_value}}), - Query::Gte(ref tag_name, ref tag_value) => json!({tag_name: {"$gte": tag_value}}), - Query::Lt(ref tag_name, ref tag_value) => json!({tag_name: {"$lt": tag_value}}), - Query::Lte(ref tag_name, ref tag_value) => json!({tag_name: {"$lte": tag_value}}), - Query::Like(ref tag_name, ref tag_value) => json!({tag_name: {"$like": tag_value}}), - Query::In(ref tag_name, ref tag_values) => json!({tag_name: {"$in": tag_values}}), - Query::And(ref operators) => { - if !operators.is_empty() { - json!({ - "$and": operators.iter().map(|q: &Query| q.to_value()).collect::>() - }) - } else { - json!({}) - } - } - Query::Or(ref operators) => { - if !operators.is_empty() { - json!({ - "$or": operators.iter().map(|q: &Query| q.to_value()).collect::>() - }) - } else { - json!({}) - } - } - Query::Not(ref stmt) => json!({"$not": stmt.to_value()}), - } - } -} - -impl Default for Query { - fn default() -> Self { - Query::And(Vec::new()) - } -} - -impl string::ToString for Query { - fn to_string(&self) -> String { - self.to_value().to_string() - } -} - -fn parse_query(map: serde_json::Map) -> Result { - let mut operators: Vec = Vec::new(); - - for (key, value) in map { - if let Some(operator_) = parse_operator(key, value)? 
{ - operators.push(operator_); - } - } - - let query = if operators.len() == 1 { - operators.remove(0) - } else { - Query::And(operators) - }; - - Ok(query) -} - -fn parse_operator(key: String, value: serde_json::Value) -> Result, &'static str> { - match (key.as_str(), value) { - ("$and", serde_json::Value::Array(values)) if values.is_empty() => Ok(None), - ("$and", serde_json::Value::Array(values)) => { - let operators: Vec = parse_list_operators(values)?; - Ok(Some(Query::And(operators))) - } - ("$and", _) => Err("$and must be array of JSON objects"), - ("$or", serde_json::Value::Array(values)) if values.is_empty() => Ok(None), - ("$or", serde_json::Value::Array(values)) => { - let operators: Vec = parse_list_operators(values)?; - Ok(Some(Query::Or(operators))) - } - ("$or", _) => Err("$or must be array of JSON objects"), - ("$not", serde_json::Value::Object(map)) => { - let operator = parse_query(map)?; - Ok(Some(Query::Not(Box::new(operator)))) - } - ("$not", _) => Err("$not must be JSON object"), - (_, serde_json::Value::String(value)) => Ok(Some(Query::Eq(key, value))), - (_, serde_json::Value::Object(map)) => { - if map.len() == 1 { - let (operator_name, value) = map.into_iter().next().unwrap(); - parse_single_operator(operator_name, key, value).map(Some) - } else { - Err("value must be JSON object of length 1") - } - } - (_, _) => Err("Unsupported value"), - } -} - -fn parse_list_operators(operators: Vec) -> Result, &'static str> { - let mut out_operators: Vec = Vec::with_capacity(operators.len()); - - for value in operators.into_iter() { - if let serde_json::Value::Object(map) = value { - let suboperator = parse_query(map)?; - out_operators.push(suboperator); - } else { - return Err("operator must be array of JSON objects"); - } - } - - Ok(out_operators) -} - -fn parse_single_operator( - operator_name: String, - key: String, - value: serde_json::Value, -) -> Result { - match (&*operator_name, value) { - ("$neq", serde_json::Value::String(value_)) => Ok(Query::Neq(key, value_)), - ("$neq", _) => Err("$neq must be used with string"), - ("$gt", serde_json::Value::String(value_)) => Ok(Query::Gt(key, value_)), - ("$gt", _) => Err("$gt must be used with string"), - ("$gte", serde_json::Value::String(value_)) => Ok(Query::Gte(key, value_)), - ("$gte", _) => Err("$gte must be used with string"), - ("$lt", serde_json::Value::String(value_)) => Ok(Query::Lt(key, value_)), - ("$lt", _) => Err("$lt must be used with string"), - ("$lte", serde_json::Value::String(value_)) => Ok(Query::Lte(key, value_)), - ("$lte", _) => Err("$lte must be used with string"), - ("$like", serde_json::Value::String(value_)) => Ok(Query::Like(key, value_)), - ("$like", _) => Err("$like must be used with string"), - ("$in", serde_json::Value::Array(values)) => { - let mut target_values: Vec = Vec::with_capacity(values.len()); - - for v in values.into_iter() { - if let serde_json::Value::String(s) = v { - target_values.push(s); - } else { - return Err("$in must be used with array of strings"); - } - } - - Ok(Query::In(key, target_values)) - } - ("$in", _) => Err("$in must be used with array of strings"), - (_, _) => Err("Unknown operator"), - } -} - -#[cfg(test)] -mod tests { - use rand::{distributions::Alphanumeric, thread_rng, Rng}; - - use super::*; - - fn _random_string(len: usize) -> String { - thread_rng() - .sample_iter(&Alphanumeric) - .take(len) - .map(char::from) - .collect() - } - - /// parse - #[test] - fn test_simple_operator_empty_json_parse() { - let json = "{}"; - - let query: Query = 
::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_explicit_empty_and_parse() { - let json = r#"{"$and":[]}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_empty_or_parse() { - let json = r#"{"$or":[]}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_empty_not_parse() { - let json = r#"{"$not":{}}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - let expected = Query::Not(Box::new(Query::And(vec![]))); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_eq_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":"{}"}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Eq(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$neq":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Neq(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$gt":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Gt(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$gte":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Gte(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$lt":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Lt(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_lte_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$lte":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Lte(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$like":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Like(name1, value1); - - assert_eq!(query, expected); - } - - #[test] - fn test_simple_operator_in_plaintext_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"{}":{{"$in":["{}"]}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::In(name1, vec![value1]); - - assert_eq!(query, expected); - } - - #[test] - 
fn test_simple_operator_in_plaintexts_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"{}":{{"$in":["{}","{}","{}"]}}}}"#, - name1, value1, value2, value3 - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::In(name1, vec![value1, value2, value3]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":"{}"}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Neq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Gt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Gte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Lt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Lte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Like(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::In(name1, vec![value1])]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_one_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = 
format!(r#"{{"$and":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![Query::Not(Box::new(Query::Eq(name1, value1)))]); - - assert_eq!(query, expected); - } - - #[test] - #[ignore] // order - fn test_short_and_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"{}":"{}","{}":"{}","{}":"{}"}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Neq(name1, value1), - Query::Neq(name2, value2), - Query::Neq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Gt(name1, value1), - Query::Gt(name2, value2), - Query::Gt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Gte(name1, value1), - Query::Gte(name2, value2), - Query::Gte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_lt_parse() { - let name1 = 
_random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Lt(name1, value1), - Query::Lt(name2, value2), - Query::Lt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Lte(name1, value1), - Query::Lte(name2, value2), - Query::Lte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Like(name1, value1), - Query::Like(name2, value2), - Query::Like(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::In(name1, vec![value1]), - Query::In(name2, vec![value2]), - Query::In(name3, vec![value3]), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Not(Box::new(Query::Eq(name1, value1))), - Query::Not(Box::new(Query::Eq(name2, value2))), - Query::Not(Box::new(Query::Eq(name3, value3))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_with_multiple_mixed_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); 
- let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let json = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![ - Query::Eq(name1, value1), - Query::Neq(name2, value2), - Query::Gt(name3, value3), - Query::Gte(name4, value4), - Query::Lt(name5, value5), - Query::Lte(name6, value6), - Query::Like(name7, value7), - Query::In(name8, vec![value8a, value8b]), - Query::Not(Box::new(Query::Eq(name9, value9))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":"{}"}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Neq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Gt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Gte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Lt(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Lte(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn 
test_or_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Like(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::In(name1, vec![value1])]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_one_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$or":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Not(Box::new(Query::Eq(name1, value1)))]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Eq(name1, value1), - Query::Eq(name2, value2), - Query::Eq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Neq(name1, value1), - Query::Neq(name2, value2), - Query::Neq(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Gt(name1, value1), - Query::Gt(name2, value2), - Query::Gt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = 
Query::Or(vec![ - Query::Gte(name1, value1), - Query::Gte(name2, value2), - Query::Gte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Lt(name1, value1), - Query::Lt(name2, value2), - Query::Lt(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Lte(name1, value1), - Query::Lte(name2, value2), - Query::Lte(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Like(name1, value1), - Query::Like(name2, value2), - Query::Like(name3, value3), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::In(name1, vec![value1]), - Query::In(name2, vec![value2]), - Query::In(name3, vec![value3]), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_or_with_multiple_not_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Not(Box::new(Query::Eq(name1, value1))), - Query::Not(Box::new(Query::Eq(name2, value2))), - Query::Not(Box::new(Query::Eq(name3, value3))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn 
test_or_with_multiple_mixed_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let json = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![ - Query::Eq(name1, value1), - Query::Neq(name2, value2), - Query::Gt(name3, value3), - Query::Gte(name4, value4), - Query::Lt(name5, value5), - Query::Lte(name6, value6), - Query::Like(name7, value7), - Query::In(name8, vec![value8a, value8b]), - Query::Not(Box::new(Query::Eq(name9, value9))), - ]); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_eq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":"{}"}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Eq(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_neq_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$neq":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Neq(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_gt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$gt":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Gt(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_gte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$gte":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Gte(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_lt_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$lt":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Lt(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_lte_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = 
format!(r#"{{"$not":{{"{}":{{"$lte":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Lte(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_like_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$like":"{}"}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::Like(name1, value1))); - - assert_eq!(query, expected); - } - - #[test] - fn test_not_with_one_in_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let json = format!(r#"{{"$not":{{"{}":{{"$in":["{}"]}}}}}}"#, name1, value1); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::In(name1, vec![value1]))); - - assert_eq!(query, expected); - } - - #[test] - fn test_and_or_not_complex_case_parse() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8 = _random_string(10); - - let json = format!( - r#"{{"$not":{{"$and":[{{"{}":"{}"}},{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"$not":{{"{}":{{"$lte":"{}"}}}}}},{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"$not":{{"{}":{{"$gte":"{}"}}}}}}]}}]}},{{"$not":{{"{}":{{"$like":"{}"}}}}}},{{"$and":[{{"{}":"{}"}},{{"$not":{{"{}":{{"$neq":"{}"}}}}}}]}}]}}}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8, - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Not(Box::new(Query::And(vec![ - Query::Eq(name1, value1), - Query::Or(vec![ - Query::Gt(name2, value2), - Query::Not(Box::new(Query::Lte(name3, value3))), - Query::And(vec![ - Query::Lt(name4, value4), - Query::Not(Box::new(Query::Gte(name5, value5))), - ]), - ]), - Query::Not(Box::new(Query::Like(name6, value6))), - Query::And(vec![ - Query::Eq(name7, value7), - Query::Not(Box::new(Query::Neq(name8, value8))), - ]), - ]))); - - assert_eq!(query, expected); - } - - /// to string - #[test] - fn test_simple_operator_empty_and_to_string() { - let query = Query::And(vec![]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = "{}"; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_empty_or_to_string() { - let query = Query::Or(vec![]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = "{}"; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_empty_not_to_string() { - let query = Query::Not(Box::new(Query::And(vec![]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = r#"{"$not":{}}"#; - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Eq(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let 
expected = format!(r#"{{"{}":"{}"}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Neq(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$neq":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_gt_plaintext_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Gt(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$gt":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Gte(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$gte":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Lt(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$lt":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Lte(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$lte":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Like(name1.clone(), value1.clone()); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$like":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::In(name1.clone(), vec![value1.clone()]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"{}":{{"$in":["{}"]}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_simple_operator_in_multimply_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::In( - name1.clone(), - vec![value1.clone(), value2.clone(), value3.clone()], - ); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"{}":{{"$in":["{}","{}","{}"]}}}}"#, - name1, value1, value2, value3 - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Eq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":"{}"}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = 
_random_string(10); - - let query = Query::And(vec![Query::Neq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Gt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Gte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Lt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Lte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Like(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::In(name1.clone(), vec![value1.clone()])]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_one_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::And(vec![Query::Not(Box::new(Query::Eq( - name1.clone(), - value1.clone(), - )))]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$and":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Eq(name2.clone(), value2.clone()), - Query::Eq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - 
name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Neq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Neq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Gt(name1.clone(), value1.clone()), - Query::Gt(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Gte(name1.clone(), value1.clone()), - Query::Gte(name2.clone(), value2.clone()), - Query::Gte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Lt(name1.clone(), value1.clone()), - Query::Lt(name2.clone(), value2.clone()), - Query::Lt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Lte(name1.clone(), value1.clone()), - Query::Lte(name2.clone(), value2.clone()), - Query::Lte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, 
value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Like(name1.clone(), value1.clone()), - Query::Like(name2.clone(), value2.clone()), - Query::Like(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::In(name1.clone(), vec![value1.clone()]), - Query::In(name2.clone(), vec![value2.clone()]), - Query::In(name3.clone(), vec![value3.clone()]), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::And(vec![ - Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))), - Query::Not(Box::new(Query::Eq(name2.clone(), value2.clone()))), - Query::Not(Box::new(Query::Eq(name3.clone(), value3.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$and":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_with_multiple_mixed_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let query = Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - Query::Gte(name4.clone(), value4.clone()), - Query::Lt(name5.clone(), value5.clone()), - Query::Lte(name6.clone(), value6.clone()), - Query::Like(name7.clone(), value7.clone()), - Query::In(name8.clone(), vec![value8a.clone(), value8b.clone()]), - Query::Not(Box::new(Query::Eq(name9.clone(), value9.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let 
expected = format!( - r#"{{"$and":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Eq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":"{}"}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Neq(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Gt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}}]}}"#, name1, value1); - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Gte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Lt(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Lte(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Like(name1.clone(), value1.clone())]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$like":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::In(name1.clone(), vec![value1.clone()])]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_one_not_eq_to_string() { - let name1 = 
_random_string(10); - let value1 = _random_string(10); - - let query = Query::Or(vec![Query::Not(Box::new(Query::Eq( - name1.clone(), - value1.clone(), - )))]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$or":[{{"$not":{{"{}":"{}"}}}}]}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Eq(name2.clone(), value2.clone()), - Query::Eq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":"{}"}},{{"{}":"{}"}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Neq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Neq(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$neq":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Gt(name1.clone(), value1.clone()), - Query::Gt(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Gte(name1.clone(), value1.clone()), - Query::Gte(name2.clone(), value2.clone()), - Query::Gte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$gte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Lt(name1.clone(), value1.clone()), - Query::Lt(name2.clone(), value2.clone()), - Query::Lt(name3.clone(), value3.clone()), - ]); - - 
let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lt":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Lte(name1.clone(), value1.clone()), - Query::Lte(name2.clone(), value2.clone()), - Query::Lte(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$lte":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Like(name1.clone(), value1.clone()), - Query::Like(name2.clone(), value2.clone()), - Query::Like(name3.clone(), value3.clone()), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$like":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::In(name1.clone(), vec![value1.clone()]), - Query::In(name2.clone(), vec![value2.clone()]), - Query::In(name3.clone(), vec![value3.clone()]), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}},{{"{}":{{"$in":["{}"]}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_not_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - - let query = Query::Or(vec![ - Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))), - Query::Not(Box::new(Query::Eq(name2.clone(), value2.clone()))), - Query::Not(Box::new(Query::Eq(name3.clone(), value3.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, value1, name2, value2, name3, value3, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_or_with_multiple_mixed_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = 
_random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8a = _random_string(10); - let value8b = _random_string(10); - let name9 = _random_string(10); - let value9 = _random_string(10); - - let query = Query::Or(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Neq(name2.clone(), value2.clone()), - Query::Gt(name3.clone(), value3.clone()), - Query::Gte(name4.clone(), value4.clone()), - Query::Lt(name5.clone(), value5.clone()), - Query::Lte(name6.clone(), value6.clone()), - Query::Like(name7.clone(), value7.clone()), - Query::In(name8.clone(), vec![value8a.clone(), value8b.clone()]), - Query::Not(Box::new(Query::Eq(name9.clone(), value9.clone()))), - ]); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$or":[{{"{}":"{}"}},{{"{}":{{"$neq":"{}"}}}},{{"{}":{{"$gt":"{}"}}}},{{"{}":{{"$gte":"{}"}}}},{{"{}":{{"$lt":"{}"}}}},{{"{}":{{"$lte":"{}"}}}},{{"{}":{{"$like":"{}"}}}},{{"{}":{{"$in":["{}","{}"]}}}},{{"$not":{{"{}":"{}"}}}}]}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8a, - value8b, - name9, - value9, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_eq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Eq(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":"{}"}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_neq_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Neq(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$neq":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_gt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Gt(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$gt":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_gte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Gte(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$gte":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_lt_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Lt(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$lt":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_lte_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Lte(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = 
format!(r#"{{"$not":{{"{}":{{"$lte":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_like_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::Like(name1.clone(), value1.clone()))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$like":"{}"}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_not_with_one_in_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - - let query = Query::Not(Box::new(Query::In(name1.clone(), vec![value1.clone()]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!(r#"{{"$not":{{"{}":{{"$in":["{}"]}}}}}}"#, name1, value1); - - assert_eq!(json, expected); - } - - #[test] - fn test_and_or_not_complex_case_to_string() { - let name1 = _random_string(10); - let value1 = _random_string(10); - let name2 = _random_string(10); - let value2 = _random_string(10); - let name3 = _random_string(10); - let value3 = _random_string(10); - let name4 = _random_string(10); - let value4 = _random_string(10); - let name5 = _random_string(10); - let value5 = _random_string(10); - let name6 = _random_string(10); - let value6 = _random_string(10); - let name7 = _random_string(10); - let value7 = _random_string(10); - let name8 = _random_string(10); - let value8 = _random_string(10); - - let query = Query::Not(Box::new(Query::And(vec![ - Query::Eq(name1.clone(), value1.clone()), - Query::Or(vec![ - Query::Gt(name2.clone(), value2.clone()), - Query::Not(Box::new(Query::Lte(name3.clone(), value3.clone()))), - Query::And(vec![ - Query::Lt(name4.clone(), value4.clone()), - Query::Not(Box::new(Query::Gte(name5.clone(), value5.clone()))), - ]), - ]), - Query::Not(Box::new(Query::Like(name6.clone(), value6.clone()))), - Query::And(vec![ - Query::Eq(name7.clone(), value7.clone()), - Query::Not(Box::new(Query::Neq(name8.clone(), value8.clone()))), - ]), - ]))); - - let json = ::serde_json::to_string(&query).unwrap(); - - let expected = format!( - r#"{{"$not":{{"$and":[{{"{}":"{}"}},{{"$or":[{{"{}":{{"$gt":"{}"}}}},{{"$not":{{"{}":{{"$lte":"{}"}}}}}},{{"$and":[{{"{}":{{"$lt":"{}"}}}},{{"$not":{{"{}":{{"$gte":"{}"}}}}}}]}}]}},{{"$not":{{"{}":{{"$like":"{}"}}}}}},{{"$and":[{{"{}":"{}"}},{{"$not":{{"{}":{{"$neq":"{}"}}}}}}]}}]}}}}"#, - name1, - value1, - name2, - value2, - name3, - value3, - name4, - value4, - name5, - value5, - name6, - value6, - name7, - value7, - name8, - value8, - ); - - assert_eq!(json, expected); - } - - #[test] - fn test_old_format() { - let name1 = _random_string(10); - let name2 = _random_string(10); - let value1 = _random_string(10); - let value2 = _random_string(10); - - let json = format!( - r#"[{{"{}":"{}"}}, {{"{}":"{}"}}]"#, - name1, value1, name2, value2 - ); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1), Query::Eq(name2, value2)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_old_format_empty() { - let json = r#"[]"#.to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::And(vec![]); - - assert_eq!(query, expected); - } - - #[test] - fn test_old_format_with_nulls() { - let name1 = _random_string(10); - let name2 = _random_string(10); - let value1 = _random_string(10); - - let json = json!(vec![ - json!({ name1.clone(): value1 }), - json!({ name2: 
serde_json::Value::Null }) - ]) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - let expected = Query::Or(vec![Query::Eq(name1, value1)]); - - assert_eq!(query, expected); - } - - #[test] - fn test_optimise_and() { - let json = r#"{}"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_or() { - let json = r#"[]"#; - - let query: Query = ::serde_json::from_str(json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_single_nested_and() { - let json = json!({ - "$and": [ - { - "$and": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_several_nested_and() { - let json = json!({ - "$and": [ - { - "$and": [] - }, - { - "$and": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_single_nested_or() { - let json = json!({ - "$and": [ - { - "$or": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } - - #[test] - fn test_optimise_several_nested_or() { - let json = json!({ - "$and": [ - { - "$or": [] - }, - { - "$or": [] - } - ] - }) - .to_string(); - - let query: Query = ::serde_json::from_str(&json).unwrap(); - - assert_eq!(query.optimise(), None); - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml b/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml deleted file mode 100644 index 88d5bd99ad..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "indy-wallet" -version = "0.1.0" -authors = ["Hyperledger Indy Contributors "] -edition = "2018" - -[features] -default = [] -benchmark = [] -mysql = [] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -async-trait = "0.1" -byteorder = "1" -futures = { version = "0.3", default-features = false, features = [ "alloc" ] } -indy-api-types = { path = "../indy-api-types" } -indy-utils = { path = "../indy-utils" } -libc = "0.2" -log = "0.4" -rmp-serde = "1" -bs58 = "0.5" -serde = "1" -serde_json = "1" -serde_derive = "1" -sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } -zeroize = "1" -lru = "0.12" - -[dev-dependencies] -async-std = "1" diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs deleted file mode 100644 index fe50b13e4c..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/lru.rs +++ /dev/null @@ -1,50 +0,0 @@ -use std::num::NonZeroUsize; - -use lru::LruCache as InnerCache; - -use crate::cache::{ - wallet_cache::{WalletCacheKey, WalletCacheValue}, - Cache, -}; - -pub struct LruCache { - inner: InnerCache, -} - -impl LruCache { - pub fn new(size: NonZeroUsize) -> LruCache { - LruCache { - inner: InnerCache::new(size), - } - } -} - -impl Cache for LruCache { - fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option { - self.inner.put(key, value) - } - - fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue> { - self.inner.get(key) - } - - fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue> { - self.inner.get_mut(key) - } - - fn pop(&mut self, 
key: &WalletCacheKey) -> Option { - self.inner.pop(key) - } - - fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue> { - self.inner.peek(key) - } - - fn len(&self) -> usize { - self.inner.len() - } - - fn cap(&self) -> usize { - self.inner.cap().into() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs deleted file mode 100644 index f01402ac65..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/mod.rs +++ /dev/null @@ -1,17 +0,0 @@ -mod lru; -pub mod wallet_cache; - -use crate::cache::wallet_cache::{WalletCacheKey, WalletCacheValue}; - -pub trait Cache { - fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option; - fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue>; - fn pop(&mut self, key: &WalletCacheKey) -> Option; - #[allow(dead_code)] - fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - #[allow(dead_code)] - fn len(&self) -> usize; - #[allow(dead_code)] - fn cap(&self) -> usize; -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs deleted file mode 100644 index 2bf91dad4e..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/cache/wallet_cache.rs +++ /dev/null @@ -1,338 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - iter::FromIterator, - num::NonZeroUsize, - sync::{ - atomic::{AtomicUsize, Ordering}, - Mutex, RwLock, - }, -}; - -use indy_api_types::domain::wallet::{CacheConfig, CachingAlgorithm}; - -use crate::{ - cache::{lru::LruCache, Cache}, - storage::{ - StorageRecord, Tag, - Tag::{Encrypted, PlainText}, - TagName, - TagName::{OfEncrypted, OfPlain}, - }, - wallet::EncryptedValue, - RecordOptions, -}; - -#[derive(PartialEq, Eq, Hash)] -pub struct WalletCacheKey { - type_: Vec, - id: Vec, -} - -pub struct WalletCacheValue { - value: EncryptedValue, - tags: Vec, -} - -pub struct WalletCache { - cache: Option>>, - cache_entities: HashSet, -} - -impl WalletCache { - pub fn new(config: Option) -> Self { - match config { - Some(cache_config) if cache_config.size > 0 && !cache_config.entities.is_empty() => { - let cache = match cache_config.algorithm { - CachingAlgorithm::LRU => { - LruCache::new(NonZeroUsize::new(cache_config.size).unwrap()) - } - }; - WalletCache { - cache: Some(Mutex::new(Box::new(cache))), - cache_entities: HashSet::from_iter(cache_config.entities.iter().cloned()), - } - } - _ => { - WalletCache { - // no cache - cache: None, - cache_entities: HashSet::new(), - } - } - } - } - - pub fn is_type_cacheable(&self, type_: &str) -> bool { - self.cache.is_some() && self.cache_entities.contains(&type_.to_owned()) - } - - pub fn add( - &self, - type_: &str, - etype: &[u8], - eid: &[u8], - evalue: &EncryptedValue, - etags: &[Tag], - ) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let value = WalletCacheValue { - value: evalue.to_owned(), - tags: etags.to_owned(), - }; - let _ = protected_cache.lock().unwrap().put(key, value); - } - } - } - - pub async fn add_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etags: &[Tag]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: 
etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| v.tags.append(&mut etags.to_owned())); - } - } - } - - pub async fn update_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etags: &[Tag]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| v.tags = etags.to_vec()); - } - } - } - - pub async fn delete_tags(&self, type_: &str, etype: &[u8], eid: &[u8], etag_names: &[TagName]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let mut enc_tag_names = HashSet::new(); - let mut plain_tag_names = HashSet::new(); - for x in etag_names { - match x { - OfEncrypted(value) => enc_tag_names.insert(value), - OfPlain(value) => plain_tag_names.insert(value), - }; - } - let _ = protected_cache - .lock() - .unwrap() //await - .get_mut(&key) - .map(|v| { - v.tags.retain(|el| match el { - Encrypted(tag_name, _) => !enc_tag_names.contains(tag_name), - PlainText(tag_name, _) => !plain_tag_names.contains(tag_name), - }); - }); - } - } - } - - pub async fn update(&self, type_: &str, etype: &[u8], eid: &[u8], evalue: &EncryptedValue) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() // await - .get_mut(&key) - .map(|v| v.value = evalue.to_owned()); - } - } - } - - pub async fn get( - &self, - type_: &str, - etype: &[u8], - eid: &[u8], - options: &RecordOptions, - ) -> Option { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - protected_cache - .lock() - .unwrap() //await - .get(&key) - .map(|v| StorageRecord { - id: eid.to_owned(), - value: if options.retrieve_value { - Some(v.value.clone()) - } else { - None - }, - type_: if options.retrieve_type { - Some(etype.to_owned()) - } else { - None - }, - tags: if options.retrieve_tags { - Some(v.tags.clone()) - } else { - None - }, - }) - } else { - None - } - } else { - None - } - } - - pub async fn delete(&self, type_: &str, etype: &[u8], eid: &[u8]) { - if let Some(protected_cache) = &self.cache { - if self.cache_entities.contains(&type_.to_owned()) { - let key = WalletCacheKey { - type_: etype.to_owned(), - id: eid.to_owned(), - }; - let _ = protected_cache - .lock() - .unwrap() //await - .pop(&key); - } - } - } -} - -#[derive(Default, Debug)] -pub struct WalletCacheHitData { - pub hit: AtomicUsize, - pub miss: AtomicUsize, - pub not_cached: AtomicUsize, -} - -impl WalletCacheHitData { - fn inc(var: &AtomicUsize, increment: usize) -> usize { - var.fetch_add(increment, Ordering::Relaxed) - } - - fn get(var: &AtomicUsize) -> usize { - var.load(Ordering::Relaxed) - } - - pub fn inc_hit(&self) -> usize { - WalletCacheHitData::inc(&self.hit, 1) - } - - pub fn inc_miss(&self) -> usize { - WalletCacheHitData::inc(&self.miss, 1) - } - - pub fn inc_not_cached(&self) -> usize { - WalletCacheHitData::inc(&self.not_cached, 1) - } - - pub fn get_hit(&self) -> usize { - WalletCacheHitData::get(&self.hit) - } - - pub fn 
get_miss(&self) -> usize { - WalletCacheHitData::get(&self.miss) - } - - pub fn get_not_cached(&self) -> usize { - WalletCacheHitData::get(&self.not_cached) - } -} - -impl Clone for WalletCacheHitData { - fn clone(&self) -> Self { - WalletCacheHitData { - hit: AtomicUsize::from(self.get_hit()), - miss: AtomicUsize::from(self.get_miss()), - not_cached: AtomicUsize::from(self.get_not_cached()), - } - } - - fn clone_from(&mut self, source: &Self) { - *self.hit.get_mut() = source.get_hit(); - *self.miss.get_mut() = source.get_miss(); - *self.not_cached.get_mut() = source.get_not_cached(); - } -} - -pub struct WalletCacheHitMetrics { - pub data: RwLock>, -} - -impl WalletCacheHitMetrics { - pub fn new() -> Self { - WalletCacheHitMetrics { - data: RwLock::new(HashMap::new()), - } - } - - pub async fn inc_cache_hit(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_hit()).await - } - - pub async fn inc_cache_miss(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_miss()).await - } - - pub async fn inc_not_cached(&self, type_: &str) -> usize { - self.update_data(type_, |x| x.inc_not_cached()).await - } - - async fn update_data(&self, type_: &str, f: fn(&WalletCacheHitData) -> usize) -> usize { - let read_guard = self.data.read().unwrap(); //await; - match read_guard.get(type_) { - Some(x) => f(x), - None => { - drop(read_guard); - let mut write_guard = self.data.write().unwrap(); //await; - // check if data is inserted in the mean time until write lock is acquired. - match write_guard.get(type_) { - Some(x) => f(x), - None => { - // we are now holding exclusive access, so insert the item in map. - let d = Default::default(); - let result = f(&d); - write_guard.insert(type_.to_string(), d); - result - } - } - } - } - } - - #[allow(dead_code)] - pub async fn get_data_for_type(&self, type_: &str) -> Option { - self.data.read().unwrap().get(type_).cloned() - } - - pub fn get_data(&self) -> HashMap { - self.data.read().unwrap().clone() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs deleted file mode 100644 index d3df984a1a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/encryption.rs +++ /dev/null @@ -1,505 +0,0 @@ -use std::{collections::HashMap, str}; - -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; -use indy_utils::crypto::{chacha20poly1305_ietf, hmacsha256, pwhash_argon2i13}; -use serde::{Deserialize, Serialize}; - -use crate::{ - storage::{StorageRecord, Tag, TagName}, - Keys, Metadata, WalletRecord, -}; - -pub(super) fn master_key_salt_from_slice(slice: &[u8]) -> IndyResult { - let salt = pwhash_argon2i13::Salt::from_slice(slice) - .to_indy(IndyErrorKind::WalletAccessFailed, "Invalid master key salt")?; - - Ok(salt) -} - -//TODO memzero for passphrase -#[derive(Debug, Serialize, Deserialize, Clone)] -pub enum KeyDerivationData { - Raw(String), - Argon2iMod(String, pwhash_argon2i13::Salt), - Argon2iInt(String, pwhash_argon2i13::Salt), -} - -impl KeyDerivationData { - pub fn from_passphrase_with_new_salt( - passphrase: &str, - derivation_method: &KeyDerivationMethod, - ) -> Self { - let salt = pwhash_argon2i13::gen_salt(); - let passphrase = passphrase.to_owned(); - match *derivation_method { - KeyDerivationMethod::ARGON2I_INT => KeyDerivationData::Argon2iInt(passphrase, salt), - KeyDerivationMethod::ARGON2I_MOD => KeyDerivationData::Argon2iMod(passphrase, salt), - KeyDerivationMethod::RAW => 
KeyDerivationData::Raw(passphrase), - } - } - - pub(super) fn from_passphrase_and_metadata( - passphrase: &str, - metadata: &Metadata, - derivation_method: &KeyDerivationMethod, - ) -> IndyResult { - let passphrase = passphrase.to_owned(); - - let data = match (derivation_method, metadata) { - (KeyDerivationMethod::RAW, &Metadata::MetadataRaw(_)) => { - KeyDerivationData::Raw(passphrase) - } - (KeyDerivationMethod::ARGON2I_INT, Metadata::MetadataArgon(metadata)) => { - let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; - KeyDerivationData::Argon2iInt(passphrase, master_key_salt) - } - (KeyDerivationMethod::ARGON2I_MOD, Metadata::MetadataArgon(metadata)) => { - let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; - KeyDerivationData::Argon2iMod(passphrase, master_key_salt) - } - _ => { - return Err(err_msg( - IndyErrorKind::WalletAccessFailed, - "Invalid combination of KeyDerivationMethod and Metadata", - )) - } - }; - - Ok(data) - } - - pub fn calc_master_key(&self) -> IndyResult { - match self { - KeyDerivationData::Raw(passphrase) => _raw_master_key(passphrase), - KeyDerivationData::Argon2iInt(passphrase, salt) => { - _derive_master_key(passphrase, salt, &KeyDerivationMethod::ARGON2I_INT) - } - KeyDerivationData::Argon2iMod(passphrase, salt) => { - _derive_master_key(passphrase, salt, &KeyDerivationMethod::ARGON2I_MOD) - } - } - } -} - -fn _derive_master_key( - passphrase: &str, - salt: &pwhash_argon2i13::Salt, - key_derivation_method: &KeyDerivationMethod, -) -> IndyResult { - let key = chacha20poly1305_ietf::derive_key(passphrase, salt, key_derivation_method)?; - Ok(key) -} - -fn _raw_master_key(passphrase: &str) -> IndyResult { - let bytes = bs58::decode(passphrase).into_vec()?; - - chacha20poly1305_ietf::Key::from_slice(&bytes).map_err(|err| err.extend("Invalid mastery key")) -} - -pub(super) fn encrypt_tag_names( - tag_names: &[&str], - tag_name_key: &chacha20poly1305_ietf::Key, - tags_hmac_key: &hmacsha256::Key, -) -> Vec { - tag_names - .iter() - .map(|tag_name| { - if tag_name.starts_with('~') { - TagName::OfPlain(encrypt_as_searchable( - &tag_name.as_bytes()[1..], - tag_name_key, - tags_hmac_key, - )) - } else { - TagName::OfEncrypted(encrypt_as_searchable( - tag_name.as_bytes(), - tag_name_key, - tags_hmac_key, - )) - } - }) - .collect::>() -} - -pub(super) fn encrypt_tags( - tags: &HashMap, - tag_name_key: &chacha20poly1305_ietf::Key, - tag_value_key: &chacha20poly1305_ietf::Key, - tags_hmac_key: &hmacsha256::Key, -) -> Vec { - tags.iter() - .map(|(tag_name, tag_value)| { - if tag_name.starts_with('~') { - // '~' character on start is skipped. 
- Tag::PlainText( - encrypt_as_searchable(&tag_name.as_bytes()[1..], tag_name_key, tags_hmac_key), - tag_value.to_string(), - ) - } else { - Tag::Encrypted( - encrypt_as_searchable(tag_name.as_bytes(), tag_name_key, tags_hmac_key), - encrypt_as_searchable(tag_value.as_bytes(), tag_value_key, tags_hmac_key), - ) - } - }) - .collect::>() -} - -pub(super) fn encrypt_as_searchable( - data: &[u8], - key: &chacha20poly1305_ietf::Key, - hmac_key: &hmacsha256::Key, -) -> Vec { - let tag = hmacsha256::authenticate(data, hmac_key); - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&tag[..chacha20poly1305_ietf::NONCEBYTES]) - .unwrap(); // We can safely unwrap here - let ct = chacha20poly1305_ietf::encrypt(data, key, &nonce); - - let mut result: Vec = Default::default(); - result.extend_from_slice(&nonce[..]); - result.extend_from_slice(&ct); - result -} - -pub(super) fn encrypt_as_not_searchable(data: &[u8], key: &chacha20poly1305_ietf::Key) -> Vec { - let (ct, nonce) = chacha20poly1305_ietf::gen_nonce_and_encrypt(data, key); - - let mut result: Vec = Default::default(); - result.extend_from_slice(&nonce[..]); - result.extend_from_slice(&ct); - result -} - -pub(super) fn decrypt( - data: &[u8], - key: &chacha20poly1305_ietf::Key, - nonce: &chacha20poly1305_ietf::Nonce, -) -> IndyResult> { - let res = chacha20poly1305_ietf::decrypt(data, key, nonce)?; - Ok(res) -} - -pub(super) fn decrypt_merged( - joined_data: &[u8], - key: &chacha20poly1305_ietf::Key, -) -> IndyResult> { - let nonce = - chacha20poly1305_ietf::Nonce::from_slice(&joined_data[..chacha20poly1305_ietf::NONCEBYTES]) - .unwrap(); // We can safety unwrap here - let data = &joined_data[chacha20poly1305_ietf::NONCEBYTES..]; - let res = decrypt(data, key, &nonce)?; - Ok(res) -} - -pub(super) fn decrypt_tags( - etags: &Option>, - tag_name_key: &chacha20poly1305_ietf::Key, - tag_value_key: &chacha20poly1305_ietf::Key, -) -> IndyResult>> { - match *etags { - None => Ok(None), - Some(ref etags) => { - let mut tags: HashMap = HashMap::new(); - - for etag in etags { - let (name, value) = match *etag { - Tag::PlainText(ref ename, ref value) => { - let name = match decrypt_merged(ename, tag_name_key) { - Err(err) => { - return Err(err.to_indy( - IndyErrorKind::WalletEncryptionError, - "Unable to decrypt tag name", - )) - } - Ok(tag_name_bytes) => format!( - "~{}", - str::from_utf8(&tag_name_bytes).to_indy( - IndyErrorKind::WalletEncryptionError, - "Plaintext Tag name is invalid utf8" - )? - ), - }; - (name, value.clone()) - } - Tag::Encrypted(ref ename, ref evalue) => { - let name = String::from_utf8(decrypt_merged(ename, tag_name_key)?) - .to_indy( - IndyErrorKind::WalletEncryptionError, - "Tag name is invalid utf8", - )?; - let value = String::from_utf8(decrypt_merged(evalue, tag_value_key)?) 
- .to_indy( - IndyErrorKind::WalletEncryptionError, - "Tag value is invalid utf8", - )?; - (name, value) - } - }; - tags.insert(name, value); - } - - Ok(Some(tags)) - } - } -} - -pub(super) fn decrypt_storage_record( - record: &StorageRecord, - keys: &Keys, -) -> IndyResult { - let decrypted_name = decrypt_merged(&record.id, &keys.name_key)?; - - let decrypted_name = String::from_utf8(decrypted_name).to_indy( - IndyErrorKind::WalletEncryptionError, - "Record is invalid utf8", - )?; - - let decrypted_value = match record.value { - Some(ref value) => Some(value.decrypt(&keys.value_key)?), - None => None, - }; - - let decrypted_type = match record.type_ { - Some(ref type_) => { - let decrypted_type = decrypt_merged(type_, &keys.type_key)?; - Some(String::from_utf8(decrypted_type).to_indy( - IndyErrorKind::WalletEncryptionError, - "Record type is invalid utf8", - )?) - } - None => None, - }; - - let decrypted_tags = decrypt_tags(&record.tags, &keys.tag_name_key, &keys.tag_value_key)?; - Ok(WalletRecord::new( - decrypted_name, - decrypted_type, - decrypted_value, - decrypted_tags, - )) -} - -// #[cfg(test)] -// mod tests { -// use crate::wallet::EncryptedValue; -// use crate::wallet::Keys; -// use indy_utils::crypto::hmacsha256; - -// use super::*; - -// #[test] -// fn test_encrypt_decrypt_searchable() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let decrypted_data = decrypt_merged(&encrypted_data, &key).unwrap(); - -// assert_eq!(&decrypted_data[..], data.as_bytes()); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_wrong_key() { -// let key = chacha20poly1305_ietf::gen_key(); -// let key2 = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let res = decrypt_merged(&encrypted_data, &key2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_nonce_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "test_data"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let byte_value = encrypted_data[3]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[3] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_data_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let index = encrypted_data.len() - 1; -// let byte_value = encrypted_data[index]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[index] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_searchable_returns_error_if_tag_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); -// let data = 
"12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_searchable(data.as_bytes(), &key, &hmac_key); -// let byte_value = encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let decrypted_data = decrypt_merged(&encrypted_data, &key).unwrap(); - -// assert_eq!(&decrypted_data[..], data.as_bytes()); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_wrong_key() { -// let key = chacha20poly1305_ietf::gen_key(); -// let key2 = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let res = decrypt_merged(&encrypted_data, &key2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_nonce_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "test_data"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let byte_value = encrypted_data[3]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[3] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_data_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let index = encrypted_data.len() - 1; -// let byte_value = encrypted_data[index]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[index] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_not_searchable_returns_error_if_tag_modified() { -// let key = chacha20poly1305_ietf::gen_key(); -// let data = "12345678901234567890123456789012345678901234567890"; - -// let mut encrypted_data = encrypt_as_not_searchable(data.as_bytes(), &key); -// let byte_value = encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1]; -// let new_byte_value = if byte_value == 255 { 0 } else { byte_value + 1 }; -// encrypted_data[chacha20poly1305_ietf::NONCEBYTES + 1] = new_byte_value; -// let res = decrypt_merged(&encrypted_data, &key); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } - -// #[test] -// fn test_encrypt_decrypt_tags() { -// let tags = serde_json::from_str(r#"{"tag1":"value1", "tag2":"value2", -// "~tag3":"value3"}"#).unwrap(); - -// let tag_name_key = chacha20poly1305_ietf::gen_key(); -// let tag_value_key = chacha20poly1305_ietf::gen_key(); -// let hmac_key = hmacsha256::gen_key(); - -// let c = encrypt_tags(&tags, &tag_name_key, &tag_value_key, &hmac_key); -// let u = decrypt_tags(&Some(c), &tag_name_key, &tag_value_key).unwrap().unwrap(); -// assert_eq!(tags, u); -// } - -// #[test] -// fn 
test_decrypt_tags_works_for_none() { -// let tag_name_key = chacha20poly1305_ietf::gen_key(); -// let tag_value_key = chacha20poly1305_ietf::gen_key(); - -// let u = decrypt_tags(&None, &tag_name_key, &tag_value_key).unwrap(); -// assert!(u.is_none()); -// } - -// #[test] -// fn test_decrypt_storage_record_works() { -// let keys = Keys::new(); -// let name = "test_name"; -// let value = "test_value"; -// let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); -// let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, -// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), -// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); -// tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); -// tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, -// &keys.tags_hmac_key); - -// let storage_record = StorageRecord { -// id: encrypted_name, -// value: Some(encrypted_value), -// type_: Some(encrypted_type), -// tags: Some(encrypted_tags), -// }; -// let decrypted_wallet_record = decrypt_storage_record(&storage_record, &keys).unwrap(); - -// assert_eq!(&decrypted_wallet_record.id, name); -// assert_eq!(&decrypted_wallet_record.value.unwrap(), value); -// assert_eq!(&decrypted_wallet_record.type_.unwrap(), type_); -// assert_eq!(&decrypted_wallet_record.tags.unwrap(), &tags); -// } - -// #[test] -// fn test_decrypt_storage_record_fails_if_wrong_keys() { -// let keys = Keys::new(); -// let keys2 = Keys::new(); -// let name = "test_name"; -// let value = "test_value"; -// let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); -// let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, -// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), -// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); -// tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); -// tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, -// &keys.tags_hmac_key); - -// let storage_record = StorageRecord { -// id: encrypted_name, -// value: Some(encrypted_value), -// type_: Some(encrypted_type), -// tags: Some(encrypted_tags), -// }; -// let res = decrypt_storage_record(&storage_record, &keys2); - -// assert_kind!(IndyErrorKind::InvalidStructure, res); -// } -// } diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs deleted file mode 100644 index a8e646f161..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/export_import.rs +++ /dev/null @@ -1,329 +0,0 @@ -use std::{ - io, - io::{BufReader, BufWriter, Read, Write}, - sync::Arc, - time::{SystemTime, UNIX_EPOCH}, -}; - -use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; -use indy_api_types::{ - domain::wallet::{IndyRecord, KeyDerivationMethod}, - errors::prelude::*, -}; -use indy_utils::crypto::{ - chacha20poly1305_ietf, - hash::{hash, HASHBYTES}, - pwhash_argon2i13, -}; -use serde::{Deserialize, Serialize}; - -use crate::{encryption::KeyDerivationData, Wallet, WalletRecord}; - -const CHUNK_SIZE: usize = 1024; - -#[derive(Debug, Serialize, Deserialize)] -pub enum EncryptionMethod { - // **ChaCha20-Poly1305-IETF** cypher in blocks per 
chunk_size bytes - ChaCha20Poly1305IETF { - // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase - salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, - // **ChaCha20-Poly1305-IETF interactive key derivation** cypher in blocks per chunk_size bytes - ChaCha20Poly1305IETFInteractive { - // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase - salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, - // **ChaCha20-Poly1305-IETF raw key** cypher in blocks per chunk_size bytes - ChaCha20Poly1305IETFRaw { - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each - // chunk to be sure in export file consistency - nonce: Vec, - // size of encrypted chunk - chunk_size: usize, - }, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct Header { - // Method of encryption for encrypted stream - pub encryption_method: EncryptionMethod, - // Export time in seconds from UNIX Epoch - pub time: u64, - // Version of header - pub version: u32, -} - -// Note that we use externally tagged enum serialization and header will be represented as: -// -// { -// "encryption_method": { -// "ChaCha20Poly1305IETF": { -// "salt": .., -// "nonce": .., -// "chunk_size": .., -// }, -// }, -// "time": .., -// "version": .., -// } - -pub(super) async fn export_continue( - wallet: Arc, - writer: &mut (dyn Write + Send + Sync), - version: u32, - key: chacha20poly1305_ietf::Key, - key_data: &KeyDerivationData, -) -> IndyResult<()> { - let nonce = chacha20poly1305_ietf::gen_nonce(); - let chunk_size = CHUNK_SIZE; - - let encryption_method = match key_data { - KeyDerivationData::Argon2iMod(_, salt) => EncryptionMethod::ChaCha20Poly1305IETF { - salt: salt[..].to_vec(), - nonce: nonce[..].to_vec(), - chunk_size, - }, - KeyDerivationData::Argon2iInt(_, salt) => { - EncryptionMethod::ChaCha20Poly1305IETFInteractive { - salt: salt[..].to_vec(), - nonce: nonce[..].to_vec(), - chunk_size, - } - } - KeyDerivationData::Raw(_) => EncryptionMethod::ChaCha20Poly1305IETFRaw { - nonce: nonce[..].to_vec(), - chunk_size, - }, - }; - - let header = Header { - encryption_method, - time: SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_secs(), - version, - }; - - let header = rmp_serde::to_vec(&header).to_indy( - IndyErrorKind::InvalidState, - "Can't serialize wallet export file header", - )?; - - // Write plain - let mut writer = BufWriter::new(writer); - writer.write_u32::(header.len() as u32)?; - writer.write_all(&header)?; - - // Write ecnrypted - let mut writer = chacha20poly1305_ietf::Writer::new(writer, key, nonce, chunk_size); - - writer.write_all(&hash(&header)?)?; - - let mut records = wallet.get_all().await?; - - while let Some(WalletRecord { - type_, - id, - value, - tags, - }) = records.next().await? 
- { - let record = IndyRecord { - type_: type_.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No type fetched for exported record", - ) - })?, - id, - value: value.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No value fetched for exported record", - ) - })?, - tags: tags.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No tags fetched for exported record", - ) - })?, - }; - - let record = rmp_serde::to_vec(&record) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize record")?; - - writer.write_u32::(record.len() as u32)?; - writer.write_all(&record)?; - } - - writer.write_u32::(0)?; // END message - writer.flush()?; - Ok(()) -} - -#[allow(clippy::type_complexity)] -pub(super) fn preparse_file_to_import( - reader: T, - passphrase: &str, -) -> IndyResult<( - BufReader, - KeyDerivationData, - chacha20poly1305_ietf::Nonce, - usize, - Vec, -)> -where - T: Read, -{ - // Reads plain - let mut reader = BufReader::new(reader); - - let header_len = reader.read_u32::().map_err(_map_io_err)? as usize; - - if header_len == 0 { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Invalid header length", - )); - } - - let mut header_bytes = vec![0u8; header_len]; - reader.read_exact(&mut header_bytes).map_err(_map_io_err)?; - - let header: Header = rmp_serde::from_slice(&header_bytes) - .to_indy(IndyErrorKind::InvalidStructure, "Header is malformed json")?; - - if header.version != 0 { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Unsupported version", - )); - } - - let key_derivation_method = match header.encryption_method { - EncryptionMethod::ChaCha20Poly1305IETF { .. } => KeyDerivationMethod::ARGON2I_MOD, - EncryptionMethod::ChaCha20Poly1305IETFInteractive { .. } => { - KeyDerivationMethod::ARGON2I_INT - } - EncryptionMethod::ChaCha20Poly1305IETFRaw { .. 
} => KeyDerivationMethod::RAW, - }; - - let (import_key_derivation_data, nonce, chunk_size) = match header.encryption_method { - EncryptionMethod::ChaCha20Poly1305IETF { - salt, - nonce, - chunk_size, - } - | EncryptionMethod::ChaCha20Poly1305IETFInteractive { - salt, - nonce, - chunk_size, - } => { - let salt = pwhash_argon2i13::Salt::from_slice(&salt) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid salt")?; - - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&nonce) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid nonce")?; - - let passphrase = passphrase.to_owned(); - - let key_data = match key_derivation_method { - KeyDerivationMethod::ARGON2I_INT => KeyDerivationData::Argon2iInt(passphrase, salt), - KeyDerivationMethod::ARGON2I_MOD => KeyDerivationData::Argon2iMod(passphrase, salt), - _ => unimplemented!("FIXME"), //FIXME - }; - - (key_data, nonce, chunk_size) - } - EncryptionMethod::ChaCha20Poly1305IETFRaw { nonce, chunk_size } => { - let nonce = chacha20poly1305_ietf::Nonce::from_slice(&nonce) - .to_indy(IndyErrorKind::InvalidStructure, "Invalid nonce")?; - - let key_data = KeyDerivationData::Raw(passphrase.to_owned()); - - (key_data, nonce, chunk_size) - } - }; - - Ok(( - reader, - import_key_derivation_data, - nonce, - chunk_size, - header_bytes, - )) -} - -pub(super) async fn finish_import( - wallet: &Wallet, - reader: BufReader, - key: chacha20poly1305_ietf::Key, - nonce: chacha20poly1305_ietf::Nonce, - chunk_size: usize, - header_bytes: Vec, -) -> IndyResult<()> -where - T: Read, -{ - // Reads encrypted - let mut reader = chacha20poly1305_ietf::Reader::new(reader, key, nonce, chunk_size); - - let mut header_hash = vec![0u8; HASHBYTES]; - reader.read_exact(&mut header_hash).map_err(_map_io_err)?; - - if hash(&header_bytes)? != header_hash { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Invalid header hash", - )); - } - - loop { - let record_len = reader.read_u32::().map_err(_map_io_err)? 
as usize; - - if record_len == 0 { - break; - } - - let mut record = vec![0u8; record_len]; - reader.read_exact(&mut record).map_err(_map_io_err)?; - - let record: IndyRecord = rmp_serde::from_slice(&record).to_indy( - IndyErrorKind::InvalidStructure, - "Record is malformed msgpack", - )?; - - wallet - .add(&record.type_, &record.id, &record.value, &record.tags, true) - .await?; - } - - Ok(()) -} - -fn _map_io_err(e: io::Error) -> IndyError { - match e { - ref e - if e.kind() == io::ErrorKind::UnexpectedEof - || e.kind() == io::ErrorKind::InvalidData => - { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid export file format", - ) - } - e => e.to_indy(IndyErrorKind::IOError, "Can't read export file"), - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs deleted file mode 100644 index 81125d769d..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/iterator.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::sync::Arc; - -use indy_api_types::errors::IndyError; - -use super::{ - encryption::decrypt_storage_record, storage::StorageIterator, wallet::Keys, WalletRecord, -}; - -pub struct WalletIterator { - storage_iterator: Box, - keys: Arc, -} - -impl WalletIterator { - pub fn new(storage_iter: Box, keys: Arc) -> Self { - WalletIterator { - storage_iterator: storage_iter, - keys, - } - } - - pub async fn next(&mut self) -> Result, IndyError> { - let next_storage_entity = self.storage_iterator.next().await?; - - if let Some(next_storage_entity) = next_storage_entity { - Ok(Some(decrypt_storage_record( - &next_storage_entity, - &self.keys, - )?)) - } else { - Ok(None) - } - } - - pub fn get_total_count(&self) -> Result, IndyError> { - self.storage_iterator.get_total_count() - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs deleted file mode 100644 index 6b6b41a0a8..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/language.rs +++ /dev/null @@ -1,152 +0,0 @@ -use std::string; - -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; - -#[derive(Debug, PartialEq, Eq, Hash, Clone)] -pub enum TagName { - EncryptedTagName(Vec), - PlainTagName(Vec), -} - -impl TagName { - pub fn from(s: String) -> IndyResult { - if s.is_empty() || s.starts_with('~') && s.len() == 1 { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Tag name must not be empty", - )); - } - - if s.starts_with('~') { - Ok(TagName::PlainTagName(s.into_bytes()[1..].to_vec())) - } else { - Ok(TagName::EncryptedTagName(s.into_bytes())) - } - } -} - -impl string::ToString for TagName { - fn to_string(&self) -> String { - match *self { - TagName::EncryptedTagName(ref v) => format!(r#""{}""#, base64::encode(v)), - TagName::PlainTagName(ref v) => format!(r#""~{}""#, base64::encode(v)), - } - } -} - -#[derive(Debug, PartialEq, Hash, Eq, Clone)] -pub enum TargetValue { - Unencrypted(String), - Encrypted(Vec), -} - -impl From for TargetValue { - fn from(s: String) -> TargetValue { - TargetValue::Unencrypted(s) - } -} - -impl string::ToString for TargetValue { - fn to_string(&self) -> String { - match *self { - TargetValue::Unencrypted(ref s) => format!(r#""{}""#, s), - TargetValue::Encrypted(ref v) => format!(r#""{}""#, base64::encode(v)), - } - } -} - -#[derive(Debug, Hash, Clone)] -pub enum Operator { - And(Vec), - Or(Vec), - Not(Box), - Eq(TagName, TargetValue), - Neq(TagName, TargetValue), - Gt(TagName, TargetValue), - 
Gte(TagName, TargetValue), - Lt(TagName, TargetValue), - Lte(TagName, TargetValue), - Like(TagName, TargetValue), - In(TagName, Vec), -} - -impl string::ToString for Operator { - fn to_string(&self) -> String { - match *self { - Operator::Eq(ref tag_name, ref tag_value) => { - format!(r#"{{{}:{}}}"#, tag_name.to_string(), tag_value.to_string()) - } - Operator::Neq(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$neq":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Gt(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$gt":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Gte(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$gte":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Lt(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$lt":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Lte(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$lte":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::Like(ref tag_name, ref tag_value) => format!( - r#"{{{}:{{"$like":{}}}}}"#, - tag_name.to_string(), - tag_value.to_string() - ), - Operator::In(ref tag_name, ref tag_values) => { - format!( - r#"{{{}:{{"$in":[{}]}}}}"#, - tag_name.to_string(), - tag_values - .iter() - .map(|v| v.to_string()) - .collect::>() - .join(",") - ) - } - Operator::And(ref operators) => { - if !operators.is_empty() { - format!( - r#"{{"$and":[{}]}}"#, - operators - .iter() - .map(|o: &Operator| { o.to_string() }) - .collect::>() - .join(",") - ) - } else { - "{}".to_string() - } - } - Operator::Or(ref operators) => { - if !operators.is_empty() { - format!( - r#"{{"$or":[{}]}}"#, - operators - .iter() - .map(|o: &Operator| { o.to_string() }) - .collect::>() - .join(",") - ) - } else { - "{}".to_string() - } - } - Operator::Not(ref stmt) => format!(r#"{{"$not":{}}}"#, stmt.to_string()), - } - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs deleted file mode 100644 index 48df31dac3..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/lib.rs +++ /dev/null @@ -1,1206 +0,0 @@ -// allow all clippy warnings, given this is legacy to be removed soon -#![allow(clippy::all)] -use std::{ - collections::{HashMap, HashSet}, - fmt, fs, - io::BufReader, - path::PathBuf, - sync::{Arc, Mutex}, - unimplemented, -}; - -use indy_api_types::{ - domain::wallet::{CacheConfig, Config, Credentials, ExportConfig, Tags}, - errors::prelude::*, - WalletHandle, -}; -use indy_utils::{ - crypto::chacha20poly1305_ietf::{self, Key as MasterKey}, - secret, -}; -use iterator::WalletIterator; -use log::trace; -use serde::{Deserialize, Serialize}; -use serde_json::Value as SValue; - -pub use crate::encryption::KeyDerivationData; -use crate::{ - cache::wallet_cache::{WalletCache, WalletCacheHitData, WalletCacheHitMetrics}, - export_import::{export_continue, finish_import, preparse_file_to_import}, - storage::{ - default::SQLiteStorageType, mysql::MySqlStorageType, WalletStorage, WalletStorageType, - }, - wallet::{Keys, Wallet}, -}; - -mod encryption; -pub mod iterator; -mod query_encryption; -mod storage; - -// TODO: Remove query language out of wallet module -pub mod language; - -mod cache; -mod export_import; -mod wallet; - -#[allow(clippy::type_complexity)] -pub struct WalletService { - storage_types: Mutex>>, - wallets: Mutex>>, - wallet_ids: Mutex>, - pending_for_open: Mutex< - HashMap< - WalletHandle, - ( - String, /* id */ - 
Box, - Metadata, - Option, - ), - >, - >, - pending_for_import: Mutex< - HashMap< - WalletHandle, - ( - BufReader<::std::fs::File>, - chacha20poly1305_ietf::Nonce, - usize, - Vec, - KeyDerivationData, - ), - >, - >, - cache_hit_metrics: WalletCacheHitMetrics, -} - -#[allow(clippy::new_without_default)] -impl WalletService { - pub fn new() -> WalletService { - let storage_types = { - let s1: Arc = Arc::new(SQLiteStorageType::new()); - let s2: Arc = Arc::new(MySqlStorageType::new()); - - Mutex::new(HashMap::from([ - ("default".to_string(), s1), - ("mysql".to_string(), s2), - ])) - }; - - WalletService { - storage_types, - wallets: Mutex::new(HashMap::new()), - wallet_ids: Mutex::new(HashSet::new()), - pending_for_open: Mutex::new(HashMap::new()), - pending_for_import: Mutex::new(HashMap::new()), - cache_hit_metrics: WalletCacheHitMetrics::new(), - } - } - - pub async fn create_wallet( - &self, - config: &Config, - credentials: &Credentials, - key: (&KeyDerivationData, &MasterKey), - ) -> IndyResult<()> { - self._create_wallet(config, credentials, key).await?; - Ok(()) - } - - async fn _create_wallet( - &self, - config: &Config, - credentials: &Credentials, - (key_data, master_key): (&KeyDerivationData, &MasterKey), - ) -> IndyResult { - trace!( - "create_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - let keys = Keys::new(); - let metadata = self._prepare_metadata(master_key, key_data, &keys)?; - - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - storage_type - .create_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - &metadata, - ) - .await?; - - Ok(keys) - } - - pub async fn delete_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Metadata, KeyDerivationData)> { - trace!( - "delete_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - if self - .wallet_ids - .lock() - .unwrap() - .contains(&WalletService::_get_wallet_id(config)) - { - return Err(err_msg( - IndyErrorKind::InvalidState, - format!( - "Wallet has to be closed before deleting: {:?}", - WalletService::_get_wallet_id(config) - ), - )); - } - - // check credentials and close connection before deleting wallet - - let (_, metadata, key_derivation_data) = self - ._open_storage_and_fetch_metadata(config, credentials) - .await?; - - Ok((metadata, key_derivation_data)) - } - - pub async fn delete_wallet_continue( - &self, - config: &Config, - credentials: &Credentials, - metadata: &Metadata, - master_key: &MasterKey, - ) -> IndyResult<()> { - trace!( - "delete_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(credentials) - ); - - { - self._restore_keys(metadata, master_key)?; - } - - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - storage_type - .delete_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - ) - .await?; - - trace!("delete_wallet <<<"); - Ok(()) - } - - pub async fn open_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(WalletHandle, KeyDerivationData, Option)> { - trace!( - "open_wallet >>> config: {:?}, credentials: {:?}", - config, - secret!(&credentials) - ); - - self._is_id_from_config_not_used(config)?; - - let (storage, metadata, key_derivation_data) = self - ._open_storage_and_fetch_metadata(config, credentials) - .await?; - 
- let wallet_handle = indy_utils::next_wallet_handle(); - - let rekey_data: Option = credentials.rekey.as_ref().map(|rekey| { - KeyDerivationData::from_passphrase_with_new_salt( - rekey, - &credentials.rekey_derivation_method, - ) - }); - - self.pending_for_open.lock().unwrap().insert( - wallet_handle, - ( - WalletService::_get_wallet_id(config), - storage, - metadata, - rekey_data.clone(), - ), - ); - - Ok((wallet_handle, key_derivation_data, rekey_data)) - } - - pub async fn open_wallet_continue( - &self, - wallet_handle: WalletHandle, - master_key: (&MasterKey, Option<&MasterKey>), - cache_config: Option, - ) -> IndyResult { - let (id, storage, metadata, rekey_data) = self - .pending_for_open - .lock() - .unwrap() - .remove(&wallet_handle) - .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "Open data not found"))?; - - let (master_key, rekey) = master_key; - let keys = self._restore_keys(&metadata, master_key)?; - - // Rotate master key - if let (Some(rekey), Some(rekey_data)) = (rekey, rekey_data) { - let metadata = self._prepare_metadata(rekey, &rekey_data, &keys)?; - storage.set_storage_metadata(&metadata).await?; - } - - let wallet = Wallet::new( - id.clone(), - storage, - Arc::new(keys), - WalletCache::new(cache_config), - ); - - self.wallets - .lock() - .unwrap() - .insert(wallet_handle, Arc::new(wallet)); - - self.wallet_ids.lock().unwrap().insert(id.to_string()); - - trace!("open_wallet <<< res: {:?}", wallet_handle); - - Ok(wallet_handle) - } - - async fn _open_storage_and_fetch_metadata( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Box, Metadata, KeyDerivationData)> { - let storage = self._open_storage(config, credentials).await?; - - let metadata: Metadata = { - let metadata = storage.get_storage_metadata().await?; - - serde_json::from_slice(&metadata) - .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize metadata")? 
- }; - - let key_derivation_data = KeyDerivationData::from_passphrase_and_metadata( - &credentials.key, - &metadata, - &credentials.key_derivation_method, - )?; - - Ok((storage, metadata, key_derivation_data)) - } - - pub async fn close_wallet(&self, handle: WalletHandle) -> IndyResult<()> { - trace!("close_wallet >>> handle: {:?}", handle); - - let wallet = self.wallets.lock().unwrap().remove(&handle); - - let wallet = if let Some(wallet) = wallet { - wallet - } else { - return Err(err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown wallet handle", - )); - }; - - self.wallet_ids.lock().unwrap().remove(wallet.get_id()); - - trace!("close_wallet <<<"); - - Ok(()) - } - - fn _map_wallet_storage_error(err: IndyError, type_: &str, name: &str) -> IndyError { - match err.kind() { - IndyErrorKind::WalletItemAlreadyExists => err_msg( - IndyErrorKind::WalletItemAlreadyExists, - format!( - "Wallet item already exists with type: {}, id: {}", - type_, name - ), - ), - IndyErrorKind::WalletItemNotFound => err_msg( - IndyErrorKind::WalletItemNotFound, - format!("Wallet item not found with type: {}, id: {}", type_, name), - ), - _ => err, - } - } - - pub async fn add_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - value: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .add(type_, name, value, tags, true) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn add_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - value: &str, - tags: &Tags, - ) -> IndyResult<()> - where - T: Sized, - { - self.add_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - value, - tags, - ) - .await?; - - Ok(()) - } - - pub async fn add_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - tags: &Tags, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - let object_json = serde_json::to_string(object).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot serialize {:?}", short_type_name::()), - )?; - - self.add_indy_record::(wallet_handle, name, &object_json, tags) - .await?; - - Ok(object_json) - } - - pub async fn update_record_value( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - value: &str, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .update(type_, name, value) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn update_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - let type_ = short_type_name::(); - - let wallet = self.get_wallet(wallet_handle).await?; - - let object_json = serde_json::to_string(object).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot serialize {:?}", type_), - )?; - - wallet - .update(&self.add_prefix(type_), name, &object_json) - .await?; - - Ok(object_json) - } - - pub async fn add_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .add_tags(type_, name, tags) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn update_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tags: &Tags, - ) -> IndyResult<()> { - let wallet = 
self.get_wallet(wallet_handle).await?; - wallet - .update_tags(type_, name, tags) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_record_tags( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - tag_names: &[&str], - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .delete_tags(type_, name, tag_names) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - ) -> IndyResult<()> { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .delete(type_, name) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn delete_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - ) -> IndyResult<()> - where - T: Sized, - { - self.delete_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - ) - .await?; - - Ok(()) - } - - pub async fn get_record( - &self, - wallet_handle: WalletHandle, - type_: &str, - name: &str, - options_json: &str, - ) -> IndyResult { - let wallet = self.get_wallet(wallet_handle).await?; - wallet - .get(type_, name, options_json, &self.cache_hit_metrics) - .await - .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) - } - - pub async fn get_indy_record( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - self.get_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - options_json, - ) - .await - } - - pub async fn get_indy_record_value( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - let type_ = short_type_name::(); - - let record = self - .get_record(wallet_handle, &self.add_prefix(type_), name, options_json) - .await?; - - let record_value = record - .get_value() - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - format!("{} not found for id: {:?}", type_, name), - ) - })? - .to_string(); - - Ok(record_value) - } - - // Dirty hack. json must live longer then result T - pub async fn get_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult - where - T: ::serde::de::DeserializeOwned + Sized, - { - let record_value = self - .get_indy_record_value::(wallet_handle, name, options_json) - .await?; - - serde_json::from_str(&record_value).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize {:?}", short_type_name::()), - ) - } - - // Dirty hack. 
json must live longer then result T - pub async fn get_indy_opt_object( - &self, - wallet_handle: WalletHandle, - name: &str, - options_json: &str, - ) -> IndyResult> - where - T: ::serde::de::DeserializeOwned + Sized, - { - match self - .get_indy_object::(wallet_handle, name, options_json) - .await - { - Ok(res) => Ok(Some(res)), - Err(ref err) if err.kind() == IndyErrorKind::WalletItemNotFound => Ok(None), - Err(err) => Err(err), - } - } - - pub async fn search_records( - &self, - wallet_handle: WalletHandle, - type_: &str, - query_json: &str, - options_json: &str, - ) -> IndyResult { - let wallet = self.get_wallet(wallet_handle).await?; - - Ok(WalletSearch { - iter: wallet.search(type_, query_json, Some(options_json)).await?, - }) - } - - pub async fn search_indy_records( - &self, - wallet_handle: WalletHandle, - query_json: &str, - options_json: &str, - ) -> IndyResult - where - T: Sized, - { - self.search_records( - wallet_handle, - &self.add_prefix(short_type_name::()), - query_json, - options_json, - ) - .await - } - - #[allow(dead_code)] // TODO: Should we implement getting all records or delete everywhere? - pub fn search_all_records(&self, _wallet_handle: WalletHandle) -> IndyResult { - // match self.wallets.lock().await.get(&wallet_handle) { - // Some(wallet) => wallet.search_all_records(), - // None => Err(IndyError::InvalidHandle(wallet_handle.to_string())) - // } - unimplemented!() - } - - pub async fn upsert_indy_object( - &self, - wallet_handle: WalletHandle, - name: &str, - object: &T, - ) -> IndyResult - where - T: ::serde::Serialize + Sized, - { - if self.record_exists::(wallet_handle, name).await? { - self.update_indy_object::(wallet_handle, name, object) - .await - } else { - self.add_indy_object::(wallet_handle, name, object, &HashMap::new()) - .await - } - } - - pub async fn record_exists( - &self, - wallet_handle: WalletHandle, - name: &str, - ) -> IndyResult - where - T: Sized, - { - match self - .get_record( - wallet_handle, - &self.add_prefix(short_type_name::()), - name, - &RecordOptions::id(), - ) - .await - { - Ok(_) => Ok(true), - Err(ref err) if err.kind() == IndyErrorKind::WalletItemNotFound => Ok(false), - Err(err) => Err(err), - } - } - - pub async fn check(&self, handle: WalletHandle) -> IndyResult<()> { - self.get_wallet(handle).await?; - Ok(()) - } - - pub async fn get_all(&self, handle: WalletHandle) -> IndyResult { - let wallet = self.get_wallet(handle).await?; - wallet.get_all().await - } - - pub async fn export_wallet( - &self, - wallet_handle: WalletHandle, - export_config: &ExportConfig, - version: u32, - key: (&KeyDerivationData, &MasterKey), - ) -> IndyResult<()> { - trace!( - "export_wallet >>> wallet_handle: {:?}, export_config: {:?}, version: {:?}", - wallet_handle, - secret!(export_config), - version - ); - - if version != 0 { - return Err(err_msg(IndyErrorKind::InvalidState, "Unsupported version")); - } - - let (key_data, key) = key; - - let wallet = self.get_wallet(wallet_handle).await?; - - let path = PathBuf::from(&export_config.path); - - if let Some(parent_path) = path.parent() { - fs::DirBuilder::new().recursive(true).create(parent_path)?; - } - - let mut export_file = fs::OpenOptions::new() - .write(true) - .create_new(true) - .open(export_config.path.clone())?; - - let res = export_continue(wallet, &mut export_file, version, key.clone(), key_data).await; - - trace!("export_wallet <<<"); - res - } - - pub async fn import_wallet_prepare( - &self, - config: &Config, - credentials: &Credentials, - export_config: &ExportConfig, - ) -> 
IndyResult<(WalletHandle, KeyDerivationData, KeyDerivationData)> { - trace!( - "import_wallet_prepare >>> config: {:?}, credentials: {:?}, export_config: {:?}", - config, - secret!(export_config), - secret!(export_config) - ); - - let exported_file_to_import = fs::OpenOptions::new() - .read(true) - .open(&export_config.path)?; - - let (reader, import_key_derivation_data, nonce, chunk_size, header_bytes) = - preparse_file_to_import(exported_file_to_import, &export_config.key)?; - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &credentials.key, - &credentials.key_derivation_method, - ); - - let wallet_handle = indy_utils::next_wallet_handle(); - - let stashed_key_data = key_data.clone(); - - self.pending_for_import.lock().unwrap().insert( - wallet_handle, - (reader, nonce, chunk_size, header_bytes, stashed_key_data), - ); - - Ok((wallet_handle, key_data, import_key_derivation_data)) - } - - pub async fn import_wallet_continue( - &self, - wallet_handle: WalletHandle, - config: &Config, - credentials: &Credentials, - key: (MasterKey, MasterKey), - ) -> IndyResult<()> { - let (reader, nonce, chunk_size, header_bytes, key_data) = self - .pending_for_import - .lock() - .unwrap() - .remove(&wallet_handle) - .unwrap(); - - let (import_key, master_key) = key; - - let keys = self - ._create_wallet(config, credentials, (&key_data, &master_key)) - .await?; - - self._is_id_from_config_not_used(config)?; - - let storage = self._open_storage(config, credentials).await?; - let metadata = storage.get_storage_metadata().await?; - - let res = { - let wallet = Wallet::new( - WalletService::_get_wallet_id(config), - storage, - Arc::new(keys), - WalletCache::new(None), - ); - - finish_import(&wallet, reader, import_key, nonce, chunk_size, header_bytes).await - }; - - if res.is_err() { - let metadata: Metadata = serde_json::from_slice(&metadata) - .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize metadata")?; - - self.delete_wallet_continue(config, credentials, &metadata, &master_key) - .await?; - } - - // self.close_wallet(wallet_handle)?; - - trace!("import_wallet <<<"); - res - } - - pub fn get_wallets_count(&self) -> usize { - self.wallets.lock().unwrap().len() - } - - pub fn get_wallet_ids_count(&self) -> usize { - self.wallet_ids.lock().unwrap().len() - } - - pub fn get_pending_for_import_count(&self) -> usize { - self.pending_for_import.lock().unwrap().len() - } - - pub fn get_pending_for_open_count(&self) -> usize { - self.pending_for_open.lock().unwrap().len() - } - - pub async fn get_wallet_cache_hit_metrics_data(&self) -> HashMap { - self.cache_hit_metrics.get_data() - } - - #[allow(clippy::type_complexity)] - fn _get_config_and_cred_for_storage( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult<(Arc, Option, Option)> { - let storage_type = { - let storage_type = config.storage_type.as_deref().unwrap_or("default"); - - self.storage_types - .lock() - .unwrap() - .get(storage_type) - .ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownWalletStorageType, - "Unknown wallet storage type", - ) - })? 
- .clone() - }; - - let storage_config = config.storage_config.as_ref().map(SValue::to_string); - - let storage_credentials = credentials - .storage_credentials - .as_ref() - .map(SValue::to_string); - - Ok((storage_type, storage_config, storage_credentials)) - } - - fn _is_id_from_config_not_used(&self, config: &Config) -> IndyResult<()> { - let id = WalletService::_get_wallet_id(config); - if self.wallet_ids.lock().unwrap().contains(&id) { - return Err(err_msg( - IndyErrorKind::WalletAlreadyOpened, - format!( - "Wallet {} already opened", - WalletService::_get_wallet_id(config) - ), - )); - } - - Ok(()) - } - - fn _get_wallet_id(config: &Config) -> String { - let wallet_path = config - .storage_config - .as_ref() - .and_then(|storage_config| storage_config["path"].as_str()) - .unwrap_or(""); - - format!("{}{}", config.id, wallet_path) - } - - async fn _open_storage( - &self, - config: &Config, - credentials: &Credentials, - ) -> IndyResult> { - let (storage_type, storage_config, storage_credentials) = - self._get_config_and_cred_for_storage(config, credentials)?; - - let storage = storage_type - .open_storage( - &config.id, - storage_config.as_deref(), - storage_credentials.as_deref(), - ) - .await?; - - Ok(storage) - } - - fn _prepare_metadata( - &self, - master_key: &chacha20poly1305_ietf::Key, - key_data: &KeyDerivationData, - keys: &Keys, - ) -> IndyResult> { - let encrypted_keys = keys.serialize_encrypted(master_key)?; - - let metadata = match key_data { - KeyDerivationData::Raw(_) => Metadata::MetadataRaw(MetadataRaw { - keys: encrypted_keys, - }), - KeyDerivationData::Argon2iInt(_, salt) | KeyDerivationData::Argon2iMod(_, salt) => { - Metadata::MetadataArgon(MetadataArgon { - keys: encrypted_keys, - master_key_salt: salt[..].to_vec(), - }) - } - }; - - let res = serde_json::to_vec(&metadata).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize wallet metadata", - )?; - - Ok(res) - } - - fn _restore_keys(&self, metadata: &Metadata, master_key: &MasterKey) -> IndyResult { - let metadata_keys = metadata.get_keys(); - - let res = Keys::deserialize_encrypted(metadata_keys, master_key).map_err(|err| { - err.map( - IndyErrorKind::WalletAccessFailed, - "Invalid master key provided", - ) - })?; - - Ok(res) - } - - pub const PREFIX: &'static str = "Indy"; - - pub fn add_prefix(&self, type_: &str) -> String { - format!("{}::{}", WalletService::PREFIX, type_) - } - - async fn get_wallet(&self, wallet_handle: WalletHandle) -> IndyResult> { - let wallets = self.wallets.lock().unwrap(); //await; - let w = wallets.get(&wallet_handle); - if let Some(w) = w { - Ok(w.clone()) - } else { - Err(err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown wallet handle", - )) - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(untagged)] -pub enum Metadata { - MetadataArgon(MetadataArgon), - MetadataRaw(MetadataRaw), -} - -impl Metadata { - pub fn get_keys(&self) -> &Vec { - match *self { - Metadata::MetadataArgon(ref metadata) => &metadata.keys, - Metadata::MetadataRaw(ref metadata) => &metadata.keys, - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct MetadataArgon { - pub keys: Vec, - pub master_key_salt: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct MetadataRaw { - pub keys: Vec, -} - -#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)] -pub struct WalletRecord { - #[serde(rename = "type")] - type_: Option, - id: String, - value: Option, - tags: Option, -} - -impl fmt::Debug for WalletRecord { - fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("WalletRecord") - .field("type_", &self.type_) - .field("id", &self.id) - .field("value", &self.value.as_ref().map(|_| "******")) - .field("tags", &self.tags) - .finish() - } -} - -impl Ord for WalletRecord { - fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { - (&self.type_, &self.id).cmp(&(&other.type_, &other.id)) - } -} - -impl PartialOrd for WalletRecord { - fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { - Some(self.cmp(other)) - } -} - -impl WalletRecord { - pub fn new( - name: String, - type_: Option, - value: Option, - tags: Option, - ) -> WalletRecord { - WalletRecord { - id: name, - type_, - value, - tags, - } - } - - pub fn get_id(&self) -> &str { - self.id.as_str() - } - - #[allow(dead_code)] - pub fn get_type(&self) -> Option<&str> { - self.type_.as_deref() - } - - pub fn get_value(&self) -> Option<&str> { - self.value.as_deref() - } - - #[allow(dead_code)] - pub fn get_tags(&self) -> Option<&Tags> { - self.tags.as_ref() - } -} - -fn default_true() -> bool { - true -} - -fn default_false() -> bool { - false -} - -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct RecordOptions { - #[serde(default = "default_false")] - retrieve_type: bool, - #[serde(default = "default_true")] - retrieve_value: bool, - #[serde(default = "default_false")] - retrieve_tags: bool, -} - -impl RecordOptions { - pub fn id() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: false, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } - - pub fn id_value() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } - - pub fn id_value_tags() -> String { - let options = RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: true, - }; - - serde_json::to_string(&options).unwrap() - } -} - -impl Default for RecordOptions { - fn default() -> RecordOptions { - RecordOptions { - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - } - } -} - -pub struct WalletSearch { - iter: iterator::WalletIterator, -} - -impl WalletSearch { - pub fn get_total_count(&self) -> IndyResult> { - self.iter.get_total_count() - } - - pub async fn fetch_next_record(&mut self) -> IndyResult> { - self.iter.next().await - } -} - -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SearchOptions { - #[serde(default = "default_true")] - retrieve_records: bool, - #[serde(default = "default_false")] - retrieve_total_count: bool, - #[serde(default = "default_false")] - retrieve_type: bool, - #[serde(default = "default_true")] - retrieve_value: bool, - #[serde(default = "default_false")] - retrieve_tags: bool, -} - -impl SearchOptions { - pub fn id_value() -> String { - let options = SearchOptions { - retrieve_records: true, - retrieve_total_count: true, - retrieve_type: true, - retrieve_value: true, - retrieve_tags: false, - }; - - serde_json::to_string(&options).unwrap() - } -} - -impl Default for SearchOptions { - fn default() -> SearchOptions { - SearchOptions { - retrieve_records: true, - retrieve_total_count: false, - retrieve_type: false, - retrieve_value: true, - retrieve_tags: false, - } - } -} - -fn short_type_name() -> &'static str { - let type_name = std::any::type_name::(); - type_name.rsplit("::").next().unwrap_or(type_name) -} diff 
--git a/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs deleted file mode 100644 index bf94a545c2..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/query_encryption.rs +++ /dev/null @@ -1,120 +0,0 @@ -use indy_api_types::errors::prelude::*; -use indy_utils::wql::Query; - -use super::{ - encryption::encrypt_as_searchable, - language::{Operator, TagName, TargetValue}, - wallet::Keys, -}; - -// Performs encryption of WQL query -// WQL query is provided as top-level Operator -pub(super) fn encrypt_query(query: Query, keys: &Keys) -> IndyResult { - transform(query, keys) -} - -fn transform(query: Query, keys: &Keys) -> IndyResult { - match query { - Query::Eq(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Eq(encrypted_name, encrypted_value)) - } - Query::Neq(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Neq(encrypted_name, encrypted_value)) - } - Query::Gt(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Gt(encrypted_name, encrypted_value)) - } - Query::Gte(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Gte(encrypted_name, encrypted_value)) - } - Query::Lt(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Lt(encrypted_name, encrypted_value)) - } - Query::Lte(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Lte(encrypted_name, encrypted_value)) - } - Query::Like(name, value) => { - let (encrypted_name, encrypted_value) = encrypt_name_value(name, value, keys)?; - Ok(Operator::Like(encrypted_name, encrypted_value)) - } - Query::In(name, values) => { - let ename = TagName::from(name.clone())?; - let ename = match ename { - TagName::EncryptedTagName(ref name) => { - let encrypted_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - TagName::EncryptedTagName(encrypted_name) - } - TagName::PlainTagName(ref name) => { - let encrypted_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - TagName::PlainTagName(encrypted_name) - } - }; - let mut encrypted_values: Vec = Vec::with_capacity(values.len()); - - for value in values { - encrypted_values.push(encrypt_name_value(name.clone(), value, keys)?.1); - } - Ok(Operator::In(ename, encrypted_values)) - } - Query::And(operators) => Ok(Operator::And(transform_list_operators(operators, keys)?)), - Query::Or(operators) => Ok(Operator::Or(transform_list_operators(operators, keys)?)), - Query::Not(boxed_operator) => { - Ok(Operator::Not(Box::new(transform(*boxed_operator, keys)?))) - } - } -} - -fn transform_list_operators(operators: Vec, keys: &Keys) -> IndyResult> { - let mut transformed = Vec::with_capacity(operators.len()); - - for operator in operators { - let transformed_operator = transform(operator, keys)?; - transformed.push(transformed_operator); - } - - Ok(transformed) -} - -// Encrypts a single tag name, tag value pair. 
-// If the tag name is EncryptedTagName enum variant, encrypts both the tag name and the tag value -// If the tag name is PlainTagName enum variant, encrypts only the tag name -fn encrypt_name_value( - name: String, - value: String, - keys: &Keys, -) -> IndyResult<(TagName, TargetValue)> { - let name = TagName::from(name)?; - let value = TargetValue::from(value); - match (name, value) { - (TagName::EncryptedTagName(ref name), TargetValue::Unencrypted(ref s)) => { - let encrypted_tag_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - let encrypted_tag_value = - encrypt_as_searchable(s.as_bytes(), &keys.tag_value_key, &keys.tags_hmac_key); - Ok(( - TagName::EncryptedTagName(encrypted_tag_name), - TargetValue::Encrypted(encrypted_tag_value), - )) - } - (TagName::PlainTagName(ref name), TargetValue::Unencrypted(ref s)) => { - let encrypted_tag_name = - encrypt_as_searchable(&name[..], &keys.tag_name_key, &keys.tags_hmac_key); - Ok(( - TagName::PlainTagName(encrypted_tag_name), - TargetValue::Unencrypted(s.clone()), - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Reached invalid combination of tag name and value while encrypting query", - )), - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs deleted file mode 100644 index d32790f65b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/mod.rs +++ /dev/null @@ -1,939 +0,0 @@ -use std::{ - collections::{HashMap, VecDeque}, - fs, -}; - -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; -use indy_utils::environment; -use log::LevelFilter; -use serde::Deserialize; -use sqlx::{ - sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions}, - ConnectOptions, SqlitePool, -}; - -use crate::{ - language, - storage::{StorageIterator, StorageRecord, Tag, TagName, WalletStorage, WalletStorageType}, - wallet::EncryptedValue, - RecordOptions, SearchOptions, -}; - -mod query; - -const _SQLITE_DB: &str = "sqlite.db"; - -struct SQLiteStorageIterator { - records: Option>, - total_count: Option, -} - -impl SQLiteStorageIterator { - fn new( - records: Option>, - total_count: Option, - ) -> IndyResult { - Ok(SQLiteStorageIterator { - records, - total_count, - }) - } -} - -#[async_trait] -impl StorageIterator for SQLiteStorageIterator { - async fn next(&mut self) -> IndyResult> { - if let Some(ref mut records) = self.records { - Ok(records.pop_front()) - } else { - Ok(None) - } - } - - fn get_total_count(&self) -> IndyResult> { - Ok(self.total_count.to_owned()) - } -} - -#[derive(Deserialize, Debug)] -struct Config { - path: Option, -} - -#[derive(Debug)] -struct SQLiteStorage { - pool: SqlitePool, -} - -pub struct SQLiteStorageType {} - -impl SQLiteStorageType { - pub fn new() -> SQLiteStorageType { - SQLiteStorageType {} - } - - fn _db_path(id: &str, config: Option<&Config>) -> std::path::PathBuf { - let mut path = match config { - Some(Config { - path: Some(ref path), - }) => std::path::PathBuf::from(path), - _ => environment::wallet_home_path(), - }; - - path.push(id); - path.push(_SQLITE_DB); - path - } -} - -#[async_trait] -impl WalletStorage for SQLiteStorage { - /// - /// Tries to fetch values and/or tags from the storage. - /// Returns Result with StorageEntity object which holds requested data in case of success or - /// Result with IndyError in case of failure. 
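   (A minimal usage sketch for orientation, illustrative only and not part of the original doc comment: it assumes a `storage: Box<dyn WalletStorage>` handle obtained from `WalletStorageType::open_storage`, and a record stored under the hypothetical type `b"credential"` with name `b"cred-1"`.)

       let rec = storage
           .get(b"credential", b"cred-1", &RecordOptions::id_value())
           .await?;
       assert!(rec.value.is_some()); // id_value() requests only the value; type_ and tags stay None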
- /// - /// - /// # Arguments - /// - /// * `type_` - type_ of the item in storage - /// * `id` - id of the item in storage - /// * `options` - JSon containing what needs to be fetched. - /// Example: {"retrieveValue": true, "retrieveTags": true} - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `StorageEntity` - Contains name, optional value and optional tags - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { - let options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - let mut conn = self.pool.acquire().await?; - - let (item_id, value, key): (i64, Vec, Vec) = - sqlx::query_as("SELECT id, value, key FROM items where type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *conn) - .await?; - - let value = if options.retrieve_value { - Some(EncryptedValue::new(value, key)) - } else { - None - }; - - let type_ = if options.retrieve_type { - Some(type_.to_vec()) - } else { - None - }; - - let tags = if options.retrieve_tags { - let mut tags = Vec::new(); - - tags.extend( - sqlx::query_as::<_, (Vec, String)>( - "SELECT name, value from tags_plaintext where item_id = ?", - ) - .bind(item_id) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| Tag::PlainText(r.0, r.1)), - ); - - tags.extend( - sqlx::query_as::<_, (Vec, Vec)>( - "SELECT name, value from tags_encrypted where item_id = ?", - ) - .bind(item_id) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| Tag::Encrypted(r.0, r.1)), - ); - - Some(tags) - } else { - None - }; - - Ok(StorageRecord::new(id.to_vec(), value, type_, tags)) - } - - /// - /// inserts value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// * `value` - value of the item in storage - /// * `value_key` - key used to encrypt the value - /// * `tags` - tags assigned to the value - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` class of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemAlreadyExists` - Item is already present in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let id = sqlx::query("INSERT INTO items (type, name, value, key) VALUES (?1, ?2, ?3, ?4)") - .bind(type_) - .bind(id) - .bind(&value.data) - .bind(&value.key) - .execute(&mut *tx) - .await? - .last_insert_rowid(); - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_encrypted (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? 
- } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_plaintext (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - async fn update(&self, type_: &[u8], id: &[u8], value: &EncryptedValue) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let row_updated = - sqlx::query("UPDATE items SET value = ?1, key = ?2 WHERE type = ?3 AND name = ?4") - .bind(&value.data) - .bind(&value.key) - .bind(type_) - .bind(id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT OR REPLACE INTO tags_encrypted (item_id, name, value) VALUES (?1, \ - ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT OR REPLACE INTO tags_plaintext (item_id, name, value) VALUES (?1, \ - ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1") - .bind(item_id) - .execute(&mut *tx) - .await?; - - sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1") - .bind(item_id) - .execute(&mut *tx) - .await?; - - for tag in tags { - match *tag { - Tag::Encrypted(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_encrypted (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - Tag::PlainText(ref tag_name, ref tag_data) => { - sqlx::query( - "INSERT INTO tags_plaintext (item_id, name, value) VALUES (?1, ?2, ?3)", - ) - .bind(item_id) - .bind(tag_name) - .bind(tag_data) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - - Ok(()) - } - - async fn delete_tags(&self, type_: &[u8], id: &[u8], tag_names: &[TagName]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let (item_id,): (i64,) = - sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .fetch_one(&mut *tx) - .await?; - - for tag_name in tag_names { - match *tag_name { - TagName::OfEncrypted(ref tag_name) => { - sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1 AND name = ?2") - .bind(item_id) - .bind(tag_name) - .execute(&mut *tx) - .await? 
- } - TagName::OfPlain(ref tag_name) => { - sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1 AND name = ?2") - .bind(item_id) - .bind(tag_name) - .execute(&mut *tx) - .await? - } - }; - } - - tx.commit().await?; - Ok(()) - } - - /// - /// deletes value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - let rows_affected = sqlx::query("DELETE FROM items where type = ?1 AND name = ?2") - .bind(type_) - .bind(id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. Seems wallet structure is inconsistent", - )), - } - } - - async fn get_storage_metadata(&self) -> IndyResult> { - let mut conn = self.pool.acquire().await?; - - let (metadata,): (Vec,) = sqlx::query_as::<_, (Vec,)>("SELECT value FROM metadata") - .fetch_one(&mut *conn) - .await?; - - Ok(metadata) - } - - async fn set_storage_metadata(&self, metadata: &[u8]) -> IndyResult<()> { - let mut tx = self.pool.begin().await?; - - sqlx::query("UPDATE metadata SET value = ?1") - .bind(metadata) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn get_all(&self) -> IndyResult> { - let mut conn = self.pool.acquire().await?; - let mut tags: Vec<(i64, Tag)> = Vec::new(); - - tags.extend( - sqlx::query_as::<_, (i64, Vec, String)>( - "SELECT item_id, name, value from tags_plaintext", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::PlainText(r.1, r.2))), - ); - - tags.extend( - sqlx::query_as::<_, (i64, Vec, Vec)>( - "SELECT item_id, name, value from tags_encrypted", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::Encrypted(r.1, r.2))), - ); - - let mut mtags = HashMap::new(); - - for (k, v) in tags { - mtags.entry(k).or_insert_with(Vec::new).push(v) - } - - let records: VecDeque<_> = sqlx::query_as::<_, (i64, Vec, Vec, Vec, Vec)>( - "SELECT id, name, value, key, type FROM items", - ) - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| { - StorageRecord::new( - r.1, - Some(EncryptedValue::new(r.2, r.3)), - Some(r.4), - mtags.remove(&r.0).or_else(|| Some(Vec::new())), - ) - }) - .collect(); - - let total_count = records.len(); - - Ok(Box::new(SQLiteStorageIterator::new( - Some(records), - Some(total_count), - )?)) - } - - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> IndyResult> { - let options = if let Some(options) = options { - serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "Search options is malformed json", - )? 
- } else { - SearchOptions::default() - }; - - let mut conn = self.pool.acquire().await?; - - let records = if options.retrieve_records { - let (query, args) = query::wql_to_sql(type_, query, None)?; - - // "SELECT i.id, i.name, i.value, i.key, i.type FROM items as i WHERE i.type = ?" - - let mut query = - sqlx::query_as::, Vec, Vec, Vec)>(&query); - - for arg in args.iter() { - query = match arg { - query::ToSQL::ByteSlice(a) => query.bind(a), - query::ToSQL::CharSlice(a) => query.bind(a), - } - } - - let mut records = query.fetch_all(&mut *conn).await?; - - let mut mtags = if options.retrieve_tags && !records.is_empty() { - let mut tags: Vec<(i64, Tag)> = Vec::new(); - - let in_binings = std::iter::repeat("?") - .take(records.len()) - .collect::>() - .join(","); - - let query = format!( - r#" - SELECT item_id, name, value - FROM tags_plaintext - WHERE item_id IN ({}) - "#, - in_binings - ); - - let mut query = sqlx::query_as::, String)>(&query); - - for record in records.iter() { - query = query.bind(record.0); - } - - tags.extend( - query - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::PlainText(r.1, r.2))), - ); - - let query = format!( - r#" - SELECT item_id, name, value - FROM tags_encrypted - WHERE item_id IN ({}) - "#, - in_binings - ); - - let mut query = sqlx::query_as::, Vec)>(&query); - - for record in records.iter() { - query = query.bind(record.0); - } - - tags.extend( - query - .fetch_all(&mut *conn) - .await? - .drain(..) - .map(|r| (r.0, Tag::Encrypted(r.1, r.2))), - ); - - let mut mtags = HashMap::new(); - - for (k, v) in tags { - mtags.entry(k).or_insert_with(Vec::new).push(v) - } - - mtags - } else { - HashMap::new() - }; - - let records = records - .drain(..) - .map(|r| { - StorageRecord::new( - r.1, - if options.retrieve_value { - Some(EncryptedValue::new(r.2, r.3)) - } else { - None - }, - if options.retrieve_type { - Some(r.4) - } else { - None - }, - if options.retrieve_tags { - mtags.remove(&r.0).or_else(|| Some(Vec::new())) - } else { - None - }, - ) - }) - .collect(); - - Some(records) - } else { - None - }; - - let total_count = if options.retrieve_total_count { - let (query, args) = query::wql_to_sql_count(type_, query)?; - - let mut query = sqlx::query_as::(&query); - - for arg in args.iter() { - query = match arg { - query::ToSQL::ByteSlice(a) => query.bind(a), - query::ToSQL::CharSlice(a) => query.bind(a), - } - } - - let (total_count,) = query.fetch_one(&mut *conn).await?; - Some(total_count as usize) - } else { - None - }; - - Ok(Box::new(SQLiteStorageIterator::new(records, total_count)?)) - } - - fn close(&mut self) -> IndyResult<()> { - Ok(()) - } -} - -#[async_trait] -impl WalletStorageType for SQLiteStorageType { - /// - /// Deletes the SQLite database file with the provided id from the path specified in the - /// config file. 
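   (For orientation, a sketch inferred from the local `Config { path: Option<String> }` type; the path shown is illustrative.) The `config` argument, when present, is JSON of the form

       {"path": "/tmp/indy_wallets"}

   and `_db_path` then resolves the storage file as `/tmp/indy_wallets/<id>/sqlite.db`; when no config is given, the default wallet home path is used instead.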
- /// - /// # Arguments - /// - /// * `id` - id of the SQLite DB file - /// * `storage_config` - config containing the location of SQLite DB files - /// * `storage_credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_file_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if !db_file_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletNotFound, - format!("Wallet storage file isn't found: {:?}", db_file_path), - )); - } - - std::fs::remove_dir_all(db_file_path.parent().unwrap())?; - Ok(()) - } - - /// - /// Creates the SQLite DB file with the provided name in the path specified in the config file, - /// and initializes the encryption keys needed for encryption and decryption of data. - /// - /// # Arguments - /// - /// * `id` - name of the SQLite DB file - /// * `config` - config containing the location of SQLite DB files - /// * `credentials` - DB credentials - /// * `metadata` - encryption keys that need to be stored in the newly created DB - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `AlreadyExists` - File with a given name already exists on the path - /// * `IOError("IO error during storage operation:...")` - Connection to the DB failed - /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed - /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - metadata: &[u8], - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if db_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletAlreadyExists, - format!("Wallet database file already exists: {:?}", db_path), - )); - } - - fs::DirBuilder::new() - .recursive(true) - .create(db_path.parent().unwrap())?; - - let mut conn = SqliteConnectOptions::default() - .filename(db_path.as_path()) - .create_if_missing(true) - .journal_mode(SqliteJournalMode::Wal) - .log_statements(LevelFilter::Debug) - .connect() - .await?; - - let res = sqlx::query( - r#" - PRAGMA locking_mode=EXCLUSIVE; - PRAGMA foreign_keys=ON; - - BEGIN EXCLUSIVE TRANSACTION; - - /*** Keys Table ***/ - - CREATE TABLE metadata ( - id INTEGER NOT NULL, - value NOT NULL, - PRIMARY KEY(id) - ); - - /*** Items Table ***/ - - CREATE TABLE items( - id INTEGER NOT NULL, - type NOT NULL, - name NOT NULL, - value NOT NULL, - key NOT NULL, - PRIMARY KEY(id) - ); - - CREATE UNIQUE INDEX ux_items_type_name ON items(type, name); - - 
/*** Encrypted Tags Table ***/ - - CREATE TABLE tags_encrypted( - name NOT NULL, - value NOT NULL, - item_id INTEGER NOT NULL, - PRIMARY KEY(name, item_id), - FOREIGN KEY(item_id) - REFERENCES items(id) - ON DELETE CASCADE - ON UPDATE CASCADE - ); - - CREATE INDEX ix_tags_encrypted_name ON tags_encrypted(name); - CREATE INDEX ix_tags_encrypted_value ON tags_encrypted(value); - CREATE INDEX ix_tags_encrypted_item_id ON tags_encrypted(item_id); - - /*** PlainText Tags Table ***/ - - CREATE TABLE tags_plaintext( - name NOT NULL, - value NOT NULL, - item_id INTEGER NOT NULL, - PRIMARY KEY(name, item_id), - FOREIGN KEY(item_id) - REFERENCES items(id) - ON DELETE CASCADE - ON UPDATE CASCADE - ); - - CREATE INDEX ix_tags_plaintext_name ON tags_plaintext(name); - CREATE INDEX ix_tags_plaintext_value ON tags_plaintext(value); - CREATE INDEX ix_tags_plaintext_item_id ON tags_plaintext(item_id); - - /*** Insert metadata ***/ - INSERT INTO metadata(value) VALUES (?1); - - COMMIT; - "#, - ) - .persistent(false) - .bind(metadata) - .execute(&mut conn) - .await; - - // TODO: I am not sure force cleanup here is a good idea. - if let Err(err) = res { - std::fs::remove_file(db_path)?; - Err(err)?; - } - - Ok(()) - } - - /// - /// Establishes a connection to the SQLite DB with the provided id located in the path - /// specified in the config. In case of a successful onection returns a Storage object - /// embedding the connection and the encryption keys that will be used for encryption and - /// decryption operations. - /// - /// - /// # Arguments - /// - /// * `id` - id of the SQLite DB file - /// * `config` - config containing the location of SQLite DB files - /// * `credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `(Box, Vec)` - Tuple of `SQLiteStorage` and `encryption keys` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - _credentials: Option<&str>, - ) -> IndyResult> { - let config: Option = config - .map(serde_json::from_str) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")?; - - let db_path = SQLiteStorageType::_db_path(id, config.as_ref()); - - if !db_path.exists() { - return Err(err_msg( - IndyErrorKind::WalletNotFound, - "No wallet database exists", - )); - } - - let connect_options = SqliteConnectOptions::new() - .filename(db_path.as_path()) - .journal_mode(SqliteJournalMode::Wal) - .disable_statement_logging(); - - Ok(Box::new(SQLiteStorage { - pool: SqlitePoolOptions::default() - .min_connections(1) - .max_connections(1) - .max_lifetime(None) - .connect_with(connect_options) - .await?, - })) - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs deleted file mode 100644 index fd85ebfde4..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/default/query.rs +++ /dev/null @@ -1,376 +0,0 @@ -use std::convert::From; - -use indy_api_types::errors::prelude::*; - -use crate::language::{Operator, TagName, TargetValue}; - -#[derive(Debug)] -pub(crate) enum ToSQL<'a> { - ByteSlice(&'a [u8]), - CharSlice(&'a str), -} - -impl<'a> From<&'a Vec> for ToSQL<'a> 
{ - fn from(item: &'a Vec) -> Self { - ToSQL::ByteSlice(item.as_slice()) - } -} - -impl<'a> From<&'a [u8]> for ToSQL<'a> { - fn from(item: &'a [u8]) -> Self { - ToSQL::ByteSlice(item) - } -} - -impl<'a> From<&'a str> for ToSQL<'a> { - fn from(item: &'a str) -> Self { - ToSQL::CharSlice(item) - } -} - -impl<'a> From<&'a String> for ToSQL<'a> { - fn from(item: &'a String) -> Self { - ToSQL::CharSlice(item.as_str()) - } -} - -// Translates Wallet Query Language to SQL -// WQL input is provided as a reference to a top level Operator -// Result is a tuple of query string and query arguments -pub(crate) fn wql_to_sql<'a>( - class: &'a [u8], - op: &'a Operator, - _options: Option<&str>, -) -> Result<(String, Vec>), IndyError> { - let mut arguments: Vec> = Vec::new(); - arguments.push(class.into()); - - let clause_string = operator_to_sql(op, &mut arguments)?; - - const BASE: &str = - "SELECT i.id, i.name, i.value, i.key, i.type FROM items as i WHERE i.type = ?"; - if !clause_string.is_empty() { - let mut query_string = String::with_capacity(BASE.len() + 5 + clause_string.len()); - query_string.push_str(BASE); - query_string.push_str(" AND "); - query_string.push_str(&clause_string); - Ok((query_string, arguments)) - } else { - Ok((BASE.to_string(), arguments)) - } -} - -pub(crate) fn wql_to_sql_count<'a>( - class: &'a [u8], - op: &'a Operator, -) -> Result<(String, Vec>), IndyError> { - let mut arguments: Vec> = Vec::new(); - arguments.push(class.into()); - - let clause_string = operator_to_sql(op, &mut arguments)?; - let mut query_string = "SELECT count(*) FROM items as i WHERE i.type = ?".to_string(); - - if !clause_string.is_empty() { - query_string.push_str(" AND "); - query_string.push_str(&clause_string); - } - - Ok((query_string, arguments)) -} - -fn operator_to_sql<'a>(op: &'a Operator, arguments: &mut Vec>) -> IndyResult { - match *op { - Operator::Eq(ref tag_name, ref target_value) => { - eq_to_sql(tag_name, target_value, arguments) - } - Operator::Neq(ref tag_name, ref target_value) => { - neq_to_sql(tag_name, target_value, arguments) - } - Operator::Gt(ref tag_name, ref target_value) => { - gt_to_sql(tag_name, target_value, arguments) - } - Operator::Gte(ref tag_name, ref target_value) => { - gte_to_sql(tag_name, target_value, arguments) - } - Operator::Lt(ref tag_name, ref target_value) => { - lt_to_sql(tag_name, target_value, arguments) - } - Operator::Lte(ref tag_name, ref target_value) => { - lte_to_sql(tag_name, target_value, arguments) - } - Operator::Like(ref tag_name, ref target_value) => { - like_to_sql(tag_name, target_value, arguments) - } - Operator::In(ref tag_name, ref target_values) => { - in_to_sql(tag_name, target_values, arguments) - } - Operator::And(ref suboperators) => and_to_sql(suboperators, arguments), - Operator::Or(ref suboperators) => or_to_sql(suboperators, arguments), - Operator::Not(ref suboperator) => not_to_sql(suboperator, arguments), - } -} - -fn eq_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? 
AND value = ?))" - .to_string(), - ) - } - ( - TagName::EncryptedTagName(ref queried_name), - TargetValue::Encrypted(ref queried_value), - ) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value = ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for equality operator", - )), - } -} - -fn neq_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value != ?))" - .to_string(), - ) - } - ( - TagName::EncryptedTagName(ref queried_name), - TargetValue::Encrypted(ref queried_value), - ) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value != ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for inequality operator", - )), - } -} - -fn gt_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value > ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn gte_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value >= ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gte operator", - )), - } -} - -fn lt_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value < ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn lte_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? 
AND value <= ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lte operator", - )), - } -} - -fn like_to_sql<'a>( - name: &'a TagName, - value: &'a TargetValue, - arguments: &mut Vec>, -) -> IndyResult { - match (name, value) { - (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { - arguments.push(queried_name.into()); - arguments.push(queried_value.into()); - Ok( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value LIKE ?))" - .to_string(), - ) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $like operator", - )), - } -} - -fn in_to_sql<'a>( - name: &'a TagName, - values: &'a [TargetValue], - arguments: &mut Vec>, -) -> IndyResult { - let mut in_string = String::new(); - match *name { - TagName::PlainTagName(ref queried_name) => { - in_string.push_str( - "(i.id in (SELECT item_id FROM tags_plaintext WHERE name = ? AND value IN (", - ); - arguments.push(queried_name.into()); - - for (index, value) in values.iter().enumerate() { - if let TargetValue::Unencrypted(ref target) = *value { - in_string.push('?'); - arguments.push(target.into()); - if index < values.len() - 1 { - in_string.push(','); - } - } else { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Encrypted tag value in $in for nonencrypted tag name", - )); - } - } - - Ok(in_string + ")))") - } - TagName::EncryptedTagName(ref queried_name) => { - in_string.push_str( - "(i.id in (SELECT item_id FROM tags_encrypted WHERE name = ? AND value IN (", - ); - arguments.push(queried_name.into()); - let index_before_last = values.len() - 2; - - for (index, value) in values.iter().enumerate() { - if let TargetValue::Encrypted(ref target) = *value { - in_string.push('?'); - arguments.push(target.into()); - if index <= index_before_last { - in_string.push(','); - } - } else { - return Err(err_msg( - IndyErrorKind::WalletQueryError, - "Unencrypted tag value in $in for encrypted tag name", - )); - } - } - - Ok(in_string + ")))") - } - } -} - -fn and_to_sql<'a>( - suboperators: &'a [Operator], - arguments: &mut Vec>, -) -> IndyResult { - join_operators(suboperators, " AND ", arguments) -} - -fn or_to_sql<'a>( - suboperators: &'a [Operator], - arguments: &mut Vec>, -) -> IndyResult { - join_operators(suboperators, " OR ", arguments) -} - -fn not_to_sql<'a>(suboperator: &'a Operator, arguments: &mut Vec>) -> IndyResult { - let suboperator_string = operator_to_sql(suboperator, arguments)?; - Ok("NOT (".to_string() + &suboperator_string + ")") -} - -fn join_operators<'a>( - operators: &'a [Operator], - join_str: &str, - arguments: &mut Vec>, -) -> IndyResult { - let mut s = String::new(); - if !operators.is_empty() { - s.push('('); - for (index, operator) in operators.iter().enumerate() { - let operator_string = operator_to_sql(operator, arguments)?; - s.push_str(&operator_string); - if index < operators.len() - 1 { - s.push_str(join_str); - } - } - s.push(')'); - } - Ok(s) -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs deleted file mode 100644 index 315dd0334b..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mod.rs +++ /dev/null @@ -1,112 +0,0 @@ -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; - -use crate::{language, wallet::EncryptedValue}; - -pub mod default; -pub mod mysql; - -#[derive(Clone, 
Debug, Ord, PartialOrd, Eq, PartialEq)] -pub enum Tag { - Encrypted(Vec, Vec), - PlainText(Vec, String), -} - -#[derive(Debug)] -pub enum TagName { - OfEncrypted(Vec), - OfPlain(Vec), -} - -#[derive(Clone, Debug)] -pub struct StorageRecord { - pub id: Vec, - pub value: Option, - pub type_: Option>, - pub tags: Option>, -} - -impl StorageRecord { - fn new( - id: Vec, - value: Option, - type_: Option>, - tags: Option>, - ) -> Self { - Self { - id, - value, - type_, - tags, - } - } -} - -#[async_trait] -pub trait StorageIterator: Send + Sync { - async fn next(&mut self) -> Result, IndyError>; - fn get_total_count(&self) -> Result, IndyError>; -} - -#[async_trait] -pub trait WalletStorage: Send + Sync { - async fn get(&self, type_: &[u8], id: &[u8], options: &str) - -> Result; - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> Result<(), IndyError>; - async fn update( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - ) -> Result<(), IndyError>; - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), IndyError>; - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), IndyError>; - async fn delete_tags( - &self, - type_: &[u8], - id: &[u8], - tag_names: &[TagName], - ) -> Result<(), IndyError>; - async fn delete(&self, type_: &[u8], id: &[u8]) -> Result<(), IndyError>; - async fn get_storage_metadata(&self) -> Result, IndyError>; - async fn set_storage_metadata(&self, metadata: &[u8]) -> Result<(), IndyError>; - async fn get_all(&self) -> Result, IndyError>; - - // TODO: - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> Result, IndyError>; - fn close(&mut self) -> Result<(), IndyError>; -} - -#[async_trait] -pub trait WalletStorageType: Send + Sync { - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - metadata: &[u8], - ) -> Result<(), IndyError>; - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> Result, IndyError>; - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> Result<(), IndyError>; -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs deleted file mode 100644 index f479e8fd1f..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/mod.rs +++ /dev/null @@ -1,2246 +0,0 @@ -use std::{ - collections::{HashMap, VecDeque}, - iter::Iterator, -}; - -use async_trait::async_trait; -use futures::lock::Mutex; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; -use log::LevelFilter; -use query::{wql_to_sql, wql_to_sql_count}; -use serde::Deserialize; -use sqlx::{ - mysql::{MySqlConnectOptions, MySqlPoolOptions, MySqlRow}, - ConnectOptions, MySqlPool, Row, -}; - -use crate::{ - language, - storage::{StorageIterator, StorageRecord, Tag, TagName, WalletStorage, WalletStorageType}, - wallet::EncryptedValue, - RecordOptions, SearchOptions, -}; - -mod query; - -struct MySQLStorageIterator { - records: Option>>, - total_count: Option, -} - -impl MySQLStorageIterator { - fn new( - records: Option>>, - total_count: Option, - ) -> IndyResult { - Ok(MySQLStorageIterator { - records, - total_count, - }) - } -} - -#[async_trait] -impl StorageIterator for MySQLStorageIterator { - async fn next(&mut self) -> IndyResult> { - // TODO: 
Optimize!!! - if let Some(ref mut records) = self.records { - if let Some(record) = records.pop_front() { - return Ok(Some(record?)); - } else { - Ok(None) - } - } else { - Ok(None) - } - } - - fn get_total_count(&self) -> IndyResult> { - Ok(self.total_count.to_owned()) - } -} - -#[derive(Deserialize, Debug, Clone)] -struct Config { - pub read_host: String, - pub write_host: String, - pub port: u16, - pub db_name: String, - #[serde(default = "default_connection_limit")] - pub connection_limit: u32, -} - -fn default_connection_limit() -> u32 { - 100 -} - -#[derive(Deserialize, Clone)] -pub struct Credentials { - pub user: String, - pub pass: String, -} - -#[derive(Debug)] -struct MySqlStorage { - wallet_id: i64, - read_pool: MySqlPool, - write_pool: MySqlPool, -} - -pub struct MySqlStorageType { - connections: Mutex>, -} - -impl MySqlStorageType { - pub fn new() -> MySqlStorageType { - MySqlStorageType { - connections: Mutex::new(HashMap::new()), - } - } - - async fn _connect( - &self, - read_only: bool, - config: Config, - credentials: Credentials, - ) -> IndyResult { - let host_addr = if read_only { - &config.read_host - } else { - &config.write_host - }; - - let connection_string = format!( - "{}:{}@{}:{}/{}", - credentials.user, credentials.pass, host_addr, config.port, config.db_name - ); - - let mut connref = self.connections.lock().await; - - if let Some(connection) = connref.get(&connection_string) { - return Ok(connection.clone()); - } - - let my_sql_connect_options = MySqlConnectOptions::new() - .host(host_addr) - .database(&config.db_name) - .username(&credentials.user) - .password(&credentials.pass) - .log_statements(LevelFilter::Debug); - - let connection = MySqlPoolOptions::default() - .max_connections(config.connection_limit) - .test_before_acquire(false) - .connect_with(my_sql_connect_options) - .await?; - - connref.insert(connection_string, connection.clone()); - Ok(connection) - } -} - -#[async_trait] -impl WalletStorage for MySqlStorage { - /// - /// Tries to fetch values and/or tags from the storage. - /// Returns Result with StorageEntity object which holds requested data in case of success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type_ of the item in storage - /// * `id` - id of the item in storage - /// * `options` - JSon containing what needs to be fetched. - /// Example: {"retrieveValue": true, "retrieveTags": true} - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `StorageEntity` - Contains name, optional value and optional tags - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { - let options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - let mut conn = self.read_pool.acquire().await?; - - let (value, tags): (Option>, Option) = sqlx::query_as(&format!( - r#" - SELECT {}, {} - FROM items - WHERE - wallet_id = ? - AND type = ? - AND name = ? 
- "#, - if options.retrieve_value { - "value" - } else { - "NULL" - }, - if options.retrieve_tags { - "tags" - } else { - "NULL" - }, - )) - .bind(self.wallet_id) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .fetch_one(&mut *conn) - .await?; - - let value = if let Some(value) = value { - Some(EncryptedValue::from_bytes(&value)?) - } else { - None - }; - - let type_ = if options.retrieve_type { - Some(type_.to_vec()) - } else { - None - }; - - let tags = if let Some(tags) = tags { - Some(_tags_from_json(tags)?) - } else { - None - }; - - Ok(StorageRecord::new(id.to_vec(), value, type_, tags)) - } - - /// - /// inserts value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// * `value` - value of the item in storage - /// * `value_key` - key used to encrypt the value - /// * `tags` - tags assigned to the value - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` class of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemAlreadyExists` - Item is already present in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn add( - &self, - type_: &[u8], - id: &[u8], - value: &EncryptedValue, - tags: &[Tag], - ) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - sqlx::query( - r#" - INSERT INTO items (type, name, value, tags, wallet_id) - VALUE (?, ?, ?, ?, ?) - "#, - ) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(&value.to_bytes()) - .bind(&_tags_to_json(tags)?) - .bind(self.wallet_id) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn update(&self, type_: &[u8], id: &[u8], value: &EncryptedValue) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query( - r#" - UPDATE items - SET value = ? - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - ) - .bind(&value.to_bytes()) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - if tags.is_empty() { - // FIXME: Think about checking item exists - return Ok(()); - } - - let tag_paths = _tags_to_plain(tags) - .into_iter() - .map(|(tag, val)| format!(r#"'$."{}"', "{}""#, tag, val)) - .collect::>() - .join(","); - - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query(&format!( - r#" - UPDATE items - SET tags = JSON_SET(tags, {}) - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - tag_paths - )) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? 
- .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let row_updated = sqlx::query( - r#" - UPDATE items - SET tags = ? - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - ) - .bind(&_tags_to_json(tags)?) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - async fn delete_tags(&self, type_: &[u8], id: &[u8], tag_names: &[TagName]) -> IndyResult<()> { - if tag_names.is_empty() { - // FIXME: Think about checking item exists - return Ok(()); - } - - let mut tx = self.write_pool.begin().await?; - - let tag_name_paths = _tag_names_to_plain(tag_names) - .into_iter() - .map(|tag_name| format!(r#"'$."{}"'"#, tag_name)) - .collect::>() - .join(","); - - let row_updated = sqlx::query(&format!( - r#" - UPDATE items - SET tags = JSON_REMOVE(tags, {}) - WHERE type = ? - AND name = ? - AND wallet_id = ? - "#, - tag_name_paths - )) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match row_updated { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to update not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row update. Seems wallet structure is inconsistent", - )), - } - } - - /// - /// deletes value and tags into storage. - /// Returns Result with () on success or - /// Result with IndyError in case of failure. - /// - /// - /// # Arguments - /// - /// * `type_` - type of the item in storage - /// * `id` - id of the item in storage - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::Closed` - Storage is closed - /// * `IndyError::ItemNotFound` - Item is not found in database - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - let rows_affected = sqlx::query( - r#" - DELETE FROM items - WHERE type = ? - AND name = ? - AND wallet_id = ?"#, - ) - .bind(&base64::encode(type_)) - .bind(&base64::encode(id)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await? - .rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. 
Seems wallet structure is inconsistent", - )), - } - } - - async fn get_storage_metadata(&self) -> IndyResult> { - let mut conn = self.read_pool.acquire().await?; - - let (metadata,): (String,) = sqlx::query_as::<_, (String,)>( - r#" - SELECT metadata - FROM wallets - WHERE id = ? - "#, - ) - .bind(self.wallet_id) - .fetch_one(&mut *conn) - .await?; - - base64::decode(&metadata) - } - - async fn set_storage_metadata(&self, metadata: &[u8]) -> IndyResult<()> { - let mut tx = self.write_pool.begin().await?; - - sqlx::query( - r#" - UPDATE wallets - SET metadata = ? - WHERE id = ? - "#, - ) - .bind(base64::encode(metadata)) - .bind(self.wallet_id) - .execute(&mut *tx) - .await?; - - tx.commit().await?; - Ok(()) - } - - async fn get_all(&self) -> IndyResult> { - let records: VecDeque<_> = sqlx::query( - r#" - SELECT type, name, value, tags - FROM items - WHERE wallet_id = ? - "#, - ) - .bind(self.wallet_id) - .map(|r: MySqlRow| -> IndyResult { - let type_: String = r.get(0); - let id: String = r.get(1); - let value: Vec = r.get(2); - let tags: serde_json::Value = r.get(3); - - let res = StorageRecord::new( - base64::decode(&id)?, - Some(EncryptedValue::from_bytes(&value)?), - Some(base64::decode(&type_)?), - Some(_tags_from_json(tags)?), - ); - - Ok(res) - }) - .fetch_all(&self.read_pool) - .await? - .into_iter() - .collect(); - - let total_len = records.len(); - - // FIXME: Fetch total count - Ok(Box::new(MySQLStorageIterator::new( - Some(records), - Some(total_len), - )?)) - } - - async fn search( - &self, - type_: &[u8], - query: &language::Operator, - options: Option<&str>, - ) -> IndyResult> { - let options = if let Some(options) = options { - serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "Search options is malformed json", - )? - } else { - SearchOptions::default() - }; - - let mut conn = self.read_pool.acquire().await?; - - let total_count = if options.retrieve_total_count { - let (query, args) = wql_to_sql_count(self.wallet_id, type_, query)?; - let mut query = sqlx::query_as::(&query); - - for arg in args.iter() { - query = if arg.is_i64() { - query.bind(arg.as_i64().unwrap()) - } else if arg.is_string() { - query.bind(arg.as_str().unwrap()) - } else { - return Err(err_msg( - IndyErrorKind::InvalidState, - "Unexpected sql parameter type.", - )); - } - } - - let (total_count,) = query.fetch_one(&mut *conn).await?; - Some(total_count as usize) - } else { - None - }; - - let records = if options.retrieve_records { - let (query, args) = wql_to_sql(self.wallet_id, type_, query, &options)?; - - let mut query = sqlx::query::(&query); - - for arg in args.iter() { - query = if arg.is_i64() { - query.bind(arg.as_i64().unwrap()) - } else if arg.is_string() { - query.bind(arg.as_str().unwrap()) - } else { - return Err(err_msg( - IndyErrorKind::InvalidState, - "Unexpected sql parameter type.", - )); - } - } - - let records: VecDeque<_> = query - .map(|r: MySqlRow| -> IndyResult { - let type_ = if options.retrieve_type { - let type_: String = r.get(0); - Some(base64::decode(&type_)?) - } else { - None - }; - - let id = { - let id: String = r.get(1); - base64::decode(&id)? - }; - - let value = if options.retrieve_value { - let value: Vec = r.get(2); - Some(EncryptedValue::from_bytes(&value)?) - } else { - None - }; - - let tags = if options.retrieve_tags { - let tags: serde_json::Value = r.get(3); - Some(_tags_from_json(tags)?) - } else { - None - }; - - let res = StorageRecord::new(id, value, type_, tags); - - Ok(res) - }) - .fetch_all(&self.read_pool) - .await? 
- .into_iter() - .collect(); - - Some(records) - } else { - None - }; - - Ok(Box::new(MySQLStorageIterator::new(records, total_count)?)) - } - - fn close(&mut self) -> IndyResult<()> { - Ok(()) - } -} - -#[async_trait] -impl WalletStorageType for MySqlStorageType { - /// - /// Deletes the MySql database file with the provided id from the path specified in the - /// config file. - /// - /// # Arguments - /// - /// * `id` - id of the MySql DB file - /// * `storage_config` - config containing the location of MySql DB files - /// * `storage_credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn delete_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> IndyResult<()> { - let config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let mut tx = self - ._connect(false, config, credentials) - .await? - .begin() - .await?; - - let res = sqlx::query( - r#" - DELETE FROM wallets - WHERE name = ? - "#, - ) - .bind(id) - .execute(&mut *tx) - .await; - - let rows_affected = res?.rows_affected(); - - match rows_affected { - 1 => { - tx.commit().await?; - Ok(()) - } - 0 => Err(err_msg( - IndyErrorKind::WalletNotFound, - "Item to delete not found", - )), - _ => Err(err_msg( - IndyErrorKind::InvalidState, - "More than one row deleted. Seems wallet structure is inconsistent", - )), - } - } - - /// - /// Creates the MySql DB file with the provided name in the path specified in the config file, - /// and initializes the encryption keys needed for encryption and decryption of data. - /// - /// # Arguments - /// - /// * `id` - name of the MySql DB file - /// * `config` - config containing the location of MySql DB files - /// * `credentials` - DB credentials - /// * `metadata` - encryption keys that need to be stored in the newly created DB - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `()` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `AlreadyExists` - File with a given name already exists on the path - /// * `IOError("IO error during storage operation:...")` - Connection to the DB failed - /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed - /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed - /// * `IOError(..)` - Deletion of the file form the file-system failed - async fn create_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - metadata: &[u8], - ) -> IndyResult<()> { - let mut config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? 
- .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let my_sql_connect_options = MySqlConnectOptions::new() - .host(&config.write_host) - .username(&credentials.user) - .password(&credentials.pass) - .log_statements(LevelFilter::Debug); - - let mut pool = MySqlPoolOptions::default() - .max_connections(1) - .test_before_acquire(false) - .connect_with(my_sql_connect_options) - .await?; - - let mut con = pool.acquire().await?; - - // Basic SQL injection prevention - // since we cannot bind the database identifier - config.db_name = config.db_name.replace('`', "``"); - - sqlx::query(&format!( - "CREATE DATABASE IF NOT EXISTS `{}`;", - config.db_name - )) - .execute(&mut *con) - .await?; - - // Replace the previous single use pool - // with the actual one, get a connection - // and create the required tables - pool = self._connect(false, config, credentials).await?; - con = pool.acquire().await?; - - sqlx::query( - r#" - CREATE TABLE IF NOT EXISTS `items` ( - `wallet_id` int NOT NULL, - `type` varchar(256) NOT NULL, - `name` varchar(256) NOT NULL, - `value` blob NOT NULL, - `tags` varchar(256) DEFAULT NULL, - PRIMARY KEY (`wallet_id`, `type`, `name`) - );"#, - ) - .execute(&mut *con) - .await?; - - sqlx::query( - r#" - CREATE TABLE IF NOT EXISTS `wallets` ( - `id` int NOT NULL AUTO_INCREMENT, - `name` varchar(64) NOT NULL, - `metadata` varchar(4096) DEFAULT NULL, - PRIMARY KEY (`id`) - );"#, - ) - .execute(&mut *con) - .await?; - - let mut tx = pool.begin().await?; - - let res = sqlx::query( - r#" - INSERT INTO wallets (name, metadata) - VALUES (?, ?) - "#, - ) - .bind(id) - .bind(base64::encode(metadata)) - .execute(&mut *tx) - .await; - - match res { - Err(sqlx::Error::Database(e)) if e.code().is_some() && e.code().unwrap() == "23000" => { - return Err(err_msg( - IndyErrorKind::WalletAlreadyExists, - "Wallet already exists", - )) - } - e => e?, - }; - - // FIXME: return wallet already exists on 1062 error code from MySQL - - tx.commit().await?; - Ok(()) - } - - /// - /// Establishes a connection to the MySql DB with the provided id located in the path - /// specified in the config. In case of a successful onection returns a Storage object - /// embedding the connection and the encryption keys that will be used for encryption and - /// decryption operations. - /// - /// - /// # Arguments - /// - /// * `id` - id of the MySql DB file - /// * `config` - config containing the location of MySql DB files - /// * `credentials` - DB credentials - /// - /// # Returns - /// - /// Result that can be either: - /// - /// * `(Box, Vec)` - Tuple of `MySqlStorage` and `encryption keys` - /// * `IndyError` - /// - /// # Errors - /// - /// Any of the following `IndyError` type_ of errors can be throw by this method: - /// - /// * `IndyError::NotFound` - File with the provided id not found - /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - async fn open_storage( - &self, - id: &str, - config: Option<&str>, - credentials: Option<&str>, - ) -> IndyResult> { - let config = config - .map(serde_json::from_str::) - .transpose() - .to_indy(IndyErrorKind::InvalidStructure, "Malformed config json")? 
- .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent config json", - ))?; - - let credentials = credentials - .map(serde_json::from_str::) - .transpose() - .to_indy( - IndyErrorKind::InvalidStructure, - "Malformed credentials json", - )? - .ok_or(err_msg( - IndyErrorKind::InvalidStructure, - "Absent credentials json", - ))?; - - let read_pool = self - ._connect(true, config.clone(), credentials.clone()) - .await?; - let write_pool = self._connect(false, config, credentials).await?; - - let res = sqlx::query_as::<_, (i64,)>( - r#" - SELECT id FROM wallets - WHERE name = ? - "#, - ) - .bind(id) - .fetch_one(&read_pool) - .await; - - let (wallet_id,) = match res { - Err(sqlx::Error::RowNotFound) => { - return Err(err_msg(IndyErrorKind::WalletNotFound, "Wallet not found")); - } - e => e?, - }; - - Ok(Box::new(MySqlStorage { - read_pool, - write_pool, - wallet_id, - })) - } -} - -#[cfg(test)] -#[allow(clippy::all)] -mod tests { - use indy_utils::environment; - - use super::{super::Tag, *}; - - #[allow(unused_macros)] - macro_rules! assert_kind { - ($kind:expr, $var:expr) => { - match $var { - Err(e) => assert_eq!($kind, e.kind()), - _ => assert!(false, "Result expected to be error"), - } - }; - } - - #[async_std::test] - #[cfg(feature = "benchmark")] - async fn mysql_storage_sync_send() { - use std::{sync::Arc, time::SystemTime}; - - use futures::{channel::oneshot, executor::ThreadPool, future::join_all}; - - let count = 1000; - let executor = ThreadPool::new().expect("Failed to new ThreadPool"); - let storage_type = Arc::new(Box::new(MySqlStorageType::new())); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .delete_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - println!("------------> 1 {:?}", SystemTime::now()); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .create_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - &_metadata(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - - println!("------------> 3 {:?}", SystemTime::now()); - - let waiters: Vec<_> = (0..count) - .into_iter() - .map(|id| { - let st = storage_type.clone(); - let (tx, rx) = oneshot::channel::>(); - - let future = async move { - let res = st - .delete_storage( - &format!("mysql_storage_sync_send_{}", id), - _config(), - _credentials(), - ) - .await; - - tx.send(res).unwrap(); - }; - - executor.spawn_ok(future); - rx - }) - .collect(); - - join_all(waiters).await; - - println!("------------> 5 {:?}", SystemTime::now()); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_create_works() { - _cleanup("mysql_storage_type_create_works").await; - - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage( - "mysql_storage_type_create_works", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - _cleanup("mysql_storage_type_create_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_create_works_for_twice() { - 
_cleanup("mysql_storage_type_create_works_for_twice").await; - - let storage_type = MySqlStorageType::new(); - storage_type - .create_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - let res = storage_type - .create_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - &_metadata(), - ) - .await; - - assert_kind!(IndyErrorKind::WalletAlreadyExists, res); - - storage_type - .delete_storage( - "mysql_storage_type_create_works_for_twice", - _config(), - _credentials(), - ) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_storage_metadata_works() { - _cleanup("mysql_storage_get_storage_metadata_works").await; - - { - let storage = _storage("mysql_storage_get_storage_metadata_works").await; - let metadata = storage.get_storage_metadata().await.unwrap(); - - assert_eq!(metadata, _metadata()); - } - - _cleanup("mysql_storage_get_storage_metadata_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_delete_works() { - _cleanup("mysql_storage_type_delete_works").await; - - let storage_type = MySqlStorageType::new(); - storage_type - .create_storage( - "mysql_storage_type_delete_works", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - storage_type - .delete_storage("mysql_storage_type_delete_works", _config(), _credentials()) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_delete_works_for_non_existing() { - _cleanup("mysql_storage_type_delete_works_for_non_existing").await; - - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage( - "mysql_storage_type_delete_works_for_non_existing", - _config(), - _credentials(), - &_metadata(), - ) - .await - .unwrap(); - - let res = storage_type - .delete_storage("unknown", _config(), _credentials()) - .await; - assert_kind!(IndyErrorKind::WalletNotFound, res); - - storage_type - .delete_storage( - "mysql_storage_type_delete_works_for_non_existing", - _config(), - _credentials(), - ) - .await - .unwrap(); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_open_works() { - _cleanup("mysql_storage_type_open_works").await; - _storage("mysql_storage_type_open_works").await; - _cleanup("mysql_storage_type_open_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_type_open_works_for_not_created() { - _cleanup("mysql_storage_type_open_works_for_not_created").await; - - let storage_type = MySqlStorageType::new(); - - let res = storage_type - .open_storage("unknown", _config(), _credentials()) - .await; - - assert_kind!(IndyErrorKind::WalletNotFound, res); - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_works_for_is_802() { - _cleanup("mysql_storage_add_works_for_is_802").await; - - { - let storage = _storage("mysql_storage_add_works_for_is_802").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - - let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - } - - _cleanup("mysql_storage_add_works_for_is_802").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn 
mysql_storage_set_get_works() { - _cleanup("mysql_storage_set_get_works").await; - - { - let storage = _storage("mysql_storage_set_get_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - } - - _cleanup("mysql_storage_set_get_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_set_get_works_for_twice() { - _cleanup("mysql_storage_set_get_works_for_twice").await; - - { - let storage = _storage("mysql_storage_set_get_works_for_twice").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add(&_type1(), &_id1(), &_value2(), &_tags()).await; - assert_kind!(IndyErrorKind::WalletItemAlreadyExists, res); - } - - _cleanup("mysql_storage_set_get_works_for_twice").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_set_get_works_for_reopen() { - _cleanup("mysql_storage_set_get_works_for_reopen").await; - - _storage("mysql_storage_set_get_works_for_reopen") - .await - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = MySqlStorageType::new() - .open_storage( - "mysql_storage_set_get_works_for_reopen", - _config(), - _credentials(), - ) - .await - .unwrap() - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - _cleanup("mysql_storage_set_get_works_for_reopen").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_works_for_wrong_key() { - _cleanup("mysql_storage_get_works_for_wrong_key").await; - - { - let storage = _storage("mysql_storage_get_works_for_wrong_key").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage - .get( - &_type1(), - &_id2(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await; - - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_get_works_for_wrong_key").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_works() { - _cleanup("mysql_storage_delete_works").await; - - { - let storage = _storage("mysql_storage_delete_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - storage.delete(&_type1(), &_id1()).await.unwrap(); - - let res = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await; - - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_works_for_non_existing() { - _cleanup("mysql_storage_delete_works_for_non_existing").await; - - { - let storage = 
_storage("mysql_storage_delete_works_for_non_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.delete(&_type1(), &_id2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_works_for_non_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_returns_error_item_not_found_if_no_such_type() { - _cleanup("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - - { - let storage = - _storage("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.delete(&_type2(), &_id2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_returns_error_item_not_found_if_no_such_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_all_works() { - _cleanup("mysql_storage_get_all_works").await; - - { - let storage = _storage("mysql_storage_get_all_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .add(&_type2(), &_id2(), &_value2(), &_tags()) - .await - .unwrap(); - - let mut storage_iterator = storage.get_all().await.unwrap(); - - let record = storage_iterator.next().await.unwrap().unwrap(); - assert_eq!(record.type_.unwrap(), _type1()); - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - let record = storage_iterator.next().await.unwrap().unwrap(); - assert_eq!(record.type_.unwrap(), _type2()); - assert_eq!(record.value.unwrap(), _value2()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); - - let record = storage_iterator.next().await.unwrap(); - assert!(record.is_none()); - } - - _cleanup("mysql_storage_get_all_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_get_all_works_for_empty() { - _cleanup("mysql_storage_get_all_works_for_empty").await; - - { - let storage = _storage("mysql_storage_get_all_works_for_empty").await; - let mut storage_iterator = storage.get_all().await.unwrap(); - - let record = storage_iterator.next().await.unwrap(); - assert!(record.is_none()); - } - - _cleanup("mysql_storage_get_all_works_for_empty").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works() { - _cleanup("mysql_storage_update_works").await; - - { - let storage = _storage("mysql_storage_update_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - storage - .update(&_type1(), &_id1(), &_value2()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value2()); - } - - _cleanup("mysql_storage_update_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works_for_non_existing_id() { - _cleanup("mysql_storage_update_works_for_non_existing_id").await; - - { - let storage = 
_storage("mysql_storage_update_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let res = storage.update(&_type1(), &_id2(), &_value2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_works_for_non_existing_type() { - _cleanup("mysql_storage_update_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_update_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let res = storage.update(&_type2(), &_id1(), &_value2()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works() { - _cleanup("mysql_storage_add_tags_works").await; - - { - let storage = _storage("mysql_storage_add_tags_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .add_tags(&_type1(), &_id1(), &_new_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - - assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - - _cleanup("mysql_storage_add_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_add_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_add_tags_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_add_tags_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_add_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_add_tags_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.add_tags(&_type2(), &_id1(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_add_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_add_tags_works_for_already_existing() { - _cleanup("mysql_storage_add_tags_works_for_already_existing").await; - - { - let storage = 
_storage("mysql_storage_add_tags_works_for_already_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let tags_with_existing = { - let mut tags = _tags(); - tags.extend(_new_tags()); - tags - }; - - storage - .add_tags(&_type1(), &_id1(), &tags_with_existing) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - - assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - - _cleanup("mysql_storage_add_tags_works_for_already_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works() { - _cleanup("mysql_storage_update_tags_works").await; - - { - let storage = _storage("mysql_storage_update_tags_works").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - storage - .update_tags(&_type1(), &_id1(), &_new_tags()) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - assert_eq!(_sort(record.tags.unwrap()), _sort(_new_tags())); - } - - _cleanup("mysql_storage_update_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_update_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_update_tags_works_for_non_existing_id").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_tags_works_for_non_existing_id").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_update_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_update_tags_works_for_non_existing_type").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_update_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_update_tags_works_for_already_existing() { - _cleanup("mysql_storage_update_tags_works_for_already_existing").await; - { - let storage = _storage("mysql_storage_update_tags_works_for_already_existing").await; - - storage - .add(&_type1(), &_id1(), &_value1(), &_tags()) - .await - .unwrap(); - - let tags_with_existing = { - let mut tags = _tags(); - tags.extend(_new_tags()); - tags - }; - - storage - .update_tags(&_type1(), &_id1(), &tags_with_existing) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.value.unwrap(), _value1()); - - let expected_tags = { - let mut tags = _tags(); - tags.extend(_new_tags()); - _sort(tags) - }; - 
- assert_eq!(_sort(record.tags.unwrap()), expected_tags); - } - _cleanup("mysql_storage_update_tags_works_for_already_existing").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works() { - _cleanup("mysql_storage_delete_tags_works").await; - - { - let storage = _storage("mysql_storage_delete_tags_works").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - storage - .delete_tags(&_type1(), &_id1(), &tag_names) - .await - .unwrap(); - - let record = storage - .get( - &_type1(), - &_id1(), - r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##, - ) - .await - .unwrap(); - - assert_eq!(record.tags.unwrap(), vec![tag3]); - } - - _cleanup("mysql_storage_delete_tags_works").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works_for_non_existing_type() { - _cleanup("mysql_storage_delete_tags_works_for_non_existing_type").await; - - { - let storage = _storage("mysql_storage_delete_tags_works_for_non_existing_type").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - let res = storage.delete_tags(&_type2(), &_id1(), &tag_names).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_tags_works_for_non_existing_type").await; - } - - #[async_std::test] - #[cfg(feature = "mysql")] - async fn mysql_storage_delete_tags_works_for_non_existing_id() { - _cleanup("mysql_storage_delete_tags_works_for_non_existing_id").await; - - { - let storage = _storage("mysql_storage_delete_tags_works_for_non_existing_id").await; - - let tag_name1 = vec![0, 0, 0]; - let tag_name2 = vec![1, 1, 1]; - let tag_name3 = vec![2, 2, 2]; - let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); - let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); - let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); - let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; - - storage - .add(&_type1(), &_id1(), &_value1(), &tags) - .await - .unwrap(); - - let tag_names = vec![ - TagName::OfEncrypted(tag_name1.clone()), - TagName::OfPlain(tag_name2.clone()), - ]; - - let res = storage.delete_tags(&_type1(), &_id2(), &tag_names).await; - assert_kind!(IndyErrorKind::WalletItemNotFound, res); - } - - _cleanup("mysql_storage_delete_tags_works_for_non_existing_id").await; - } - - fn _config() -> Option<&'static str> { - Some( - r#" - { - "read_host": "127.0.0.1", - "write_host": "127.0.0.1", - "port": 3306, - "db_name": "indy" - } - "#, - 
) - } - - fn _credentials() -> Option<&'static str> { - Some( - r#" - { - "user": "root", - "pass": "pass@word1" - } - "#, - ) - } - - async fn _cleanup(name: &str) { - MySqlStorageType::new() - .delete_storage(name, _config(), _credentials()) - .await - .ok(); - } - - async fn _storage(name: &str) -> Box { - let storage_type = MySqlStorageType::new(); - - storage_type - .create_storage(name, _config(), _credentials(), &_metadata()) - .await - .unwrap(); - - storage_type - .open_storage(name, _config(), _credentials()) - .await - .unwrap() - } - - fn _metadata() -> Vec { - vec![ - 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, - 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, - 3, 4, 5, 6, 7, 8, - ] - } - - fn _type(i: u8) -> Vec { - vec![i, 1 + i, 2 + i] - } - - fn _type1() -> Vec { - _type(1) - } - - fn _type2() -> Vec { - _type(2) - } - - fn _id(i: u8) -> Vec { - vec![3 + i, 4 + i, 5 + i] - } - - fn _id1() -> Vec { - _id(1) - } - - fn _id2() -> Vec { - _id(2) - } - - fn _value(i: u8) -> EncryptedValue { - EncryptedValue { - data: vec![6 + i, 7 + i, 8 + i], - key: vec![ - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - 9 + i, - 10 + i, - 11 + i, - ], - } - } - - fn _value1() -> EncryptedValue { - _value(1) - } - - fn _value2() -> EncryptedValue { - _value(2) - } - - fn _tags() -> Vec { - vec![ - Tag::Encrypted(vec![1, 5, 8], vec![3, 5, 6]), - Tag::PlainText(vec![1, 5, 8, 1], "Plain value".to_string()), - ] - } - - fn _new_tags() -> Vec { - vec![ - Tag::Encrypted(vec![1, 1, 1], vec![2, 2, 2]), - Tag::PlainText(vec![1, 1, 1], String::from("tag_value_3")), - ] - } - - fn _sort(mut v: Vec) -> Vec { - v.sort(); - v - } - - fn _custom_path(name: &str) -> String { - let mut path = environment::tmp_path(); - path.push(name); - path.to_str().unwrap().to_owned() - } -} - -// FIXME: copy/paste -fn _tags_to_plain(tags: &[Tag]) -> HashMap { - let mut map = HashMap::with_capacity(tags.len()); - - for tag in tags { - match *tag { - Tag::Encrypted(ref name, ref value) => { - map.insert(base64::encode(name), base64::encode(value)) - } - Tag::PlainText(ref name, ref value) => { - map.insert(format!("~{}", &base64::encode(name)), value.to_string()) - } - }; - } - - map -} - -// FIXME: copy/paste -fn _tags_to_json(tags: &[Tag]) -> IndyResult { - serde_json::to_string(&_tags_to_plain(tags)).to_indy( - IndyErrorKind::InvalidState, - "Unable to serialize tags as json", - ) -} - -// FIXME: copy/paste -fn _tags_from_json(json: serde_json::Value) -> IndyResult> { - let string_tags: HashMap = serde_json::from_value(json).to_indy( - IndyErrorKind::InvalidState, - "Unable to deserialize tags from json", - )?; - - let mut tags = Vec::with_capacity(string_tags.len()); - - for (k, v) in string_tags { - if k.starts_with('~') { - let mut key = k; - key.remove(0); - tags.push(Tag::PlainText( - base64::decode(&key).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag key from base64", - )?, - v, - )); - } else { - tags.push(Tag::Encrypted( - 
base64::decode(&k).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag key from base64", - )?, - base64::decode(&v).to_indy( - IndyErrorKind::InvalidState, - "Unable to decode tag value from base64", - )?, - )); - } - } - Ok(tags) -} - -// FIXME: copy/paste -fn _tag_names_to_plain(tag_names: &[TagName]) -> Vec { - tag_names - .iter() - .map(|tag_name| match *tag_name { - TagName::OfEncrypted(ref tag_name) => base64::encode(tag_name), - TagName::OfPlain(ref tag_name) => format!("~{}", base64::encode(tag_name)), - }) - .collect() -} - -// FIXME: copy/paste -fn _tag_names_to_json(tag_names: &[TagName]) -> IndyResult { - serde_json::to_string(&_tag_names_to_plain(tag_names)).to_indy( - IndyErrorKind::InvalidState, - "Unable to serialize tag names as json", - ) -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs deleted file mode 100644 index d54f4348a0..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/storage/mysql/query.rs +++ /dev/null @@ -1,315 +0,0 @@ -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::base64; -use serde_json::Value; - -use crate::{ - language::{Operator, TagName, TargetValue}, - SearchOptions, -}; - -pub fn wql_to_sql( - wallet_id: i64, - type_: &[u8], - wql: &Operator, - options: &SearchOptions, -) -> IndyResult<(String, Vec)> { - let mut arguments: Vec = Vec::new(); - - let query_condition = match operator_to_sql(wql, &mut arguments) { - Ok(query_condition) => query_condition, - Err(err) => return Err(err), - }; - - let query_string = format!( - "SELECT {}, name, {}, {} FROM items WHERE {} type = ? AND wallet_id = ?", - if options.retrieve_type { - "type" - } else { - "NULL" - }, - if options.retrieve_value { - "value" - } else { - "NULL" - }, - if options.retrieve_tags { - "tags" - } else { - "NULL" - }, - if !query_condition.is_empty() { - query_condition + " AND" - } else { - "".to_string() - } - ); - - arguments.push(base64::encode(type_).into()); - arguments.push(wallet_id.into()); - - Ok((query_string, arguments)) -} - -pub fn wql_to_sql_count( - wallet_id: i64, - type_: &[u8], - wql: &Operator, -) -> IndyResult<(String, Vec)> { - let mut arguments: Vec = Vec::new(); - - let query_condition = match operator_to_sql(wql, &mut arguments) { - Ok(query_condition) => query_condition, - Err(err) => return Err(err), - }; - - let query_string = format!( - "SELECT count(*) FROM items i WHERE {} i.type = ? 
AND i.wallet_id = ?", - if !query_condition.is_empty() { - query_condition + " AND" - } else { - "".to_string() - } - ); - - arguments.push(base64::encode(type_).into()); - arguments.push(wallet_id.into()); - - Ok((query_string, arguments)) -} - -fn operator_to_sql(op: &Operator, arguments: &mut Vec) -> IndyResult { - match *op { - Operator::Eq(ref tag_name, ref target_value) => { - Ok(eq_to_sql(tag_name, target_value, arguments)) - } - Operator::Neq(ref tag_name, ref target_value) => { - Ok(neq_to_sql(tag_name, target_value, arguments)) - } - Operator::Gt(ref tag_name, ref target_value) => { - gt_to_sql(tag_name, target_value, arguments) - } - Operator::Gte(ref tag_name, ref target_value) => { - gte_to_sql(tag_name, target_value, arguments) - } - Operator::Lt(ref tag_name, ref target_value) => { - lt_to_sql(tag_name, target_value, arguments) - } - Operator::Lte(ref tag_name, ref target_value) => { - lte_to_sql(tag_name, target_value, arguments) - } - Operator::Like(ref tag_name, ref target_value) => { - like_to_sql(tag_name, target_value, arguments) - } - Operator::In(ref tag_name, ref target_values) => { - Ok(in_to_sql(tag_name, target_values, arguments)) - } - Operator::And(ref suboperators) => and_to_sql(suboperators, arguments), - Operator::Or(ref suboperators) => or_to_sql(suboperators, arguments), - Operator::Not(ref suboperator) => not_to_sql(suboperator, arguments), - } -} - -fn eq_to_sql(tag_name: &TagName, tag_value: &TargetValue, arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - - arguments.push(tag_value.to_plain().into()); - format!("(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) = ?)", tag_path) -} - -fn neq_to_sql(tag_name: &TagName, tag_value: &TargetValue, arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - - arguments.push(tag_value.to_plain().into()); - format!("(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) != ?)", tag_path) -} - -fn gt_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) > ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn gte_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) >= ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $gt operator", - )), - } -} - -fn lt_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) < ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn 
lte_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) <= ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn like_to_sql( - tag_name: &TagName, - tag_value: &TargetValue, - arguments: &mut Vec, -) -> IndyResult { - match (tag_name, tag_value) { - (&TagName::PlainTagName(_), &TargetValue::Unencrypted(_)) => { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - arguments.push(tag_value.to_plain().into()); - - Ok(format!( - "(JSON_UNQUOTE(JSON_EXTRACT(tags, {})) LIKE ?)", - tag_path - )) - } - _ => Err(err_msg( - IndyErrorKind::WalletQueryError, - "Invalid combination of tag name and value for $lt operator", - )), - } -} - -fn in_to_sql(tag_name: &TagName, tag_values: &[TargetValue], arguments: &mut Vec) -> String { - let tag_path = format!(r#"'$."{}"'"#, tag_name.to_plain()); - let mut in_string = format!("JSON_UNQUOTE(JSON_EXTRACT(tags, {})) IN (", tag_path); - - for (index, tag_value) in tag_values.iter().enumerate() { - in_string.push('?'); - if index < tag_values.len() - 1 { - in_string.push(','); - } else { - in_string.push(')'); - } - - arguments.push(tag_value.to_plain().into()); - } - - in_string -} - -fn and_to_sql(suboperators: &[Operator], arguments: &mut Vec) -> IndyResult { - join_operators(suboperators, " AND ", arguments) -} - -fn or_to_sql(suboperators: &[Operator], arguments: &mut Vec) -> IndyResult { - join_operators(suboperators, " OR ", arguments) -} - -fn not_to_sql(suboperator: &Operator, arguments: &mut Vec) -> IndyResult { - let suboperator_string = operator_to_sql(suboperator, arguments)?; - Ok("NOT (".to_string() + &suboperator_string + ")") -} - -fn join_operators( - operators: &[Operator], - join_str: &str, - arguments: &mut Vec, -) -> IndyResult { - let mut s = String::new(); - - if !operators.is_empty() { - s.push('('); - for (index, operator) in operators.iter().enumerate() { - let operator_string = operator_to_sql(operator, arguments)?; - - s.push_str(&operator_string); - - if index < operators.len() - 1 { - s.push_str(join_str); - } - } - - s.push(')'); - } - - Ok(s) -} - -// FIXME: It is quite smilar for to_string method of tag and value, but for some reason -// to_string uses "". It is added to avoid potential damage as i have no time -// for investigation. 
-trait ToPlain { - fn to_plain(&self) -> String; -} - -impl ToPlain for TagName { - fn to_plain(&self) -> String { - match *self { - TagName::EncryptedTagName(ref v) => base64::encode(v), - TagName::PlainTagName(ref v) => format!("~{}", base64::encode(v)), - } - } -} - -impl ToPlain for TargetValue { - fn to_plain(&self) -> String { - match *self { - TargetValue::Unencrypted(ref s) => s.to_owned(), - TargetValue::Encrypted(ref v) => base64::encode(v), - } - } -} diff --git a/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs b/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs deleted file mode 100644 index 3f11b26b8a..0000000000 --- a/aries/misc/legacy/libvdrtools/indy-wallet/src/wallet.rs +++ /dev/null @@ -1,487 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use futures::future::join; -use indy_api_types::errors::prelude::*; -use indy_utils::{ - crypto::{chacha20poly1305_ietf, hmacsha256}, - wql::Query, -}; -use serde::{Deserialize, Serialize}; -use zeroize::Zeroize; - -use crate::{ - cache::wallet_cache::{WalletCache, WalletCacheHitMetrics}, - encryption::*, - iterator::WalletIterator, - query_encryption::encrypt_query, - storage, - storage::StorageRecord, - RecordOptions, WalletRecord, -}; - -#[derive(Serialize, Deserialize)] -pub struct Keys { - pub type_key: chacha20poly1305_ietf::Key, - pub name_key: chacha20poly1305_ietf::Key, - pub value_key: chacha20poly1305_ietf::Key, - pub item_hmac_key: hmacsha256::Key, - pub tag_name_key: chacha20poly1305_ietf::Key, - pub tag_value_key: chacha20poly1305_ietf::Key, - pub tags_hmac_key: hmacsha256::Key, -} - -#[allow(clippy::new_without_default)] -impl Keys { - pub fn new() -> Keys { - Keys { - type_key: chacha20poly1305_ietf::gen_key(), - name_key: chacha20poly1305_ietf::gen_key(), - value_key: chacha20poly1305_ietf::gen_key(), - item_hmac_key: hmacsha256::gen_key(), - tag_name_key: chacha20poly1305_ietf::gen_key(), - tag_value_key: chacha20poly1305_ietf::gen_key(), - tags_hmac_key: hmacsha256::gen_key(), - } - } - - pub fn serialize_encrypted( - &self, - master_key: &chacha20poly1305_ietf::Key, - ) -> IndyResult> { - let mut serialized = rmp_serde::to_vec(self) - .to_indy(IndyErrorKind::InvalidState, "Unable to serialize keys")?; - - let encrypted = encrypt_as_not_searchable(&serialized, master_key); - - serialized.zeroize(); - Ok(encrypted) - } - - pub fn deserialize_encrypted( - bytes: &[u8], - master_key: &chacha20poly1305_ietf::Key, - ) -> IndyResult { - let mut decrypted = decrypt_merged(bytes, master_key)?; - - let keys: Keys = rmp_serde::from_slice(&decrypted) - .to_indy(IndyErrorKind::InvalidState, "Invalid bytes for Key")?; - - decrypted.zeroize(); - Ok(keys) - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct EncryptedValue { - pub data: Vec, - pub key: Vec, -} - -#[allow(dead_code)] -const ENCRYPTED_KEY_LEN: usize = chacha20poly1305_ietf::TAGBYTES - + chacha20poly1305_ietf::NONCEBYTES - + chacha20poly1305_ietf::KEYBYTES; - -impl EncryptedValue { - pub fn new(data: Vec, key: Vec) -> Self { - Self { data, key } - } - - pub fn encrypt(data: &str, key: &chacha20poly1305_ietf::Key) -> Self { - let value_key = chacha20poly1305_ietf::gen_key(); - EncryptedValue::new( - encrypt_as_not_searchable(data.as_bytes(), &value_key), - encrypt_as_not_searchable(&value_key[..], key), - ) - } - - pub fn decrypt(&self, key: &chacha20poly1305_ietf::Key) -> IndyResult { - let mut value_key_bytes = decrypt_merged(&self.key, key)?; - - let value_key = chacha20poly1305_ietf::Key::from_slice(&value_key_bytes) - .map_err(|err| 
err.extend("Invalid value key"))?; // FIXME: review kind - - value_key_bytes.zeroize(); - - let res = String::from_utf8(decrypt_merged(&self.data, &value_key)?).to_indy( - IndyErrorKind::InvalidState, - "Invalid UTF8 string inside of value", - )?; - - Ok(res) - } - - #[allow(dead_code)] - pub fn to_bytes(&self) -> Vec { - let mut result = self.key.clone(); - result.extend_from_slice(self.data.as_slice()); - result - } - - #[allow(dead_code)] - pub fn from_bytes(joined_data: &[u8]) -> IndyResult { - // value_key is stored as NONCE || CYPHERTEXT. Lenth of CYPHERTHEXT is length of DATA + - // length of TAG. - if joined_data.len() < ENCRYPTED_KEY_LEN { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "Unable to split value_key from value: value too short", - )); // FIXME: review kind - } - - let value_key = joined_data[..ENCRYPTED_KEY_LEN].to_owned(); - let value = joined_data[ENCRYPTED_KEY_LEN..].to_owned(); - Ok(EncryptedValue { - data: value, - key: value_key, - }) - } -} - -pub(super) struct Wallet { - id: String, - storage: Box, - keys: Arc, - cache: WalletCache, -} - -impl Wallet { - pub fn new( - id: String, - storage: Box, - keys: Arc, - cache: WalletCache, - ) -> Wallet { - Wallet { - id, - storage, - keys, - cache, - } - } - - pub async fn add( - &self, - type_: &str, - name: &str, - value: &str, - tags: &HashMap, - cache_record: bool, - ) -> IndyResult<()> { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let evalue = EncryptedValue::encrypt(value, &self.keys.value_key); - - let etags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage.add(&etype, &ename, &evalue, &etags).await?; - if cache_record { - self.cache.add(type_, &etype, &ename, &evalue, &etags); - } - - Ok(()) - } - - pub async fn add_tags( - &self, - type_: &str, - name: &str, - tags: &HashMap, - ) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage - .add_tags(&encrypted_type, &encrypted_name, &encrypted_tags) - .await?; - self.cache - .add_tags(type_, &encrypted_type, &encrypted_name, &encrypted_tags) - .await; - - Ok(()) - } - - pub async fn update_tags( - &self, - type_: &str, - name: &str, - tags: &HashMap, - ) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tags = encrypt_tags( - tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - &self.keys.tags_hmac_key, - ); - - self.storage - .update_tags(&encrypted_type, &encrypted_name, &encrypted_tags) - .await?; - self.cache - .update_tags(type_, &encrypted_type, &encrypted_name, &encrypted_tags) - .await; - - Ok(()) - } - - pub async fn delete_tags(&self, type_: &str, name: &str, tag_names: &[&str]) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - 
&self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_tag_names = - encrypt_tag_names(tag_names, &self.keys.tag_name_key, &self.keys.tags_hmac_key); - - self.storage - .delete_tags(&encrypted_type, &encrypted_name, &encrypted_tag_names[..]) - .await?; - self.cache - .delete_tags( - type_, - &encrypted_type, - &encrypted_name, - &encrypted_tag_names[..], - ) - .await; - - Ok(()) - } - - pub async fn update(&self, type_: &str, name: &str, new_value: &str) -> IndyResult<()> { - let encrypted_type = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let encrypted_name = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let encrypted_value = EncryptedValue::encrypt(new_value, &self.keys.value_key); - - self.storage - .update(&encrypted_type, &encrypted_name, &encrypted_value) - .await?; - self.cache - .update(type_, &encrypted_type, &encrypted_name, &encrypted_value) - .await; - - Ok(()) - } - - pub async fn get( - &self, - type_: &str, - name: &str, - options: &str, - cache_hit_metrics: &WalletCacheHitMetrics, - ) -> IndyResult { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - let result = if self.cache.is_type_cacheable(type_) { - let record_options: RecordOptions = serde_json::from_str(options).to_indy( - IndyErrorKind::InvalidStructure, - "RecordOptions is malformed json", - )?; - - match self.cache.get(type_, &etype, &ename, &record_options).await { - Some(result) => { - cache_hit_metrics.inc_cache_hit(type_).await; - result - } - None => { - // no item in cache, lets retrieve it and put it in cache. - let metrics_fut = cache_hit_metrics.inc_cache_miss(type_); - let full_options = RecordOptions { - retrieve_type: record_options.retrieve_type, - retrieve_value: true, - retrieve_tags: true, - }; - - let full_options = serde_json::to_string(&full_options).unwrap(); - - let storage_fut = self.storage.get(&etype, &ename, &full_options); - // run these two futures in parallel. - let full_result = join(storage_fut, metrics_fut).await.0?; - - // save to cache only if valid data is returned (this should be always true). - if let (Some(evalue), Some(etags)) = (&full_result.value, &full_result.tags) { - self.cache.add(type_, &etype, &ename, evalue, etags); - } - StorageRecord { - id: full_result.id, - type_: if record_options.retrieve_type { - Some(etype) - } else { - None - }, - value: if record_options.retrieve_value { - full_result.value - } else { - None - }, - tags: if record_options.retrieve_tags { - full_result.tags - } else { - None - }, - } - } - } - } else { - let metrics_fut = cache_hit_metrics.inc_not_cached(type_); - let storage_fut = self.storage.get(&etype, &ename, options); - // run these two futures in parallel. - join(storage_fut, metrics_fut).await.0? 
- }; - - let value = match result.value { - None => None, - Some(encrypted_value) => Some(encrypted_value.decrypt(&self.keys.value_key)?), - }; - - let tags = decrypt_tags( - &result.tags, - &self.keys.tag_name_key, - &self.keys.tag_value_key, - )?; - - Ok(WalletRecord::new( - String::from(name), - result.type_.map(|_| type_.to_string()), - value, - tags, - )) - } - - pub async fn delete(&self, type_: &str, name: &str) -> IndyResult<()> { - let etype = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let ename = encrypt_as_searchable( - name.as_bytes(), - &self.keys.name_key, - &self.keys.item_hmac_key, - ); - - self.storage.delete(&etype, &ename).await?; - self.cache.delete(type_, &etype, &ename).await; - - Ok(()) - } - - pub async fn search( - &self, - type_: &str, - query: &str, - options: Option<&str>, - ) -> IndyResult { - let parsed_query: Query = ::serde_json::from_str::(query) - .map_err(|err| IndyError::from_msg(IndyErrorKind::WalletQueryError, err))? - .optimise() - .unwrap_or_default(); - - let encrypted_query = encrypt_query(parsed_query, &self.keys)?; - - let encrypted_type_ = encrypt_as_searchable( - type_.as_bytes(), - &self.keys.type_key, - &self.keys.item_hmac_key, - ); - - let storage_iterator = self - .storage - .search(&encrypted_type_, &encrypted_query, options) - .await?; - - let wallet_iterator = WalletIterator::new(storage_iterator, Arc::clone(&self.keys)); - - Ok(wallet_iterator) - } - - fn close(&mut self) -> IndyResult<()> { - self.storage.close() - } - - pub async fn get_all(&self) -> IndyResult { - let all_items = self.storage.get_all().await?; - Ok(WalletIterator::new(all_items, self.keys.clone())) - } - - pub fn get_id(&self) -> &str { - &self.id - } -} - -impl Drop for Wallet { - fn drop(&mut self) { - self.close().unwrap(); //FIXME pass the error to the API cb - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs b/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs deleted file mode 100644 index 2cd94972ae..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/crypto.rs +++ /dev/null @@ -1,594 +0,0 @@ -use std::{collections::HashMap, str, sync::Arc}; - -use indy_api_types::{errors::prelude::*, WalletHandle}; -use indy_utils::crypto::{base64, chacha20poly1305_ietf}; -use indy_wallet::RecordOptions; - -use crate::{ - domain::crypto::{ - key::{Key, KeyInfo}, - pack::*, - }, - services::{CryptoService, WalletService}, -}; - -pub const PROTECTED_HEADER_ENC: &str = "xchacha20poly1305_ietf"; -pub const PROTECTED_HEADER_TYP: &str = "JWM/1.0"; -pub const PROTECTED_HEADER_ALG_AUTH: &str = "Authcrypt"; -pub const PROTECTED_HEADER_ALG_ANON: &str = "Anoncrypt"; - -pub struct CryptoController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl CryptoController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> CryptoController { - CryptoController { - wallet_service, - crypto_service, - } - } - - /// Creates keys pair and stores in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// key_json: Key information as json. Example: - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set - /// random one will be created). Can be UTF-8, base64 or hex - /// string. "crypto_type": string, // Optional (if not set then ed25519 curve is used); - /// Currently only 'ed25519' value is supported for this field. 
} - /// - /// #Returns - /// verkey: Ver key of generated key pair, also used as key identifier - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn create_key( - &self, - wallet_handle: WalletHandle, - key_info: &KeyInfo, - ) -> IndyResult { - debug!( - "create_key >>> wallet_handle: {:?}, key_info: {:?}", - wallet_handle, - secret!(key_info) - ); - - let key = self.crypto_service.create_key(key_info).await?; - - self.wallet_service - .add_indy_object(wallet_handle, &key.verkey, &key, &HashMap::new()) - .await?; - - let res = key.verkey.to_string(); - debug!("create_key <<< res: {:?}", res); - Ok(res) - } - - /// Signs a message with a key. - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// wallet_handle: wallet handler (created by open_wallet). - /// signer_vk: id (verkey) of message signer. The key must be created by calling indy_create_key - /// or indy_create_and_store_my_did message_raw: a pointer to first byte of message to be - /// signed message_len: a message length - /// - /// #Returns - /// a signature string - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn crypto_sign( - &self, - wallet_handle: WalletHandle, - my_vk: &str, - msg: &[u8], - ) -> IndyResult> { - trace!( - "crypto_sign >>> wallet_handle: {:?}, sender_vk: {:?}, msg: {:?}", - wallet_handle, - my_vk, - msg - ); - - self.crypto_service.validate_key(my_vk).await?; - - let key: Key = self - .wallet_service - .get_indy_object(wallet_handle, my_vk, &RecordOptions::id_value()) - .await?; - - let res = self.crypto_service.sign(&key, msg).await?; - - trace!("crypto_sign <<< res: {:?}", res); - - Ok(res) - } - - /// Verify a signature with a verkey. - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// signer_vk: verkey of the message signer - /// message_raw: a pointer to first byte of message that has been signed - /// message_len: a message length - /// signature_raw: a pointer to first byte of signature to be verified - /// signature_len: a signature length - /// - /// #Returns - /// valid: true - if signature is valid, false - otherwise - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - pub async fn crypto_verify( - &self, - their_vk: &str, - msg: &[u8], - signature: &[u8], - ) -> IndyResult { - trace!( - "crypto_verify >>> their_vk: {:?}, msg: {:?}, signature: {:?}", - their_vk, - msg, - signature - ); - - self.crypto_service.validate_key(their_vk).await?; - - let res = self.crypto_service.verify(their_vk, msg, signature).await?; - - trace!("crypto_verify <<< res: {:?}", res); - - Ok(res) - } - - /// Packs a message by encrypting the message and serializes it in a JWE-like format - /// (Experimental) - /// - /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) - /// for specific DID. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// message: a pointer to the first byte of the message to be packed - /// message_len: the length of the message - /// receivers: a string in the format of a json list which will contain the list of receiver's - /// keys the message is being encrypted for. 
- /// Example: - /// "[, ]" - /// sender: the sender's verkey as a string When null pointer is used in this parameter, - /// anoncrypt is used - /// - /// #Returns - /// a JWE using authcrypt alg is defined below: - /// { - /// "protected": "b64URLencoded({ - /// "enc": "xsalsa20poly1305", - /// "typ": "JWM/1.0", - /// "alg": "Authcrypt", - /// "recipients": [ - /// { - /// "encrypted_key": base64URLencode(libsodium.crypto_box(my_key, their_vk, cek, - /// cek_iv)) "header": { - /// "kid": "base58encode(recipient_verkey)", - /// "sender" : base64URLencode(libsodium.crypto_box_seal(their_vk, - /// base58encode(sender_vk)), "iv" : base64URLencode(cek_iv) - /// } - /// }, - /// ], - /// })", - /// "iv": , - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, - /// cek), "tag": - /// } - /// - /// Alternative example in using anoncrypt alg is defined below: - /// { - /// "protected": "b64URLencoded({ - /// "enc": "xsalsa20poly1305", - /// "typ": "JWM/1.0", - /// "alg": "Anoncrypt", - /// "recipients": [ - /// { - /// "encrypted_key": base64URLencode(libsodium.crypto_box_seal(their_vk, cek)), - /// "header": { - /// "kid": base58encode(recipient_verkey), - /// } - /// }, - /// ], - /// })", - /// "iv": b64URLencode(iv), - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, - /// cek), "tag": b64URLencode(tag) - /// } - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - // TODO: Refactor pack to be more modular to version changes or crypto_scheme changes - // this match statement is super messy, but the easiest way to comply with current architecture - pub async fn pack_msg( - &self, - message: Vec, - receiver_list: Vec, - sender_vk: Option, - wallet_handle: WalletHandle, - ) -> IndyResult> { - //break early and error out if no receivers keys are provided - if receiver_list.is_empty() { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - "No receiver keys found".to_string(), - )); - } - - //generate content encryption key that will encrypt `message` - let cek = chacha20poly1305_ietf::gen_key(); - - let base64_protected = if let Some(sender_vk) = sender_vk { - self.crypto_service.validate_key(&sender_vk).await?; - - //returns authcrypted pack_message format. See Wire message format HIPE for details - self._prepare_protected_authcrypt(&cek, receiver_list, &sender_vk, wallet_handle) - .await? - } else { - //returns anoncrypted pack_message format. See Wire message format HIPE for details - self._prepare_protected_anoncrypt(&cek, receiver_list) - .await? 
- }; - - // Use AEAD to encrypt `message` with "protected" data as "associated data" - let (ciphertext, iv, tag) = - self.crypto_service - .encrypt_plaintext(message, &base64_protected, &cek); - - self._format_pack_message(&base64_protected, &ciphertext, &iv, &tag) - } - - async fn _prepare_protected_anoncrypt( - &self, - cek: &chacha20poly1305_ietf::Key, - receiver_list: Vec, - ) -> IndyResult { - let mut encrypted_recipients_struct: Vec = - Vec::with_capacity(receiver_list.len()); - - for their_vk in receiver_list { - //encrypt sender verkey - let enc_cek = self - .crypto_service - .crypto_box_seal(&their_vk, &cek[..]) - .await?; - - //create recipient struct and push to encrypted list - encrypted_recipients_struct.push(Recipient { - encrypted_key: base64::encode_urlsafe(enc_cek.as_slice()), - header: Header { - kid: their_vk, - sender: None, - iv: None, - }, - }); - } // end for-loop - - self._base64_encode_protected(encrypted_recipients_struct, false) - } - - async fn _prepare_protected_authcrypt( - &self, - cek: &chacha20poly1305_ietf::Key, - receiver_list: Vec, - sender_vk: &str, - wallet_handle: WalletHandle, - ) -> IndyResult { - let mut encrypted_recipients_struct: Vec = vec![]; - - //get my_key from my wallet - let my_key = self - .wallet_service - .get_indy_object(wallet_handle, sender_vk, &RecordOptions::id_value()) - .await?; - - //encrypt cek for recipient - for their_vk in receiver_list { - let (enc_cek, iv) = self - .crypto_service - .crypto_box(&my_key, &their_vk, &cek[..]) - .await?; - - let enc_sender = self - .crypto_service - .crypto_box_seal(&their_vk, sender_vk.as_bytes()) - .await?; - - //create recipient struct and push to encrypted list - encrypted_recipients_struct.push(Recipient { - encrypted_key: base64::encode_urlsafe(enc_cek.as_slice()), - header: Header { - kid: their_vk, - sender: Some(base64::encode_urlsafe(enc_sender.as_slice())), - iv: Some(base64::encode_urlsafe(iv.as_slice())), - }, - }); - } // end for-loop - - self._base64_encode_protected(encrypted_recipients_struct, true) - } - - fn _base64_encode_protected( - &self, - encrypted_recipients_struct: Vec, - alg_is_authcrypt: bool, - ) -> IndyResult { - let alg_val = if alg_is_authcrypt { - String::from(PROTECTED_HEADER_ALG_AUTH) - } else { - String::from(PROTECTED_HEADER_ALG_ANON) - }; - - //structure protected and base64URL encode it - let protected_struct = Protected { - enc: PROTECTED_HEADER_ENC.to_string(), - typ: PROTECTED_HEADER_TYP.to_string(), - alg: alg_val, - recipients: encrypted_recipients_struct, - }; - let protected_encoded = serde_json::to_string(&protected_struct).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize protected field {}", err), - ) - })?; - - Ok(base64::encode_urlsafe(protected_encoded.as_bytes())) - } - - fn _format_pack_message( - &self, - base64_protected: &str, - ciphertext: &str, - iv: &str, - tag: &str, - ) -> IndyResult> { - //serialize pack message and return as vector of bytes - let jwe_struct = JWE { - protected: base64_protected.to_string(), - iv: iv.to_string(), - ciphertext: ciphertext.to_string(), - tag: tag.to_string(), - }; - - serde_json::to_vec(&jwe_struct).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize JWE {}", err), - ) - }) - } - - /// Unpacks a JWE-like formatted message outputted by indy_pack_message (Experimental) - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// jwe_data: a pointer to the first byte of the JWE to be unpacked - /// jwe_len: the length of the JWE message in bytes - /// - /// #Returns - /// if authcrypt was used to pack the message returns this json structure: - /// { - /// message: , - /// sender_verkey: , - /// recipient_verkey: - /// } - /// - /// OR - /// - /// if anoncrypt was used to pack the message returns this json structure: - /// { - /// message: , - /// recipient_verkey: - /// } - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Ledger* - /// Crypto* - pub async fn unpack_msg( - &self, - jwe_struct: JWE, - wallet_handle: WalletHandle, - ) -> IndyResult> { - //decode protected data - let protected_decoded_vec = base64::decode_urlsafe(&jwe_struct.protected)?; - let protected_decoded_str = String::from_utf8(protected_decoded_vec).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to utf8 encode data {}", err), - ) - })?; - //convert protected_data_str to struct - let protected_struct: Protected = - serde_json::from_str(&protected_decoded_str).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to deserialize protected data {}", err), - ) - })?; - - //extract recipient that matches a key in the wallet - let (recipient, is_auth_recipient) = self - ._find_correct_recipient(protected_struct, wallet_handle) - .await?; - - //get cek and sender data - let (sender_verkey_option, cek) = if is_auth_recipient { - self._unpack_cek_authcrypt(recipient.clone(), wallet_handle) - .await - } else { - self._unpack_cek_anoncrypt(recipient.clone(), wallet_handle) - .await - }?; //close cek and sender_data match statement - - //decrypt message - let message = self.crypto_service.decrypt_ciphertext( - &jwe_struct.ciphertext, - &jwe_struct.protected, - &jwe_struct.iv, - &jwe_struct.tag, - &cek, - )?; - - //serialize and return decrypted message - let res = UnpackMessage { - message, - sender_verkey: sender_verkey_option, - recipient_verkey: recipient.header.kid, - }; - - serde_json::to_vec(&res).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to serialize message {}", err), - ) - }) - } - - async fn _find_correct_recipient( - &self, - protected_struct: Protected, - wallet_handle: WalletHandle, - ) -> IndyResult<(Recipient, bool)> { - for recipient in protected_struct.recipients { - let my_key_res = self - .wallet_service - .get_indy_object::( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await; - - if my_key_res.is_ok() { - return Ok((recipient.clone(), recipient.header.sender.is_some())); - } - } - Err(IndyError::from(IndyErrorKind::WalletItemNotFound)) - } - - async fn _unpack_cek_authcrypt( - &self, - recipient: Recipient, - wallet_handle: WalletHandle, - ) -> IndyResult<(Option, chacha20poly1305_ietf::Key)> { - let encrypted_key_vec = base64::decode_urlsafe(&recipient.encrypted_key)?; - let iv = base64::decode_urlsafe(&recipient.header.iv.unwrap())?; - let enc_sender_vk = base64::decode_urlsafe(&recipient.header.sender.unwrap())?; - - //get my private key - let my_key = self - .wallet_service - .get_indy_object( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await?; - - //decrypt sender_vk - let sender_vk_vec = self - .crypto_service - .crypto_box_seal_open(&my_key, enc_sender_vk.as_slice()) - .await?; - let sender_vk = String::from_utf8(sender_vk_vec).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to utf-8 encode sender_vk {}", err), - ) - })?; 
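
// For reference while reading this unpack path: a minimal, self-contained sketch (assuming
// serde + serde_json; all values are placeholders) of the JWE-like envelope documented for
// pack_msg/unpack_msg above. The field names mirror the Recipient/Header/JWE shapes used in
// this controller; the real definitions live in domain::crypto::pack.

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct SketchRecipientHeader {
    kid: String,            // recipient verkey (base58)
    sender: Option<String>, // base64url(sealed sender verkey) - authcrypt only
    iv: Option<String>,     // base64url(per-recipient iv)     - authcrypt only
}

#[derive(Serialize, Deserialize)]
struct SketchRecipient {
    encrypted_key: String,  // base64url(CEK wrapped for this recipient)
    header: SketchRecipientHeader,
}

#[derive(Serialize, Deserialize)]
struct SketchJwe {
    protected: String,      // base64url(JSON with enc/typ/alg/recipients)
    iv: String,
    ciphertext: String,
    tag: String,
}

fn main() {
    // Anoncrypt-style recipient: no sender or per-recipient iv is disclosed.
    let recipient = SketchRecipient {
        encrypted_key: "<base64url wrapped CEK>".into(),
        header: SketchRecipientHeader {
            kid: "<recipient verkey>".into(),
            sender: None,
            iv: None,
        },
    };
    let envelope = SketchJwe {
        protected: "<base64url protected header>".into(),
        iv: "<base64url iv>".into(),
        ciphertext: "<base64url ciphertext>".into(),
        tag: "<base64url tag>".into(),
    };
    println!(
        "{}\n{}",
        serde_json::to_string_pretty(&recipient).unwrap(),
        serde_json::to_string_pretty(&envelope).unwrap()
    );
}
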
- - //decrypt cek - let cek_as_vec = self - .crypto_service - .crypto_box_open( - &my_key, - &sender_vk, - encrypted_key_vec.as_slice(), - iv.as_slice(), - ) - .await?; - - //convert cek to chacha Key struct - let cek: chacha20poly1305_ietf::Key = - chacha20poly1305_ietf::Key::from_slice(&cek_as_vec[..]).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decrypt cek {}", err), - ) - })?; - - Ok((Some(sender_vk), cek)) - } - - async fn _unpack_cek_anoncrypt( - &self, - recipient: Recipient, - wallet_handle: WalletHandle, - ) -> IndyResult<(Option, chacha20poly1305_ietf::Key)> { - let encrypted_key_vec = base64::decode_urlsafe(&recipient.encrypted_key)?; - - //get my private key - let my_key: Key = self - .wallet_service - .get_indy_object( - wallet_handle, - &recipient.header.kid, - &RecordOptions::id_value(), - ) - .await?; - - //decrypt cek - let cek_as_vec = self - .crypto_service - .crypto_box_seal_open(&my_key, encrypted_key_vec.as_slice()) - .await?; - - //convert cek to chacha Key struct - let cek: chacha20poly1305_ietf::Key = - chacha20poly1305_ietf::Key::from_slice(&cek_as_vec[..]).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decrypt cek {}", err), - ) - })?; - - Ok((None, cek)) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/did.rs b/aries/misc/legacy/libvdrtools/src/controllers/did.rs deleted file mode 100644 index 95905df4fd..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/did.rs +++ /dev/null @@ -1,678 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use indy_api_types::{errors::prelude::*, WalletHandle}; -use indy_wallet::{RecordOptions, SearchOptions, WalletService}; - -use crate::{ - domain::crypto::{ - did::{ - Did, DidMetadata, DidValue, DidWithMeta, MyDidInfo, TemporaryDid, TheirDid, - TheirDidInfo, - }, - key::KeyInfo, - }, - services::CryptoService, - utils::crypto::base58::{DecodeBase58, ToBase58}, -}; - -pub struct DidController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl DidController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> DidController { - DidController { - wallet_service, - crypto_service, - } - } - - /// Creates keys (signing and encryption keys) for a new - /// DID (owned by the caller of the library). - /// Identity's DID must be either explicitly provided, or taken as the first 16 bit of verkey. - /// Saves the Identity DID with keys in a secured Wallet, so that it can be used to sign - /// and encrypt transactions. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// did_info: Identity information as json. See domain::crypto::did::MyDidInfo - /// Example: - /// { - /// "did": string, (optional; - /// if not provided and cid param is false then the first 16 bit of the verkey will - /// be used as a new DID; if not provided and cid is true then the full verkey - /// will be used as a new DID; if provided, then keys will be replaced - key - /// rotation use case) "seed": string, (optional) Seed that allows deterministic did - /// creation (if not set random one will be created). Can be - /// UTF-8, base64 or hex string. "crypto_type": string, (optional; if not set then - /// ed25519 curve is used; currently only 'ed25519' value is supported for - /// this field) "cid": bool, (optional; if not set then false is used;) - /// "ledger_type": string, (optional) type of the ledger to create fully qualified did. 
- /// "method_name": string, (optional) method name to create fully qualified did. - /// } - /// - /// #Returns - /// did: DID generated and stored in the wallet - /// verkey: The DIDs verification key - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn create_and_store_my_did( - &self, - wallet_handle: WalletHandle, - my_did_info: MyDidInfo, - ) -> IndyResult<(String, String)> { - trace!( - "create_and_store_my_did > wallet_handle {:?} my_did_info_json {:?}", - wallet_handle, - secret!(&my_did_info) - ); - - let (did, key) = self.crypto_service.create_my_did(&my_did_info).await?; - - if let Ok(current_did) = self._wallet_get_my_did(wallet_handle, &did.did).await { - if did.verkey == current_did.verkey { - let res = Ok((did.did.0, did.verkey)); - trace!("create_and_store_my_did < already exists {:?}", res); - return res; - } else { - Err(err_msg( - IndyErrorKind::DIDAlreadyExists, - format!( - "DID \"{}\" already exists but with different Verkey. You should specify \ - Seed used for initial generation", - did.did.0 - ), - ))?; - } - } - - self.wallet_service - .add_indy_object(wallet_handle, &did.did.0, &did, &HashMap::new()) - .await?; - - let _ = self - .wallet_service - .add_indy_object(wallet_handle, &key.verkey, &key, &HashMap::new()) - .await - .ok(); - - let res = Ok((did.did.0, did.verkey)); - trace!("create_and_store_my_did < {:?}", res); - res - } - - /// Generated temporary keys (signing and encryption keys) for an existing - /// DID (owned by the caller of the library). - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// did: target did to rotate keys. - /// key_info: key information as json. Example: - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set - /// random one will be created). Can be UTF-8, base64 or hex - /// string. "crypto_type": string, (optional; if not set then ed25519 curve is used; - /// currently only 'ed25519' value is supported for this field) - /// } - /// - /// #Returns - /// verkey: The DIDs verification key - /// - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn replace_keys_start( - &self, - wallet_handle: WalletHandle, - key_info: KeyInfo, - my_did: DidValue, - ) -> IndyResult { - trace!( - "replace_keys_start > wallet_handle {:?} key_info_json {:?} my_did {:?}", - wallet_handle, - secret!(&key_info), - my_did - ); - - self.crypto_service.validate_did(&my_did)?; - - let my_did = self._wallet_get_my_did(wallet_handle, &my_did).await?; - - let temporary_key = self.crypto_service.create_key(&key_info).await?; - - let my_temporary_did = TemporaryDid { - did: my_did.did, - verkey: temporary_key.verkey.clone(), - }; - - self.wallet_service - .add_indy_object( - wallet_handle, - &temporary_key.verkey, - &temporary_key, - &HashMap::new(), - ) - .await?; - - self.wallet_service - .add_indy_object( - wallet_handle, - &my_temporary_did.did.0, - &my_temporary_did, - &HashMap::new(), - ) - .await?; - - let res = Ok(my_temporary_did.verkey); - trace!("replace_keys_start < {:?}", res); - res - } - - /// Apply temporary keys as main for an existing DID (owned by the caller of the library). - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). 
- - /// did: DID stored in the wallet - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn replace_keys_apply( - &self, - wallet_handle: WalletHandle, - my_did: DidValue, - ) -> IndyResult<()> { - trace!( - "replace_keys_apply > wallet_handle {:?} my_did {:?}", - wallet_handle, - my_did - ); - - self.crypto_service.validate_did(&my_did)?; - - let my_did = self._wallet_get_my_did(wallet_handle, &my_did).await?; - - let my_temporary_did: TemporaryDid = self - .wallet_service - .get_indy_object(wallet_handle, &my_did.did.0, &RecordOptions::id_value()) - .await?; - - let my_did = Did::from(my_temporary_did); - - self.wallet_service - .update_indy_object(wallet_handle, &my_did.did.0, &my_did) - .await?; - - self.wallet_service - .delete_indy_record::(wallet_handle, &my_did.did.0) - .await?; - - let res = Ok(()); - trace!("replace_keys_apply < {:?}", res); - res - } - - /// Saves their DID for a pairwise connection in a secured Wallet, - /// so that it can be used to verify transaction. - /// Updates DID associated verkey in case DID already exists in the Wallet. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// identity_json: Identity information as json. Example: - /// { - /// "did": string, (required) - /// "verkey": string - /// - optional is case of adding a new DID, and DID is cryptonym: did == verkey, - /// - mandatory in case of updating an existing DID - /// } - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn store_their_did( - &self, - wallet_handle: WalletHandle, - their_did_info: TheirDidInfo, - ) -> IndyResult<()> { - trace!( - "store_their_did > wallet_handle {:?} their_did_info {:?}", - wallet_handle, - their_did_info - ); - - let their_did = self - .crypto_service - .create_their_did(&their_did_info) - .await?; - - self.wallet_service - .upsert_indy_object(wallet_handle, &their_did.did.0, &their_did) - .await?; - - let res = Ok(()); - trace!("store_their_did < {:?}", res); - res - } - - /// Retrieves the information about the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to retrieve information. - /// - /// #Returns - /// did_with_meta: { - /// "did": string - DID stored in the wallet, - /// "verkey": string - The DIDs transport key (ver key, key id), - /// "tempVerkey": string - Temporary DIDs transport key (ver key, key id), exist only during - /// the rotation of the keys. After rotation is done, it becomes a - /// new verkey. 
"metadata": string - The meta information stored with the DID - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn get_my_did_with_meta( - &self, - wallet_handle: WalletHandle, - my_did: DidValue, - ) -> IndyResult { - trace!( - "get_my_did_with_meta > wallet_handle {:?} my_did {:?}", - wallet_handle, - my_did - ); - - let did = self - .wallet_service - .get_indy_object::(wallet_handle, &my_did.0, &RecordOptions::id_value()) - .await?; - - let metadata = self - .wallet_service - .get_indy_opt_object::( - wallet_handle, - &did.did.0, - &RecordOptions::id_value(), - ) - .await?; - - let temp_verkey = self - .wallet_service - .get_indy_opt_object::( - wallet_handle, - &did.did.0, - &RecordOptions::id_value(), - ) - .await?; - - let did_with_meta = DidWithMeta { - did: did.did, - verkey: did.verkey, - temp_verkey: temp_verkey.map(|tv| tv.verkey), - metadata: metadata.map(|m| m.value), - }; - - let did_with_meta = serde_json::to_string(&did_with_meta) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize DID")?; - - let res = Ok(did_with_meta); - trace!("get_my_did_with_meta < {:?}", res); - res - } - - /// Retrieves the information about all DIDs stored in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// - /// #Returns - /// dids: [{ - /// "did": string - DID stored in the wallet, - /// "verkey": string - The DIDs transport key (ver key, key id)., - /// "metadata": string - The meta information stored with the DID - /// }] - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn list_my_dids_with_meta(&self, wallet_handle: WalletHandle) -> IndyResult { - trace!("list_my_dids_with_meta > wallet_handle {:?}", wallet_handle); - - let mut did_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut metadata_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut temporarydid_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &SearchOptions::id_value()) - .await?; - - let mut dids: Vec = Vec::new(); - - let mut metadata_map: HashMap = HashMap::new(); - let mut temporarydid_map: HashMap = HashMap::new(); - - while let Some(record) = metadata_search.fetch_next_record().await? { - let did_id = record.get_id(); - - let tup: DidMetadata = record - .get_value() - .ok_or(err_msg( - IndyErrorKind::InvalidState, - "No value for DID record", - )) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - metadata_map.insert(String::from(did_id), tup.value); - } - - while let Some(record) = temporarydid_search.fetch_next_record().await? { - let did_id = record.get_id(); - - let did: TemporaryDid = record - .get_value() - .ok_or(err_msg( - IndyErrorKind::InvalidState, - "No value for DID record", - )) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - temporarydid_map.insert(did.did.0, did.verkey); - } - - while let Some(did_record) = did_search.fetch_next_record().await? 
{ - let did_id = did_record.get_id(); - - let did: Did = did_record - .get_value() - .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "No value for DID record")) - .and_then(|tags_json| { - serde_json::from_str(tags_json).to_indy( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Did {:?}", did_id), - ) - })?; - - let temp_verkey = temporarydid_map.remove(&did.did.0); - let metadata = metadata_map.remove(&did.did.0); - - let did_with_meta = DidWithMeta { - did: did.did, - verkey: did.verkey, - temp_verkey, - metadata, - }; - - dids.push(did_with_meta); - } - - let dids = serde_json::to_string(&dids) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize DIDs list")?; - - let res = Ok(dids); - trace!("list_my_dids_with_meta < {:?}", res); - res - } - - /// Returns ver key (key id) for the given DID. - /// - /// "indy_key_for_local_did" call looks data stored in the local wallet only and skips freshness - /// checking. - /// - /// Note if you want to get fresh data from the ledger you can use "indy_key_for_did" call - /// instead. - /// - /// Note that "indy_create_and_store_my_did" makes similar wallet record as "indy_create_key". - /// As result we can use returned ver key in all generic crypto and messaging functions. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to resolve key. - /// - /// #Returns - /// key - The DIDs ver key (key id). - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn key_for_local_did( - &self, - wallet_handle: WalletHandle, - did: DidValue, - ) -> IndyResult { - trace!( - "key_for_local_did > wallet_handle {:?} did {:?}", - wallet_handle, - did - ); - - self.crypto_service.validate_did(&did)?; - - // Look to my did - let my_did = match self._wallet_get_my_did(wallet_handle, &did).await { - Ok(my_did) => Some(my_did), - Err(err) if err.kind() == IndyErrorKind::WalletItemNotFound => None, - Err(err) => Err(err)?, - }; - - if let Some(my_did) = my_did { - let res = Ok(my_did.verkey); - trace!("key_for_local_did < my {:?}", res); - return res; - } - - // look to their did - let their_did = self._wallet_get_their_did(wallet_handle, &did).await?; - - let res = Ok(their_did.verkey); - trace!("key_for_local_did < {:?}", res); - res - } - - /// Saves/replaces the meta information for the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - the DID to store metadata. - /// metadata - the meta information that will be store with the DID. - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn set_did_metadata( - &self, - wallet_handle: WalletHandle, - did: DidValue, - metadata: String, - ) -> IndyResult<()> { - trace!( - "set_did_metadata > wallet_handle {:?} did {:?} metadata {:?}", - wallet_handle, - did, - metadata - ); - - self.crypto_service.validate_did(&did)?; - - let metadata = DidMetadata { value: metadata }; - - self.wallet_service - .upsert_indy_object(wallet_handle, &did.0, &metadata) - .await?; - - let res = Ok(()); - trace!("set_did_metadata < {:?}", res); - res - } - - /// Retrieves the meta information for the giving DID in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to retrieve metadata. - /// - /// #Returns - /// metadata - The meta information stored with the DID; Can be null if no metadata was saved - /// for this DID. 
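
For illustration, a small self-contained sketch (assuming serde and serde_json, with placeholder values) of the did-with-meta JSON documented for get_my_did_with_meta and list_my_dids_with_meta above; the key names follow the doc comments in this file.

use serde::Deserialize;
use serde_json::json;

#[derive(Debug, Deserialize)]
struct SketchDidWithMeta {
    did: String,
    verkey: String,
    #[serde(rename = "tempVerkey")]
    temp_verkey: Option<String>, // only present while a key rotation is in flight
    metadata: Option<String>,    // whatever string was stored via set_did_metadata
}

fn main() {
    let parsed: SketchDidWithMeta = serde_json::from_value(json!({
        "did": "<did>",
        "verkey": "<base58 verkey>",
        "tempVerkey": null,
        "metadata": "some metadata"
    }))
    .unwrap();
    println!("{parsed:?}");
}
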
- /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn get_did_metadata( - &self, - wallet_handle: WalletHandle, - did: DidValue, - ) -> IndyResult { - trace!( - "get_did_metadata > wallet_handle {:?} did {:?}", - wallet_handle, - did - ); - - self.crypto_service.validate_did(&did)?; - - let metadata = self - .wallet_service - .get_indy_object::(wallet_handle, &did.0, &RecordOptions::id_value()) - .await?; - - let res = Ok(metadata.value); - trace!("get_did_metadata < {:?}", res); - res - } - - /// Retrieves abbreviated verkey if it is possible otherwise return full verkey. - /// - /// #Params - - /// did: DID. - /// full_verkey: The DIDs verification key, - /// - /// #Returns - /// verkey: The DIDs verification key in either abbreviated or full form - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn abbreviate_verkey(&self, did: DidValue, verkey: String) -> IndyResult { - trace!("abbreviate_verkey > did {:?} verkey {:?}", did, verkey); - - self.crypto_service.validate_did(&did)?; - self.crypto_service.validate_key(&verkey).await?; - - if !did.is_abbreviatable() { - let res = Ok(verkey); - trace!("abbreviate_verkey < not abbreviatable {:?}", res); - return res; - } - - let did = &did.to_unqualified().0.decode_base58()?; - let dverkey = &verkey.decode_base58()?; - - let (first_part, second_part) = dverkey.split_at(16); - - let res = if first_part.eq(did.as_slice()) { - format!("~{}", second_part.to_base58()) - } else { - verkey - }; - - let res = Ok(res); - trace!("abbreviate_verkey < {:?}", res); - res - } - - async fn _update_dependent_entity_reference( - &self, - wallet_handle: WalletHandle, - id: &str, - new_id: &str, - ) -> IndyResult<()> - where - T: ::serde::Serialize + ::serde::de::DeserializeOwned + Sized, - { - if let Ok(record) = self - .wallet_service - .get_indy_record_value::(wallet_handle, id, "{}") - .await - { - self.wallet_service - .delete_indy_record::(wallet_handle, id) - .await?; - self.wallet_service - .add_indy_record::(wallet_handle, new_id, &record, &HashMap::new()) - .await?; - } - - Ok(()) - } - - async fn _wallet_get_my_did( - &self, - wallet_handle: WalletHandle, - my_did: &DidValue, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &my_did.0, &RecordOptions::id_value()) - .await - } - - async fn _wallet_get_their_did( - &self, - wallet_handle: WalletHandle, - their_did: &DidValue, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &their_did.0, &RecordOptions::id_value()) - .await - } -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/mod.rs b/aries/misc/legacy/libvdrtools/src/controllers/mod.rs deleted file mode 100644 index 960f6d8d9d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod crypto; -pub(crate) mod did; -mod non_secrets; -mod wallet; - -pub(crate) use crypto::CryptoController; -pub(crate) use did::DidController; -pub(crate) use non_secrets::NonSecretsController; -pub(crate) use wallet::WalletController; diff --git a/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs b/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs deleted file mode 100644 index 156bdfadf9..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/non_secrets.rs +++ /dev/null @@ -1,504 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use futures::lock::Mutex; -use indy_api_types::{domain::wallet::Tags, errors::prelude::*, SearchHandle, WalletHandle}; -use 
indy_utils::next_search_handle; -use indy_wallet::{RecordOptions, SearchOptions, WalletRecord, WalletSearch, WalletService}; - -pub struct NonSecretsController { - wallet_service: Arc, - searches: Mutex>>>, -} - -impl NonSecretsController { - pub(crate) fn new(wallet_service: Arc) -> NonSecretsController { - NonSecretsController { - wallet_service, - searches: Mutex::new(HashMap::new()), - } - } - - /// Create a new non-secret record in the wallet - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// value: the value of record - /// tags_json: (optional) the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// Note that null means no tags - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - // TODO: change to String -> &str - pub async fn add_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - value: String, - tags: Option, - ) -> IndyResult<()> { - trace!( - "add_record > wallet_handle {:?} type_ {:?} id {:?} value {:?} tags {:?}", - wallet_handle, - type_, - id, - value, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .add_record( - wallet_handle, - &type_, - &id, - &value, - &tags.unwrap_or_default(), - ) - .await?; - - let res = Ok(()); - trace!("add_record < {:?}", res); - res - } - - /// Update a non-secret wallet record value - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// value: the new value of record - pub async fn update_record_value( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - value: String, - ) -> IndyResult<()> { - trace!( - "update_record_value > wallet_handle {:?} type_ {:?} id {:?} value {:?}", - wallet_handle, - type_, - id, - value - ); - - self._check_type(&type_)?; - - self.wallet_service - .update_record_value(wallet_handle, &type_, &id, &value) - .await?; - - let res = Ok(()); - trace!("update_record_value < {:?}", res); - res - } - - /// Update a non-secret wallet record tags - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tags_json: the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - pub async fn update_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tags: Tags, - ) -> IndyResult<()> { - trace!( - 
"update_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", - wallet_handle, - type_, - id, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .update_record_tags(wallet_handle, &type_, &id, &tags) - .await?; - - let res = Ok(()); - trace!("update_record_tags < {:?}", res); - res - } - - /// Add new tags to the wallet record - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tags_json: the record tags used for search and storing meta information as json: - /// { - /// "tagName1": , // string tag (will be stored encrypted) - /// "tagName2": , // string tag (will be stored encrypted) - /// "~tagName3": , // string tag (will be stored un-encrypted) - /// "~tagName4": , // string tag (will be stored un-encrypted) - /// } - /// If tag name starts with "~" the tag will be stored un-encrypted that will allow - /// usage of this tag in complex search queries (comparison, predicates) - /// Encrypted tags can be searched only for exact matching - /// Note if some from provided tags already assigned to the record than - /// corresponding tags values will be replaced - pub async fn add_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tags: Tags, - ) -> IndyResult<()> { - trace!( - "add_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", - wallet_handle, - type_, - id, - tags - ); - - self._check_type(&type_)?; - - self.wallet_service - .add_record_tags(wallet_handle, &type_, &id, &tags) - .await?; - - let res = Ok(()); - trace!("add_record_tags < {:?}", tags); - res - } - - /// Delete tags from the wallet record - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// tag_names_json: the list of tag names to remove from the record as json array: - /// ["tagName1", "tagName2", ...] 
- pub async fn delete_record_tags( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - tag_names_json: String, - ) -> IndyResult<()> { - trace!( - "delete_record_tags > wallet_handle {:?} type_ {:?} id {:?} tag_names_json {:?}", - wallet_handle, - type_, - id, - tag_names_json - ); - - self._check_type(&type_)?; - - let tag_names: Vec<&str> = serde_json::from_str(&tag_names_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize tag names", - )?; - - self.wallet_service - .delete_record_tags(wallet_handle, &type_, &id, &tag_names) - .await?; - - let res = Ok(()); - trace!("delete_record_tags < {:?}", res); - res - } - - /// Delete an existing wallet record in the wallet - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: record type - /// id: the id of record - pub async fn delete_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - ) -> IndyResult<()> { - trace!( - "delete_record > wallet_handle {:?} type_ {:?} id {:?}", - wallet_handle, - type_, - id - ); - - self._check_type(&type_)?; - - self.wallet_service - .delete_record(wallet_handle, &type_, &id) - .await?; - - let res = Ok(()); - trace!("delete_record < {:?}", res); - res - } - - /// Get an wallet record by id - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// id: the id of record - /// options_json: //TODO: FIXME: Think about replacing by bitmask - /// { - /// retrieveType: (optional, false by default) Retrieve record type, - /// retrieveValue: (optional, true by default) Retrieve record value, - /// retrieveTags: (optional, false by default) Retrieve record tags - /// } - /// #Returns - /// wallet record json: - /// { - /// id: "Some id", - /// type: "Some type", // present only if retrieveType set to true - /// value: "Some value", // present only if retrieveValue set to true - /// tags: , // present only if retrieveTags set to true - /// } - pub async fn get_record( - &self, - wallet_handle: WalletHandle, - type_: String, - id: String, - options_json: String, - ) -> IndyResult { - trace!( - "get_record > wallet_handle {:?} type_ {:?} id {:?} options_json {:?}", - wallet_handle, - type_, - id, - options_json - ); - - self._check_type(&type_)?; - - serde_json::from_str::(&options_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize options", - )?; - - let record = self - .wallet_service - .get_record(wallet_handle, &type_, &id, &options_json) - .await?; - - let record = serde_json::to_string(&record).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot serialize WalletRecord", - )?; - - let res = Ok(record); - trace!("get_record < {:?}", res); - res - } - - /// Search for wallet records. - /// - /// Note instead of immediately returning of fetched records - /// this call returns wallet_search_handle that can be used later - /// to fetch records by small batches (with indy_fetch_wallet_search_next_records). 
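
To make the batched fetching above concrete, here is a self-contained sketch (assuming serde and serde_json; record contents are placeholders) of consuming one batch in the SearchRecords shape defined further below in this file and documented for fetch_search_next_records.

use serde::Deserialize;
use serde_json::json;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SketchSearchBatch {
    total_count: Option<usize>,                 // present only if retrieveTotalCount was set
    records: Option<Vec<serde_json::Value>>,    // None once the search is exhausted
}

fn main() {
    let batch: SketchSearchBatch = serde_json::from_value(json!({
        "totalCount": 2,
        "records": [
            { "id": "record-1", "value": "some value", "tags": { "~tagName3": "str" } },
            { "id": "record-2", "value": "other value" }
        ]
    }))
    .unwrap();

    // A caller would keep requesting batches until `records` comes back empty,
    // then close the search handle.
    println!("{:?} of {:?}", batch.records.map(|r| r.len()), batch.total_count);
}
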
- /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// type_: allows to separate different record types collections - /// query_json: MongoDB style query to wallet record tags: - /// { - /// "tagName": "tagValue", - /// $or: { - /// "tagName2": { $regex: 'pattern' }, - /// "tagName3": { $gte: '123' }, - /// }, - /// } - /// options_json: //TODO: FIXME: Think about replacing by bitmask - /// { - /// retrieveRecords: (optional, true by default) If false only "counts" will be calculated, - /// retrieveTotalCount: (optional, false by default) Calculate total count, - /// retrieveType: (optional, false by default) Retrieve record type, - /// retrieveValue: (optional, true by default) Retrieve record value, - /// retrieveTags: (optional, false by default) Retrieve record tags, - /// } - /// #Returns - /// search_handle: Wallet search handle that can be used later - /// to fetch records by small batches (with indy_fetch_wallet_search_next_records) - pub async fn open_search( - &self, - wallet_handle: WalletHandle, - type_: String, - query_json: String, - options_json: String, - ) -> IndyResult { - trace!( - "open_search > wallet_handle {:?} type_ {:?} query_json {:?} options_json {:?}", - wallet_handle, - type_, - query_json, - options_json - ); - - self._check_type(&type_)?; - - serde_json::from_str::(&options_json).to_indy( - IndyErrorKind::InvalidStructure, - "Cannot deserialize options", - )?; - - let search = self - .wallet_service - .search_records(wallet_handle, &type_, &query_json, &options_json) - .await?; - - let search_handle = next_search_handle(); - - self.searches - .lock() - .await - .insert(search_handle, Arc::new(Mutex::new(search))); - - let res = Ok(search_handle); - trace!("open_search < {:?}", search_handle); - res - } - - /// Fetch next records for wallet search. - /// - /// Not if there are no records this call returns WalletNoRecords error. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet) - /// wallet_search_handle: wallet search handle (created by indy_open_wallet_search) - /// count: Count of records to fetch - /// - /// #Returns - /// wallet records json: - /// { - /// totalCount: , // present only if retrieveTotalCount set to true - /// records: [{ // present only if retrieveRecords set to true - /// id: "Some id", - /// type: "Some type", // present only if retrieveType set to true - /// value: "Some value", // present only if retrieveValue set to true - /// tags: , // present only if retrieveTags set to true - /// }], - /// } - pub async fn fetch_search_next_records( - &self, - wallet_handle: WalletHandle, - wallet_search_handle: SearchHandle, - count: usize, - ) -> IndyResult { - trace!( - "fetch_search_next_records > wallet_handle {:?} wallet_search_handle {:?} count {:?}", - wallet_handle, - wallet_search_handle, - count - ); - - let search_mut = { - self.searches - .lock() - .await - .get(&wallet_search_handle) - .ok_or_else(|| { - err_msg(IndyErrorKind::InvalidWalletHandle, "Unknown search handle") - })? - .clone() - }; - - let mut search = search_mut.lock().await; - - let mut records: Vec = Vec::new(); - - for _ in 0..count { - match search.fetch_next_record().await? 
{ - Some(record) => records.push(record), - None => break, - } - } - - let search_result = SearchRecords { - total_count: search.get_total_count()?, - records: if records.is_empty() { - None - } else { - Some(records) - }, - }; - - let search_result = serde_json::to_string(&search_result).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize SearchRecords", - )?; - - let res = Ok(search_result); - trace!("fetch_search_next_records < {:?}", res); - res - } - - /// Close wallet search (make search handle invalid) - /// - /// #Params - /// wallet_search_handle: wallet search handle - pub async fn close_search(&self, wallet_search_handle: SearchHandle) -> IndyResult<()> { - trace!( - "close_search > wallet_search_handle {:?}", - wallet_search_handle - ); - - self.searches - .lock() - .await - .remove(&wallet_search_handle) - .ok_or_else(|| err_msg(IndyErrorKind::InvalidWalletHandle, "Unknown search handle"))?; - - let res = Ok(()); - trace!("close_search < {:?}", res); - res - } - - fn _check_type(&self, _type: &str) -> IndyResult<()> { - // if type_.starts_with(WalletService::PREFIX) { - // Err(err_msg( - // IndyErrorKind::WalletAccessFailed, - // format!("Record of type \"{}\" is not available for fetching", type_), - // ))?; - // } - - Ok(()) - } -} - -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct SearchRecords { - pub total_count: Option, - pub records: Option>, -} diff --git a/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs b/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs deleted file mode 100644 index bc4eaf37ef..0000000000 --- a/aries/misc/legacy/libvdrtools/src/controllers/wallet.rs +++ /dev/null @@ -1,436 +0,0 @@ -use std::sync::Arc; - -// use async_std::task::spawn_blocking; -use indy_api_types::{ - domain::wallet::{Config, Credentials, ExportConfig, KeyConfig}, - errors::prelude::*, - WalletHandle, -}; -use indy_utils::crypto::{ - chacha20poly1305_ietf, chacha20poly1305_ietf::Key as MasterKey, randombytes, -}; -use indy_wallet::{iterator::WalletIterator, KeyDerivationData, WalletService}; - -use crate::{services::CryptoService, utils::crypto::base58::ToBase58}; - -pub struct WalletController { - wallet_service: Arc, - crypto_service: Arc, -} - -impl WalletController { - pub(crate) fn new( - wallet_service: Arc, - crypto_service: Arc, - ) -> WalletController { - WalletController { - wallet_service, - crypto_service, - } - } - - /// Create a new secure wallet. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. 
- /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) ARGON2I_INT - /// - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call } - /// - /// #Returns - /// err: Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn create(&self, config: Config, credentials: Credentials) -> IndyResult<()> { - trace!( - "_create > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &credentials.key, - &credentials.key_derivation_method, - ); - - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .create_wallet(&config, &credentials, (&key_data, &key)) - .await; - - trace!("create < {:?}", res); - res - } - - /// Open the wallet. - /// - /// Wallet must be previously created with indy_create_wallet method. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with - /// indy_register_wallet_storage call. "storage_config": optional, Storage - /// configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. For - /// 'default' storage type configuration is: { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// "cache": optional, Cache configuration json. If omitted the cache is disabled - /// (default). { - /// "size": optional, Number of items in cache, - /// "entities": List, Types of items being cached. eg. ["vdrtools::Did", - /// "vdrtools::Key"] "algorithm" optional, cache algorithm, defaults to - /// lru, which is the only one supported for now. } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "rekey": optional, If present than wallet master key - /// will be rotated to a new one. "storage_credentials": optional Credentials - /// for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. "key_derivation_method": - /// optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). 
- /// RAW keys can be generated with indy_generate_wallet_key call - /// "rekey_derivation_method": optional Algorithm to use for wallet rekey - /// derivation: ARGON2I_MOD - derive secured wallet master rekey - /// (used by default) ARGON2I_INT - derive secured wallet master - /// rekey (less secured but faster) RAW - raw wallet rekey master - /// provided (skip derivation). RAW keys can be generated - /// with indy_generate_wallet_key call } - /// - /// #Returns - /// err: Error code - /// handle: Handle to opened wallet to use in methods that require wallet access. - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn open(&self, config: Config, credentials: Credentials) -> IndyResult { - trace!( - "open > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (wallet_handle, key_derivation_data, rekey_data) = self - .wallet_service - .open_wallet_prepare(&config, &credentials) - .await?; - - let key = Self::_derive_key(&key_derivation_data).await?; - - let rekey = if let Some(rekey_data) = rekey_data { - Some(Self::_derive_key(&rekey_data).await?) - } else { - None - }; - - let res = self - .wallet_service - .open_wallet_continue(wallet_handle, (&key, rekey.as_ref()), config.cache) - .await; - - trace!("open < res: {:?}", res); - - res - } - - /// Closes opened wallet and frees allocated resources. - /// - /// #Params - /// wallet_handle: wallet handle returned by indy_open_wallet. - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn close(&self, wallet_handle: WalletHandle) -> IndyResult<()> { - trace!("close > handle: {:?}", wallet_handle); - - self.wallet_service.close_wallet(wallet_handle).await?; - - trace!("close < res: ()"); - Ok(()) - } - - /// Deletes created wallet. - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). 
- /// RAW keys can be generated with indy_generate_wallet_key call } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn delete(&self, config: Config, credentials: Credentials) -> IndyResult<()> { - trace!( - "delete > config: {:?} credentials: {:?}", - &config, - secret!(&credentials) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (metadata, key_derivation_data) = self - .wallet_service - .delete_wallet_prepare(&config, &credentials) - .await?; - - let key = Self::_derive_key(&key_derivation_data).await?; - - let res = self - .wallet_service - .delete_wallet_continue(&config, &credentials, &metadata, &key) - .await; - - trace!("delete < {:?}", res); - res - } - - /// Exports opened wallet - /// - /// #Params: - /// wallet_handle: wallet handle returned by indy_open_wallet - /// export_config: JSON containing settings for input operation. - /// { - /// "path": , Path of the file that contains exported wallet content - /// "key": , Key or passphrase used for wallet export key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "key_derivation_method": optional Algorithm to use for - /// wallet export key derivation: ARGON2I_MOD - derive secured - /// export key (used by default) ARGON2I_INT - derive secured - /// export key (less secured but faster) RAW - raw export key - /// provided (skip derivation). RAW keys can be generated - /// with indy_generate_wallet_key call } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn export( - &self, - wallet_handle: WalletHandle, - export_config: ExportConfig, - ) -> IndyResult<()> { - trace!( - "export > handle: {:?} export_config: {:?}", - wallet_handle, - secret!(&export_config) - ); - - let key_data = KeyDerivationData::from_passphrase_with_new_salt( - &export_config.key, - &export_config.key_derivation_method, - ); - - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .export_wallet(wallet_handle, &export_config, 0, (&key_data, &key)) - .await; - - trace!("export < {:?}", res); - res - } - - /// Creates a new secure wallet and then imports its content - /// according to fields provided in import_config - /// This can be seen as an indy_create_wallet call with additional content import - /// - /// #Params - /// config: Wallet configuration json. - /// { - /// "id": string, Identifier of the wallet. - /// Configured storage uses this identifier to lookup exact wallet data placement. - /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. - /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage - /// call. "storage_config": optional, Storage configuration json. Storage type - /// defines set of supported keys. Can be optional if storage supports - /// default configuration. For 'default' storage type configuration is: - /// { - /// "path": optional, Path to the directory with wallet files. - /// Defaults to $HOME/.indy_client/wallet. - /// Wallet will be stored in the file {path}/{id}/sqlite.db - /// } - /// } - /// credentials: Wallet credentials json - /// { - /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key - /// derivation methods. "storage_credentials": optional Credentials for wallet - /// storage. 
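
For orientation, a minimal sketch (assuming serde_json; all values are placeholders) of the config and credentials JSON shapes that create/open/delete/import document in this file:

use serde_json::json;

fn main() {
    let config = json!({
        "id": "wallet-1",
        "storage_type": "default",
        "storage_config": { "path": "<path to wallet directory>" }
    });

    let credentials = json!({
        "key": "<key or passphrase>",
        "key_derivation_method": "RAW" // or ARGON2I_MOD / ARGON2I_INT
    });

    println!("{config}\n{credentials}");
}
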
Storage type defines set of supported keys. Can be - /// optional if storage supports default configuration. For - /// 'default' storage type should be empty. "key_derivation_method": optional - /// Algorithm to use for wallet key derivation: ARGON2I_MOD - - /// derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call } - /// import_config: Import settings json. - /// { - /// "path": , path of the file that contains exported wallet content - /// "key": , key used for export of the wallet - /// } - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn import( - &self, - config: Config, - credentials: Credentials, - import_config: ExportConfig, - ) -> IndyResult<()> { - trace!( - "import > config: {:?} credentials: {:?} import_config: {:?}", - &config, - secret!(&credentials), - secret!(&import_config) - ); - // TODO: try to refactor to avoid usage of continue methods - - let (wallet_handle, key_data, import_key_data) = self - .wallet_service - .import_wallet_prepare(&config, &credentials, &import_config) - .await?; - - let import_key = Self::_derive_key(&import_key_data).await?; - let key = Self::_derive_key(&key_data).await?; - - let res = self - .wallet_service - .import_wallet_continue(wallet_handle, &config, &credentials, (import_key, key)) - .await; - - trace!("import < {:?}", res); - - res - } - - pub async fn get_all(&self, handle: WalletHandle) -> IndyResult { - self.wallet_service.get_all(handle).await - } - - /// Generate wallet master key. - /// Returned key is compatible with "RAW" key derivation method. - /// It allows to avoid expensive key derivation for use cases when wallet keys can be stored in - /// a secure enclave. - /// - /// #Params - /// config: (optional) key configuration json. - /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random - /// one will be created). Can be UTF-8, base64 or hex string. - /// } - /// - /// #Returns - /// err: Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub fn generate_key(&self, config: Option) -> IndyResult { - trace!("generate_key > config: {:?}", secret!(&config)); - - let seed = config.as_ref().and_then(|config| config.seed.as_deref()); - - let key = match self.crypto_service.convert_seed(seed)? 
{ - Some(seed) => randombytes::randombytes_deterministic( - chacha20poly1305_ietf::KEYBYTES, - &randombytes::Seed::from_slice(&seed[..])?, - ), - None => randombytes::randombytes(chacha20poly1305_ietf::KEYBYTES), - }; - - let res = key[..].to_base58(); - - trace!("generate_key < res: {:?}", res); - Ok(res) - } - - async fn _derive_key(key_data: &KeyDerivationData) -> IndyResult { - key_data.calc_master_key() - // let res = spawn_blocking(move || key_data.calc_master_key()).await?; - // Ok(res) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs deleted file mode 100644 index 9080a82f24..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential.rs +++ /dev/null @@ -1,56 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::{CredentialSignature, RevocationRegistry, SignatureCorrectnessProof, Witness}; - -use super::{ - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct Credential { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub values: CredentialValues, - pub signature: CredentialSignature, - pub signature_correctness_proof: SignatureCorrectnessProof, - pub rev_reg: Option, - pub witness: Option, -} - -impl Credential { - pub const QUALIFIABLE_TAGS: [&'static str; 5] = [ - "issuer_did", - "cred_def_id", - "schema_id", - "schema_issuer_did", - "rev_reg_id", - ]; - pub const EXTRA_TAG_SUFFIX: &'static str = "_short"; - - pub fn add_extra_tag_suffix(tag: &str) -> String { - format!("{}{}", tag, Self::EXTRA_TAG_SUFFIX) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct CredentialInfo { - pub referent: String, - pub attrs: ShortCredentialValues, - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub cred_rev_id: Option, -} - -pub type ShortCredentialValues = HashMap; - -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] -pub struct CredentialValues(pub HashMap); - -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] -pub struct AttributeValues { - pub raw: String, - pub encoded: String, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs deleted file mode 100644 index 8a765e782e..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_definition.rs +++ /dev/null @@ -1,437 +0,0 @@ -use std::collections::HashMap; - -use indy_api_types::{ - errors::{IndyErrorKind, IndyResult}, - IndyError, -}; -use ursa::cl::{ - CredentialKeyCorrectnessProof, CredentialPrimaryPublicKey, CredentialPrivateKey, - CredentialRevocationPublicKey, -}; - -use super::{ - super::{ - anoncreds::{schema::SchemaId, DELIMITER}, - crypto::did::DidValue, - }, - indy_identifiers, -}; -use crate::utils::qualifier; - -pub const CL_SIGNATURE_TYPE: &str = "CL"; - -#[derive(Deserialize, Debug, Serialize, PartialEq, Clone)] -pub enum SignatureType { - CL, -} - -impl SignatureType { - pub fn to_str(&self) -> &'static str { - match *self { - SignatureType::CL => CL_SIGNATURE_TYPE, - } - } -} - -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct CredentialDefinitionConfig { - #[serde(default)] - pub support_revocation: bool, -} - -#[derive(Debug, Serialize, Deserialize)] -pub 
struct CredentialDefinitionData { - pub primary: CredentialPrimaryPublicKey, - #[serde(skip_serializing_if = "Option::is_none")] - pub revocation: Option, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CredentialDefinitionV1 { - pub id: CredentialDefinitionId, - pub schema_id: SchemaId, - #[serde(rename = "type")] - pub signature_type: SignatureType, - pub tag: String, - pub value: CredentialDefinitionData, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum CredentialDefinition { - #[serde(rename = "1.0")] - CredentialDefinitionV1(CredentialDefinitionV1), -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct TemporaryCredentialDefinition { - pub cred_def: CredentialDefinition, - pub cred_def_priv_key: CredentialDefinitionPrivateKey, - pub cred_def_correctness_proof: CredentialDefinitionCorrectnessProof, -} - -impl CredentialDefinition { - pub fn to_unqualified(self) -> CredentialDefinition { - match self { - CredentialDefinition::CredentialDefinitionV1(cred_def) => { - CredentialDefinition::CredentialDefinitionV1(CredentialDefinitionV1 { - id: cred_def.id.to_unqualified(), - schema_id: cred_def.schema_id.to_unqualified(), - signature_type: cred_def.signature_type, - tag: cred_def.tag, - value: cred_def.value, - }) - } - } - } -} - -impl From for CredentialDefinitionV1 { - fn from(cred_def: CredentialDefinition) -> Self { - match cred_def { - CredentialDefinition::CredentialDefinitionV1(cred_def) => cred_def, - } - } -} - -pub type CredentialDefinitions = HashMap; - -pub fn cred_defs_map_to_cred_defs_v1_map( - cred_defs: CredentialDefinitions, -) -> HashMap { - cred_defs - .into_iter() - .map(|(cred_def_id, cred_def)| (cred_def_id, CredentialDefinitionV1::from(cred_def))) - .collect() -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialDefinitionPrivateKey { - pub value: CredentialPrivateKey, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialDefinitionCorrectnessProof { - pub value: CredentialKeyCorrectnessProof, -} - -qualifiable_type!(CredentialDefinitionId); - -impl CredentialDefinitionId { - pub const PREFIX: &'static str = "/anoncreds/v0/CLAIM_DEF/"; - pub const MARKER: &'static str = "3"; - - pub fn new( - did: &DidValue, - schema_id: &SchemaId, - signature_type: &str, - tag: &str, - ) -> IndyResult { - match did.get_method() { - Some(method) if method.starts_with("indy") => Ok(CredentialDefinitionId(format!( - "{}{}{}/{}", - did.0, - Self::PREFIX, - &schema_id.0, - tag - ))), - Some(_method) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unsupported DID method", - )), - None => { - let tag = if tag.is_empty() { - "".to_owned() - } else { - format!("{}{}", DELIMITER, tag) - }; - - let id = CredentialDefinitionId(format!( - "{}{}{}{}{}{}{}{}", - did.0, - DELIMITER, - Self::MARKER, - DELIMITER, - signature_type, - DELIMITER, - schema_id.0, - tag - )); - - Ok(id) - } - } - } - - pub fn parts(&self) -> Option<(DidValue, String, SchemaId, String)> { - trace!("CredentialDefinitionId::parts >> self.0 {}", self.0); - if let Some((did, seq_no, tag)) = - indy_identifiers::try_parse_indy_creddef_id(self.0.as_str()) - { - trace!("{:?} {:?} {:?}", did, seq_no, tag); - return Some(( - DidValue(did), - CL_SIGNATURE_TYPE.to_owned(), - SchemaId(seq_no), - tag, - )); - } - - let parts = self.0.split_terminator(DELIMITER).collect::>(); - - if parts.len() == 4 { - // Th7MpTaRZVRYnPiabds81Y:3:CL:1 - let did = parts[0].to_string(); - let signature_type = 
parts[2].to_string(); - let schema_id = parts[3].to_string(); - let tag = String::new(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 5 { - // Th7MpTaRZVRYnPiabds81Y:3:CL:1:tag - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3].to_string(); - let tag = parts[4].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 7 { - // NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3..7].join(DELIMITER); - let tag = String::new(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 8 { - // NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag - let did = parts[0].to_string(); - let signature_type = parts[2].to_string(); - let schema_id = parts[3..7].join(DELIMITER); - let tag = parts[7].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 9 { - // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:3:tag - warn!("Deprecated format of FQ CredDef ID is used (creddef: suffix)"); - let did = parts[2..5].join(DELIMITER); - let signature_type = parts[6].to_string(); - let schema_id = parts[7].to_string(); - let tag = parts[8].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - if parts.len() == 16 { - // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov: - // NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag - warn!("Deprecated format of FQ CredDef ID is used (creddef: suffix)"); - let did = parts[2..5].join(DELIMITER); - let signature_type = parts[6].to_string(); - let schema_id = parts[7..15].join(DELIMITER); - let tag = parts[15].to_string(); - return Some((DidValue(did), signature_type, SchemaId(schema_id), tag)); - } - - None - } - - pub fn issuer_did(&self) -> Option { - self.parts().map(|(did, _, _, _)| did) - } - - pub fn qualify(&self, method: &str) -> IndyResult { - match self.parts() { - Some((did, signature_type, schema_id, tag)) => CredentialDefinitionId::new( - &did.qualify(method), - &schema_id.qualify(method)?, - &signature_type, - &tag, - ), - None => Ok(self.clone()), - } - } - - pub fn to_unqualified(&self) -> CredentialDefinitionId { - match self.parts() { - Some((did, signature_type, schema_id, tag)) => CredentialDefinitionId::new( - &did.to_unqualified(), - &schema_id.to_unqualified(), - &signature_type, - &tag, - ) - .expect("Can't create unqualified CredentialDefinitionId"), - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _signature_type() -> String { - "CL".to_string() - } - - fn _tag() -> String { - "tag".to_string() - } - - fn _did_qualified() -> DidValue { - DidValue("did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _schema_id_seq_no() -> SchemaId { - SchemaId("1".to_string()) - } - - fn _schema_id_unqualified() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string()) - } - - fn _schema_id_qualified() -> SchemaId { - SchemaId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/SCHEMA/gvt/1.0".to_string(), - ) - } - - fn _cred_def_id_unqualified() -> CredentialDefinitionId { - CredentialDefinitionId( - 
"NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag".to_string(), - ) - } - - fn _cred_def_id_unqualified_with_schema_as_seq_no() -> CredentialDefinitionId { - CredentialDefinitionId("NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag".to_string()) - } - - fn _cred_def_id_unqualified_with_schema_as_seq_no_without_tag() -> CredentialDefinitionId { - CredentialDefinitionId("NcYxiDXkpYi6ov5FcYDi1e:3:CL:1".to_string()) - } - - fn _cred_def_id_unqualified_without_tag() -> CredentialDefinitionId { - CredentialDefinitionId( - "NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string(), - ) - } - - fn _cred_def_id_qualified_with_schema_as_seq_no() -> CredentialDefinitionId { - CredentialDefinitionId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag" - .to_string(), - ) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified() { - assert_eq!( - _cred_def_id_unqualified(), - _cred_def_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag() { - assert_eq!( - _cred_def_id_unqualified_without_tag(), - _cred_def_id_unqualified_without_tag().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag_with_schema_as_seq_no() { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no(), - _cred_def_id_unqualified_with_schema_as_seq_no().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag_with_schema_as_seq_no_without_tag( - ) { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag(), - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag().to_unqualified() - ); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_qualified_with_schema_as_seq_no() { - assert_eq!( - _cred_def_id_unqualified_with_schema_as_seq_no(), - _cred_def_id_qualified_with_schema_as_seq_no().to_unqualified() - ); - } - } - - mod parts { - use super::*; - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified() { - let (did, signature_type, schema_id, tag) = _cred_def_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_unqualified(), schema_id); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_without_tag() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_without_tag().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_unqualified(), schema_id); - assert_eq!(String::new(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_with_schema_as_seq() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_with_schema_as_seq_no() - .parts() - .unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_unqualified_with_schema_as_seq_without_tag() { - let (did, signature_type, schema_id, tag) = - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag() - .parts() - .unwrap(); - assert_eq!(_did(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(String::new(), tag); - } - - #[test] - fn test_cred_def_id_parts_for_id_as_qualified_with_schema_as_seq() { - let (did, signature_type, schema_id, tag) = 
- _cred_def_id_qualified_with_schema_as_seq_no() - .parts() - .unwrap(); - assert_eq!(_did_qualified(), did); - assert_eq!(_signature_type(), signature_type); - assert_eq!(_schema_id_seq_no(), schema_id); - assert_eq!(_tag(), tag); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs deleted file mode 100644 index d980b5d52c..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_offer.rs +++ /dev/null @@ -1,30 +0,0 @@ -use ursa::cl::{CredentialKeyCorrectnessProof, Nonce}; - -use super::{credential_definition::CredentialDefinitionId, schema::SchemaId}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct CredentialOffer { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub key_correctness_proof: CredentialKeyCorrectnessProof, - pub nonce: Nonce, - #[serde(skip_serializing_if = "Option::is_none")] - pub method_name: Option, -} - -impl CredentialOffer { - pub fn to_unqualified(self) -> CredentialOffer { - let method_name = if self.cred_def_id.is_fully_qualified() { - self.cred_def_id.get_method() - } else { - None - }; - CredentialOffer { - method_name, - schema_id: self.schema_id.to_unqualified(), - cred_def_id: self.cred_def_id.to_unqualified(), - key_correctness_proof: self.key_correctness_proof, - nonce: self.nonce, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs deleted file mode 100644 index a85416094d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/credential_request.rs +++ /dev/null @@ -1,34 +0,0 @@ -use ursa::cl::{ - BlindedCredentialSecrets, BlindedCredentialSecretsCorrectnessProof, - CredentialSecretsBlindingFactors, Nonce, -}; - -use super::{super::crypto::did::DidValue, credential_definition::CredentialDefinitionId}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialRequest { - pub prover_did: DidValue, - pub cred_def_id: CredentialDefinitionId, - pub blinded_ms: BlindedCredentialSecrets, - pub blinded_ms_correctness_proof: BlindedCredentialSecretsCorrectnessProof, - pub nonce: Nonce, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct CredentialRequestMetadata { - pub master_secret_blinding_data: CredentialSecretsBlindingFactors, - pub nonce: Nonce, - pub master_secret_name: String, -} - -impl CredentialRequest { - pub fn to_unqualified(self) -> CredentialRequest { - CredentialRequest { - prover_did: self.prover_did.to_unqualified(), - cred_def_id: self.cred_def_id.to_unqualified(), - blinded_ms: self.blinded_ms, - blinded_ms_correctness_proof: self.blinded_ms_correctness_proof, - nonce: self.nonce, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs deleted file mode 100644 index 9b5fcba751..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/indy_identifiers.rs +++ /dev/null @@ -1,155 +0,0 @@ -use lazy_static::lazy_static; -use regex::Regex; - -use super::{ - super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, - revocation_registry_definition::CL_ACCUM, schema::SchemaId, -}; - -const NAMESPACE_RE: &str = r"[a-z][a-z0-9_:-]*"; -const DID_RE: &str = r"[1-9A-HJ-NP-Za-km-z]*"; //base58 -const SCHEMA_TYPE: &str = super::schema::SchemaId::PREFIX; -const SCHEMA_NAME_RE: &str = r"[^/]*"; -const 
SCHEMA_VER_RE: &str = r"[^/]*"; -const SCHEMA_SEQ_NO_RE: &str = r"[0-9]*"; - -lazy_static! { - static ref SCHEMA_RE: String = format!( - "(did:indy(:{NAMESPACE_RE})?:{DID_RE}){SCHEMA_TYPE}({SCHEMA_NAME_RE})/({SCHEMA_VER_RE})" - ); - static ref SCHEMA_REF_RE: String = format!("({SCHEMA_SEQ_NO_RE}|{})", *SCHEMA_RE); -} -const CREDDEF_TYPE: &str = super::credential_definition::CredentialDefinitionId::PREFIX; -const CREDDEF_TAG_RE: &str = r".*"; - -pub fn try_parse_indy_schema_id(id: &str) -> Option<(String, String, String)> { - let id_re = format!("^{}$", *SCHEMA_RE); - let id_re = Regex::new(id_re.as_str()).unwrap(); - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_schema_id: captures {:?}", captures); - if let (Some(did), Some(name), Some(ver)) = - (captures.get(1), captures.get(3), captures.get(4)) - { - return Some(( - did.as_str().to_owned(), - name.as_str().to_owned(), - ver.as_str().to_owned(), - )); - } - } - None -} - -pub fn try_parse_indy_creddef_id(id: &str) -> Option<(String, String, String)> { - let schema_ref_re = &*SCHEMA_REF_RE; - let id_re = format!( - "^(did:indy(:{NAMESPACE_RE})?:{DID_RE}){CREDDEF_TYPE}({schema_ref_re})/({CREDDEF_TAG_RE})$" - ); - let id_re = Regex::new(id_re.as_str()).unwrap(); - - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_creddef_id: captures {:?}", captures); - if let (Some(did), Some(seq_no), Some(tag)) = - (captures.get(1), captures.get(3), captures.get(9)) - { - return Some(( - did.as_str().to_owned(), - seq_no.as_str().to_owned(), - tag.as_str().to_owned(), - )); - } - } - - None -} - -pub fn try_parse_indy_rev_reg( - id: &str, -) -> Option<(DidValue, CredentialDefinitionId, String, String)> { - let creddef_name_re = r"[^/]*"; - let tag_re = r"[^/]*"; - let schema_ref_re = &*SCHEMA_REF_RE; - let id_re = format!( - "^(did:indy(:{NAMESPACE_RE})?:{DID_RE})/anoncreds/v0/REV_REG_DEF/{schema_ref_re}/\ - ({creddef_name_re})/({tag_re})$" - ); - let id_re = Regex::new(id_re.as_str()).unwrap(); - - if let Some(captures) = id_re.captures(id) { - trace!("try_parse_indy_rev_reg: captures {:?}", captures); - if let (Some(did), Some(schema_id), Some(creddef_name), Some(tag)) = ( - captures.get(1), - captures.get(3), - captures.get(8), - captures.get(9), - ) { - let did = DidValue(did.as_str().to_owned()); - let schema_id = SchemaId(schema_id.as_str().to_owned()); - let creddef_id = CredentialDefinitionId::new( - &did, - &schema_id, - super::credential_definition::CL_SIGNATURE_TYPE, - creddef_name.as_str(), - ) - .ok()?; - return Some(( - did, - creddef_id, - CL_ACCUM.to_owned(), - tag.as_str().to_owned(), - )); - } - } - - None -} - -#[test] -fn test_try_parse_valid_indy_creddefid_works() { - let (did, schema_seq_no, tag) = - try_parse_indy_creddef_id("did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag") - .unwrap(); - assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!(schema_seq_no, "1".to_owned()); - assert_eq!(tag, "tag".to_owned()); - - let (did, schema_ref, tag) = try_parse_indy_creddef_id( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/tag", - ) - .unwrap(); - assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - schema_ref, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0".to_owned() - ); - assert_eq!(tag, "tag".to_owned()); -} - -#[test] -fn test_try_parse_valid_indy_revreg_works() { - let (did, creddef, _, tag) = try_parse_indy_rev_reg( - 
"did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/1/creddef_name/TAG1", - ) - .unwrap(); - assert_eq!(did.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - creddef.0, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/creddef_name".to_owned() - ); - assert_eq!(tag, "TAG1".to_owned()); - - let (did, creddef, _, tag) = try_parse_indy_rev_reg( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/creddef_name/TAG1", - ) - .unwrap(); - assert_eq!(did.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!( - creddef.0, - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ - anoncreds/v0/SCHEMA/gvt/1.0/creddef_name" - .to_owned() - ); - assert_eq!(tag, "TAG1".to_owned()); -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs deleted file mode 100644 index 0b6b30c9c4..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/master_secret.rs +++ /dev/null @@ -1,6 +0,0 @@ -use ursa::cl::MasterSecret as CryptoMasterSecret; - -#[derive(Debug, Deserialize, Serialize)] -pub struct MasterSecret { - pub value: CryptoMasterSecret, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs deleted file mode 100644 index 77fd6297b3..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/mod.rs +++ /dev/null @@ -1,15 +0,0 @@ -pub mod credential; -pub mod credential_definition; -pub mod credential_offer; -pub mod credential_request; -pub mod indy_identifiers; -pub mod master_secret; -pub mod proof; -pub mod proof_request; -pub mod requested_credential; -pub mod revocation_registry; -pub mod revocation_registry_definition; -pub mod revocation_registry_delta; -pub mod schema; - -pub const DELIMITER: &str = ":"; diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs deleted file mode 100644 index f66e0cce0a..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::Proof as CryptoProof; - -use super::{ - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct Proof { - pub proof: CryptoProof, - pub requested_proof: RequestedProof, - pub identifiers: Vec, -} - -#[derive(Debug, Serialize, Deserialize, Default)] -pub struct RequestedProof { - pub revealed_attrs: HashMap, - #[serde(skip_serializing_if = "HashMap::is_empty")] - #[serde(default)] - pub revealed_attr_groups: HashMap, - #[serde(default)] - pub self_attested_attrs: HashMap, - #[serde(default)] - pub unrevealed_attrs: HashMap, - #[serde(default)] - pub predicates: HashMap, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct SubProofReferent { - pub sub_proof_index: u32, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct RevealedAttributeInfo { - pub sub_proof_index: u32, - pub raw: String, - pub encoded: String, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct RevealedAttributeGroupInfo { - pub sub_proof_index: u32, - pub values: HashMap, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct AttributeValue { - pub raw: String, - pub encoded: String, -} - -#[derive(Debug, Clone, 
Deserialize, Serialize, PartialEq, Eq, Hash)] -pub struct Identifier { - pub schema_id: SchemaId, - pub cred_def_id: CredentialDefinitionId, - pub rev_reg_id: Option, - pub timestamp: Option, -} - -#[cfg(test)] -mod tests { - use serde_json::json; - - use super::*; - - #[test] - fn deserialize_requested_proof_with_empty_revealed_attr_groups() { - let mut req_proof_old: RequestedProof = Default::default(); - req_proof_old.revealed_attrs.insert( - "attr1".to_string(), - RevealedAttributeInfo { - sub_proof_index: 0, - raw: "123".to_string(), - encoded: "123".to_string(), - }, - ); - let json = json!(req_proof_old).to_string(); - debug!("{}", json); - - let req_proof: RequestedProof = serde_json::from_str(&json).unwrap(); - assert!(req_proof.revealed_attr_groups.is_empty()) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs deleted file mode 100644 index f3b28ad616..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/proof_request.rs +++ /dev/null @@ -1,476 +0,0 @@ -use std::{collections::HashMap, fmt}; - -use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; -use serde_json::{json, Value}; -use ursa::cl::Nonce; - -use super::{ - super::crypto::did::DidValue, credential::Credential, - credential_definition::CredentialDefinitionId, - revocation_registry_definition::RevocationRegistryId, schema::SchemaId, -}; -use crate::utils::{qualifier, wql::Query}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct ProofRequestPayload { - pub nonce: Nonce, - pub name: String, - pub version: String, - #[serde(default)] - pub requested_attributes: HashMap, - #[serde(default)] - pub requested_predicates: HashMap, - pub non_revoked: Option, -} - -#[derive(Debug)] -pub enum ProofRequest { - ProofRequestV1(ProofRequestPayload), - ProofRequestV2(ProofRequestPayload), -} - -#[derive(Debug, Eq, PartialEq, Clone)] -pub enum ProofRequestsVersion { - V1, - V2, -} - -impl ProofRequest { - pub fn value(&self) -> &ProofRequestPayload { - match self { - ProofRequest::ProofRequestV1(proof_req) => proof_req, - ProofRequest::ProofRequestV2(proof_req) => proof_req, - } - } - - pub fn version(&self) -> ProofRequestsVersion { - match self { - ProofRequest::ProofRequestV1(_) => ProofRequestsVersion::V1, - ProofRequest::ProofRequestV2(_) => ProofRequestsVersion::V2, - } - } -} - -impl<'de> Deserialize<'de> for ProofRequest { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - struct Helper { - ver: Option, - nonce: String, - } - - let v = Value::deserialize(deserializer)?; - - let helper = Helper::deserialize(&v).map_err(de::Error::custom)?; - let nonce_cleaned = helper.nonce.replace([' ', '_'], ""); - - let proof_req = match helper.ver { - Some(version) => match version.as_ref() { - "1.0" => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV1(proof_request) - } - "2.0" => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV2(proof_request) - } - _ => return Err(de::Error::unknown_variant(&version, &["2.0"])), - }, - None => { - let proof_request = - ProofRequestPayload::deserialize(v).map_err(de::Error::custom)?; - ProofRequest::ProofRequestV1(proof_request) - } - }; - let nonce_parsed = match &proof_req { - ProofRequest::ProofRequestV1(payload) => { - 
payload.nonce.to_dec().map_err(de::Error::custom)? - } - ProofRequest::ProofRequestV2(payload) => { - payload.nonce.to_dec().map_err(de::Error::custom)? - } - }; - if nonce_cleaned != nonce_parsed { - Err(de::Error::custom(format!( - "Invalid nonce provided: {}", - nonce_cleaned - ))) - } else { - Ok(proof_req) - } - } -} - -impl Serialize for ProofRequest { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let value = match self { - ProofRequest::ProofRequestV1(proof_req) => { - let mut value = ::serde_json::to_value(proof_req).map_err(ser::Error::custom)?; - value - .as_object_mut() - .unwrap() - .insert("ver".into(), json!("1.0")); - value - } - ProofRequest::ProofRequestV2(proof_req) => { - let mut value = ::serde_json::to_value(proof_req).map_err(ser::Error::custom)?; - value - .as_object_mut() - .unwrap() - .insert("ver".into(), json!("2.0")); - value - } - }; - - value.serialize(serializer) - } -} - -pub type ProofRequestExtraQuery = HashMap; - -#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Hash)] -pub struct NonRevocedInterval { - pub from: Option, - pub to: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct AttributeInfo { - #[serde(skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub names: Option>, - pub restrictions: Option, - pub non_revoked: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub struct PredicateInfo { - pub name: String, - pub p_type: PredicateTypes, - pub p_value: i32, - pub restrictions: Option, - pub non_revoked: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -pub enum PredicateTypes { - #[serde(rename = ">=")] - GE, - #[serde(rename = "<=")] - LE, - #[serde(rename = ">")] - GT, - #[serde(rename = "<")] - LT, -} - -impl fmt::Display for PredicateTypes { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - PredicateTypes::GE => write!(f, "GE"), - PredicateTypes::GT => write!(f, "GT"), - PredicateTypes::LE => write!(f, "LE"), - PredicateTypes::LT => write!(f, "LT"), - } - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RequestedAttributeInfo { - pub attr_referent: String, - pub attr_info: AttributeInfo, - pub revealed: bool, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct RequestedPredicateInfo { - pub predicate_referent: String, - pub predicate_info: PredicateInfo, -} - -impl ProofRequest { - pub fn to_unqualified(self) -> ProofRequest { - let convert = |proof_request: &mut ProofRequestPayload| { - for (_, requested_attribute) in proof_request.requested_attributes.iter_mut() { - requested_attribute.restrictions = requested_attribute - .restrictions - .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); - } - for (_, requested_predicate) in proof_request.requested_predicates.iter_mut() { - requested_predicate.restrictions = requested_predicate - .restrictions - .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); - } - }; - - match self { - ProofRequest::ProofRequestV2(mut proof_request) => { - convert(&mut proof_request); - ProofRequest::ProofRequestV2(proof_request) - } - ProofRequest::ProofRequestV1(mut proof_request) => { - convert(&mut proof_request); - ProofRequest::ProofRequestV1(proof_request) - } - } - } -} - -fn _convert_query_to_unqualified(query: &Query) -> Query { - match query { - Query::Eq(tag_name, ref tag_value) => 
Query::Eq( - tag_name.to_string(), - _convert_value_to_unqualified(tag_name, tag_value), - ), - Query::Neq(ref tag_name, ref tag_value) => Query::Neq( - tag_name.to_string(), - _convert_value_to_unqualified(tag_name, tag_value), - ), - Query::In(ref tag_name, ref tag_values) => Query::In( - tag_name.to_string(), - tag_values - .iter() - .map(|tag_value| _convert_value_to_unqualified(tag_name, tag_value)) - .collect::>(), - ), - Query::And(ref queries) => Query::And( - queries - .iter() - .map(_convert_query_to_unqualified) - .collect::>(), - ), - Query::Or(ref queries) => Query::Or( - queries - .iter() - .map(_convert_query_to_unqualified) - .collect::>(), - ), - Query::Not(ref query) => _convert_query_to_unqualified(query), - query => query.clone(), - } -} - -fn _convert_value_to_unqualified(tag_name: &str, tag_value: &str) -> String { - match tag_name { - "issuer_did" | "schema_issuer_did" => DidValue(tag_value.to_string()).to_unqualified().0, - "schema_id" => SchemaId(tag_value.to_string()).to_unqualified().0, - "cred_def_id" => { - CredentialDefinitionId(tag_value.to_string()) - .to_unqualified() - .0 - } - "rev_reg_id" => { - RevocationRegistryId(tag_value.to_string()) - .to_unqualified() - .0 - } - _ => tag_value.to_string(), - } -} - -fn _process_operator(restriction_op: &Query, version: &ProofRequestsVersion) -> Result<(), String> { - match restriction_op { - Query::Eq(ref tag_name, ref tag_value) - | Query::Neq(ref tag_name, ref tag_value) - | Query::Gt(ref tag_name, ref tag_value) - | Query::Gte(ref tag_name, ref tag_value) - | Query::Lt(ref tag_name, ref tag_value) - | Query::Lte(ref tag_name, ref tag_value) - | Query::Like(ref tag_name, ref tag_value) => { - _check_restriction(tag_name, tag_value, version) - } - Query::In(ref tag_name, ref tag_values) => { - tag_values - .iter() - .map(|tag_value| _check_restriction(tag_name, tag_value, version)) - .collect::, String>>()?; - Ok(()) - } - Query::And(ref operators) | Query::Or(ref operators) => { - operators - .iter() - .map(|operator| _process_operator(operator, version)) - .collect::, String>>()?; - Ok(()) - } - Query::Not(ref operator) => _process_operator(operator, version), - } -} - -fn _check_restriction( - tag_name: &str, - tag_value: &str, - version: &ProofRequestsVersion, -) -> Result<(), String> { - if *version == ProofRequestsVersion::V1 - && Credential::QUALIFIABLE_TAGS.contains(&tag_name) - && qualifier::is_fully_qualified(tag_value) - { - return Err( - "Proof Request validation failed: fully qualified identifiers can not be used for \ - Proof Request of the first version. Please, set \"ver\":\"2.0\" to use fully \ - qualified identifiers." 
- .to_string(), - ); - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - mod invalid_nonce { - use super::*; - - #[test] - fn proof_request_valid_nonce() { - let proof_req_json = json!({ - "nonce": "123456", - "name": "name", - "version": "2.0", - "requested_attributes": {}, - "requested_predicates": {}, - }) - .to_string(); - - let proof_req: ProofRequest = serde_json::from_str(&proof_req_json).unwrap(); - let payload = match proof_req { - ProofRequest::ProofRequestV1(p) => p, - ProofRequest::ProofRequestV2(p) => p, - }; - - assert_eq!(payload.nonce.to_dec().unwrap(), "123456"); - } - - #[test] - fn proof_request_invalid_nonce() { - let proof_req_json = json!({ - "nonce": "123abc", - "name": "name", - "version": "2.0", - "requested_attributes": {}, - "requested_predicates": {}, - }) - .to_string(); - - serde_json::from_str::(&proof_req_json).unwrap_err(); - } - } - - mod to_unqualified { - use super::*; - - const DID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e"; - const DID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e"; - const SCHEMA_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0"; - const SCHEMA_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0"; - const CRED_DEF_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"; - const CRED_DEF_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag"; - const REV_REG_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/\ - REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/\ - v0/SCHEMA/gvt/1.0/tag/TAG_1"; - const REV_REG_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:\ - CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:\ - TAG_1"; - - #[test] - fn proof_request_to_unqualified() { - let mut requested_attributes: HashMap = HashMap::new(); - requested_attributes.insert( - "attr1_referent".to_string(), - AttributeInfo { - name: Some("name".to_string()), - names: None, - restrictions: Some(Query::And(vec![ - Query::Eq("issuer_did".to_string(), DID_QUALIFIED.to_string()), - Query::Eq("schema_id".to_string(), SCHEMA_ID_QUALIFIED.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID_QUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let mut requested_predicates: HashMap = HashMap::new(); - requested_predicates.insert( - "predicate1_referent".to_string(), - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 0, - restrictions: Some(Query::And(vec![ - Query::Eq("schema_issuer_did".to_string(), DID_QUALIFIED.to_string()), - Query::Eq("rev_reg_id".to_string(), REV_REG_ID_QUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let proof_request = ProofRequest::ProofRequestV2(ProofRequestPayload { - nonce: Nonce::new().unwrap(), - name: "proof_request_to_unqualified".to_string(), - version: "1.0".to_string(), - requested_attributes, - requested_predicates, - non_revoked: None, - }); - - let mut expected_requested_attributes: HashMap = HashMap::new(); - expected_requested_attributes.insert( - "attr1_referent".to_string(), - AttributeInfo { - name: Some("name".to_string()), - names: None, - restrictions: Some(Query::And(vec![ - Query::Eq("issuer_did".to_string(), DID_UNQUALIFIED.to_string()), - Query::Eq("schema_id".to_string(), SCHEMA_ID_UNQUALIFIED.to_string()), - Query::Eq( - "cred_def_id".to_string(), - CRED_DEF_ID_UNQUALIFIED.to_string(), - ), - ])), - non_revoked: None, - }, - ); - - let mut expected_requested_predicates: HashMap = HashMap::new(); - 
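As a rough illustration of the request format these removed tests exercise: a proof request is plain JSON with a decimal nonce, and fully qualified identifiers inside restrictions are only accepted when "ver" is "2.0" (see the removed _check_restriction above). A sketch assuming serde_json, with hypothetical names and the qualified cred_def_id constant used in the tests:

    use serde_json::json;

    fn main() {
        // Minimal v2 proof request: decimal nonce, one requested attribute with a
        // WQL equality restriction on a fully qualified cred_def_id.
        let proof_request = json!({
            "ver": "2.0",
            "nonce": "123456",
            "name": "proof_request_example",   // hypothetical
            "version": "1.0",
            "requested_attributes": {
                "attr1_referent": {
                    "name": "name",
                    "restrictions": {
                        "cred_def_id": "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"
                    }
                }
            },
            "requested_predicates": {}
        });
        println!("{}", proof_request);
    }
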
expected_requested_predicates.insert( - "predicate1_referent".to_string(), - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 0, - restrictions: Some(Query::And(vec![ - Query::Eq("schema_issuer_did".to_string(), DID_UNQUALIFIED.to_string()), - Query::Eq("rev_reg_id".to_string(), REV_REG_ID_UNQUALIFIED.to_string()), - ])), - non_revoked: None, - }, - ); - - let proof_request = proof_request.to_unqualified(); - assert_eq!( - expected_requested_attributes, - proof_request.value().requested_attributes - ); - assert_eq!( - expected_requested_predicates, - proof_request.value().requested_predicates - ); - assert_eq!(ProofRequestsVersion::V2, proof_request.version()); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs deleted file mode 100644 index 2f2ee0df53..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/requested_credential.rs +++ /dev/null @@ -1,21 +0,0 @@ -use std::collections::HashMap; - -#[derive(Debug, Deserialize, Serialize)] -pub struct RequestedCredentials { - pub self_attested_attributes: HashMap, - pub requested_attributes: HashMap, - pub requested_predicates: HashMap, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct RequestedAttribute { - pub cred_id: String, - pub timestamp: Option, - pub revealed: bool, -} - -#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Hash, Clone)] -pub struct ProvingCredentialKey { - pub cred_id: String, - pub timestamp: Option, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs deleted file mode 100644 index f9a1042dd9..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry.rs +++ /dev/null @@ -1,42 +0,0 @@ -use std::collections::HashMap; - -use ursa::cl::RevocationRegistry as CryptoRevocationRegistry; - -use super::revocation_registry_definition::RevocationRegistryId; - -#[derive(Debug, Serialize, Deserialize)] -pub struct RevocationRegistryV1 { - pub value: CryptoRevocationRegistry, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistry { - #[serde(rename = "1.0")] - RevocationRegistryV1(RevocationRegistryV1), -} - -impl From for RevocationRegistryV1 { - fn from(rev_reg: RevocationRegistry) -> Self { - match rev_reg { - RevocationRegistry::RevocationRegistryV1(rev_reg) => rev_reg, - } - } -} - -pub type RevocationRegistries = HashMap>; - -pub fn rev_regs_map_to_rev_regs_local_map( - rev_regs: RevocationRegistries, -) -> HashMap> { - rev_regs - .into_iter() - .map(|(rev_reg_id, rev_reg_to_timespams)| { - let val = rev_reg_to_timespams - .into_iter() - .map(|(timestamp, rev_reg)| (timestamp, RevocationRegistryV1::from(rev_reg))) - .collect(); - (rev_reg_id, val) - }) - .collect() -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs deleted file mode 100644 index 003d0af9f6..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs +++ /dev/null @@ -1,323 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::errors::{err_msg, IndyErrorKind, IndyResult}; -use lazy_static::lazy_static; -use regex::Regex; -use ursa::cl::{RevocationKeyPrivate, RevocationKeyPublic}; - -use super::{ - 
super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, indy_identifiers, - DELIMITER, -}; -use crate::utils::qualifier; - -pub const CL_ACCUM: &str = "CL_ACCUM"; -pub const REV_REG_DEG_MARKER: &str = "4"; - -lazy_static! { - static ref QUALIFIED_REV_REG_ID: Regex = Regex::new( - "(^revreg:(?P[a-z0-9]+):)?(?P.+):4:(?P.+):(?P.+):\ - (?P.+)$" - ) - .unwrap(); -} - -#[derive(Deserialize, Debug, Serialize)] -pub struct RevocationRegistryConfig { - pub issuance_type: Option, - pub max_cred_num: Option, -} - -#[allow(non_camel_case_types)] -#[derive(Deserialize, Debug, Serialize, PartialEq, Clone)] -pub enum IssuanceType { - ISSUANCE_BY_DEFAULT, - ISSUANCE_ON_DEMAND, -} - -impl IssuanceType { - pub fn to_bool(&self) -> bool { - self.clone() == IssuanceType::ISSUANCE_BY_DEFAULT - } -} - -#[allow(non_camel_case_types)] -#[derive(Deserialize, Debug, Serialize, PartialEq)] -pub enum RegistryType { - CL_ACCUM, -} - -impl RegistryType { - pub fn to_str(&self) -> &'static str { - match *self { - RegistryType::CL_ACCUM => CL_ACCUM, - } - } -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionValue { - pub issuance_type: IssuanceType, - pub max_cred_num: u32, - pub public_keys: RevocationRegistryDefinitionValuePublicKeys, - pub tails_hash: String, - pub tails_location: String, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionValuePublicKeys { - pub accum_key: RevocationKeyPublic, -} - -#[derive(Deserialize, Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDefinitionV1 { - pub id: RevocationRegistryId, - pub revoc_def_type: RegistryType, - pub tag: String, - pub cred_def_id: CredentialDefinitionId, - pub value: RevocationRegistryDefinitionValue, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistryDefinition { - #[serde(rename = "1.0")] - RevocationRegistryDefinitionV1(RevocationRegistryDefinitionV1), -} - -impl RevocationRegistryDefinition { - pub fn to_unqualified(self) -> RevocationRegistryDefinition { - match self { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1(rev_ref_def) => { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1( - RevocationRegistryDefinitionV1 { - id: rev_ref_def.id.to_unqualified(), - revoc_def_type: rev_ref_def.revoc_def_type, - tag: rev_ref_def.tag, - cred_def_id: rev_ref_def.cred_def_id.to_unqualified(), - value: rev_ref_def.value, - }, - ) - } - } - } -} - -impl From for RevocationRegistryDefinitionV1 { - fn from(rev_reg_def: RevocationRegistryDefinition) -> Self { - match rev_reg_def { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1(rev_reg_def) => { - rev_reg_def - } - } - } -} - -pub type RevocationRegistryDefinitions = - HashMap; - -pub fn rev_reg_defs_map_to_rev_reg_defs_v1_map( - rev_reg_defs: RevocationRegistryDefinitions, -) -> HashMap { - rev_reg_defs - .into_iter() - .map(|(rev_reg_id, rev_reg_def)| { - ( - rev_reg_id, - RevocationRegistryDefinitionV1::from(rev_reg_def), - ) - }) - .collect() -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct RevocationRegistryDefinitionPrivate { - pub value: RevocationKeyPrivate, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RevocationRegistryInfo { - pub id: RevocationRegistryId, - pub curr_id: u32, - pub used_ids: HashSet, -} - -qualifiable_type!(RevocationRegistryId); - -impl RevocationRegistryId { - pub const PREFIX: 
&'static str = "/anoncreds/v0/REV_REG_DEF/"; - - pub fn new( - did: &DidValue, - cred_def_id: &CredentialDefinitionId, - rev_reg_type: &str, - tag: &str, - ) -> IndyResult { - match did.get_method() { - Some(method) if method.starts_with("indy") => { - if let Some((_issuer_did, _cl_type, schema_id, creddef_tag)) = cred_def_id.parts() { - Ok(RevocationRegistryId( - did.0.to_owned() - + "/anoncreds/v0/REV_REG_DEF/" - + &schema_id.0 - + "/" - + &creddef_tag - + "/" - + tag, - )) - } else { - Err(err_msg( - IndyErrorKind::InvalidStructure, - "Can't parse Indy CredDef to construct RevReg ID", - )) - } - } - None => Ok(RevocationRegistryId(format!( - "{}{}{}{}{}{}{}{}{}", - did.0, - DELIMITER, - REV_REG_DEG_MARKER, - DELIMITER, - cred_def_id.0, - DELIMITER, - rev_reg_type, - DELIMITER, - tag - ))), - Some(method) => Err(err_msg( - IndyErrorKind::InvalidStructure, - format!("Unsupported DID method {} for RevReg ID", method), - )), - } - } - - pub fn parts(&self) -> Option<(DidValue, CredentialDefinitionId, String, String)> { - trace!("RevocationRegistryId::parts >> self.0 {}", self.0); - if let Some(parts) = indy_identifiers::try_parse_indy_rev_reg(self.0.as_str()) { - trace!( - "RevocationRegistryId::parts: parsed Indy RevReg {:?}", - parts - ); - return Some(parts); - } - - QUALIFIED_REV_REG_ID.captures(&self.0).map(|caps| { - ( - DidValue(caps["did"].to_string()), - CredentialDefinitionId(caps["cred_def_id"].to_string()), - caps["rev_reg_type"].to_string(), - caps["tag"].to_string(), - ) - }) - } - - pub fn to_unqualified(&self) -> RevocationRegistryId { - match self.parts() { - Some((did, cred_def_id, rev_reg_type, tag)) => RevocationRegistryId::new( - &did.to_unqualified(), - &cred_def_id.to_unqualified(), - &rev_reg_type, - &tag, - ) - .expect("Can't create unqualified RevocationRegistryId"), - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _rev_reg_type() -> String { - "CL_ACCUM".to_string() - } - - fn _tag() -> String { - "TAG_1".to_string() - } - - fn _did_qualified() -> DidValue { - DidValue("did:sov:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _cred_def_id_unqualified() -> CredentialDefinitionId { - CredentialDefinitionId( - "NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag".to_string(), - ) - } - - fn _cred_def_id_qualified() -> CredentialDefinitionId { - CredentialDefinitionId( - "creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:\ - NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag" - .to_string(), - ) - } - - fn _rev_reg_id_unqualified() -> RevocationRegistryId { - RevocationRegistryId( - "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.\ - 0:tag:CL_ACCUM:TAG_1" - .to_string(), - ) - } - - fn _rev_reg_id_qualified() -> RevocationRegistryId { - RevocationRegistryId( - "revreg:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:4:creddef:sov:did:sov:\ - NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:\ - CL_ACCUM:TAG_1" - .to_string(), - ) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_rev_reg_id_parts_for_id_as_unqualified() { - assert_eq!( - _rev_reg_id_unqualified(), - _rev_reg_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_rev_reg_id_parts_for_id_as_qualified() { - assert_eq!( - _rev_reg_id_unqualified(), - _rev_reg_id_qualified().to_unqualified() - ); - } - } - - mod parts { - use super::*; - - #[test] - fn 
test_rev_reg_id_parts_for_id_as_unqualified() { - let (did, cred_def_id, rev_reg_type, tag) = _rev_reg_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - assert_eq!(_cred_def_id_unqualified(), cred_def_id); - assert_eq!(_rev_reg_type(), rev_reg_type); - assert_eq!(_tag(), tag); - } - - #[test] - fn test_rev_reg_id_parts_for_id_as_qualified() { - let (did, cred_def_id, rev_reg_type, tag) = _rev_reg_id_qualified().parts().unwrap(); - assert_eq!(_did_qualified(), did); - assert_eq!(_cred_def_id_qualified(), cred_def_id); - assert_eq!(_rev_reg_type(), rev_reg_type); - assert_eq!(_tag(), tag); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs deleted file mode 100644 index f316d2cc6c..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs +++ /dev/null @@ -1,22 +0,0 @@ -use ursa::cl::RevocationRegistryDelta as RegistryDelta; - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDeltaV1 { - pub value: RegistryDelta, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum RevocationRegistryDelta { - #[serde(rename = "1.0")] - RevocationRegistryDeltaV1(RevocationRegistryDeltaV1), -} - -impl From for RevocationRegistryDeltaV1 { - fn from(rev_reg_delta: RevocationRegistryDelta) -> Self { - match rev_reg_delta { - RevocationRegistryDelta::RevocationRegistryDeltaV1(rev_reg_delta) => rev_reg_delta, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs b/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs deleted file mode 100644 index d2823b1f0d..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/anoncreds/schema.rs +++ /dev/null @@ -1,255 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::{ - errors::{IndyErrorKind, IndyResult}, - IndyError, -}; - -use super::{super::crypto::did::DidValue, indy_identifiers, DELIMITER}; -use crate::utils::qualifier; - -pub const MAX_ATTRIBUTES_COUNT: usize = 125; - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct SchemaV1 { - pub id: SchemaId, - pub name: String, - pub version: String, - #[serde(rename = "attrNames")] - pub attr_names: AttributeNames, - pub seq_no: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "ver")] -pub enum Schema { - #[serde(rename = "1.0")] - SchemaV1(SchemaV1), -} - -impl Schema { - pub fn to_unqualified(self) -> Schema { - match self { - Schema::SchemaV1(schema) => Schema::SchemaV1(SchemaV1 { - id: schema.id.to_unqualified(), - name: schema.name, - version: schema.version, - attr_names: schema.attr_names, - seq_no: schema.seq_no, - }), - } - } -} - -impl From for SchemaV1 { - fn from(schema: Schema) -> Self { - match schema { - Schema::SchemaV1(schema) => schema, - } - } -} - -pub type Schemas = HashMap; - -pub fn schemas_map_to_schemas_v1_map(schemas: Schemas) -> HashMap { - schemas - .into_iter() - .map(|(schema_id, schema)| (schema_id, SchemaV1::from(schema))) - .collect() -} - -#[derive(Serialize, Deserialize, Debug, Clone, Default)] -pub struct AttributeNames(pub HashSet); - -impl AttributeNames { - pub fn new() -> Self { - AttributeNames(HashSet::new()) - } -} - -impl From> for AttributeNames { - fn from(attrs: HashSet) -> Self { - AttributeNames(attrs) - } -} - -impl From for HashSet { - fn from(value: AttributeNames) -> 
HashSet { - value.0 - } -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] -pub struct SchemaId(pub String); - -impl SchemaId { - pub const PREFIX: &'static str = "/anoncreds/v0/SCHEMA/"; - - pub fn get_method(&self) -> Option { - qualifier::method(&self.0) - } - - pub fn new(did: &DidValue, name: &str, version: &str) -> IndyResult { - const MARKER: &str = "2"; - match did.get_method() { - Some(method) if method.starts_with("indy") => Ok(SchemaId(format!( - "{}{}{}/{}", - did.0, - Self::PREFIX, - name, - version - ))), - Some(_method) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Unsupported DID method", - )), - None => Ok(SchemaId(format!( - "{}:{}:{}:{}", - did.0, MARKER, name, version - ))), - } - } - - pub fn parts(&self) -> Option<(DidValue, String, String)> { - trace!("SchemaId::parts >> {:?}", self.0); - if let Some((did, name, ver)) = indy_identifiers::try_parse_indy_schema_id(&self.0) { - return Some((DidValue(did), name, ver)); - } - - let parts = self.0.split_terminator(DELIMITER).collect::>(); - - if parts.len() == 1 { - // 1 - return None; - } - - if parts.len() == 4 { - // NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[0].to_string(); - let name = parts[2].to_string(); - let version = parts[3].to_string(); - return Some((DidValue(did), name, version)); - } - - if parts.len() == 8 { - // schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0 - let did = parts[2..5].join(DELIMITER); - let name = parts[6].to_string(); - let version = parts[7].to_string(); - return Some((DidValue(did), name, version)); - } - - None - } - - pub fn qualify(&self, method: &str) -> IndyResult { - match self.parts() { - Some((did, name, version)) => SchemaId::new(&did.qualify(method), &name, &version), - None => Ok(self.clone()), - } - } - - pub fn to_unqualified(&self) -> SchemaId { - trace!("SchemaId::to_unqualified >> {}", &self.0); - match self.parts() { - Some((did, name, version)) => { - trace!( - "SchemaId::to_unqualified: parts {:?}", - (&did, &name, &version) - ); - SchemaId::new(&did.to_unqualified(), &name, &version) - .expect("Can't create unqualified SchemaId") - } - None => self.clone(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _did() -> DidValue { - DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _did_qualified() -> DidValue { - DidValue("did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e".to_string()) - } - - fn _schema_id_seq_no() -> SchemaId { - SchemaId("1".to_string()) - } - - fn _schema_id_unqualified() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0".to_string()) - } - - fn _schema_id_qualified() -> SchemaId { - SchemaId( - "did:indy:sovrin:builder:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0" - .to_string(), - ) - } - - fn _schema_id_invalid() -> SchemaId { - SchemaId("NcYxiDXkpYi6ov5FcYDi1e:2".to_string()) - } - - mod to_unqualified { - use super::*; - - #[test] - fn test_schema_id_unqualify_for_id_as_seq_no() { - assert_eq!(_schema_id_seq_no(), _schema_id_seq_no().to_unqualified()); - } - - #[test] - fn test_schema_id_parts_for_id_as_unqualified() { - assert_eq!( - _schema_id_unqualified(), - _schema_id_unqualified().to_unqualified() - ); - } - - #[test] - fn test_schema_id_parts_for_id_as_qualified() { - assert_eq!( - _schema_id_unqualified(), - _schema_id_qualified().to_unqualified() - ); - } - - #[test] - fn test_schema_id_parts_for_invalid_unqualified() { - assert_eq!(_schema_id_invalid(), _schema_id_invalid().to_unqualified()); - } - } - - mod parts { - use super::*; - - #[test] - fn 
test_schema_id_parts_for_id_as_seq_no() { - assert!(_schema_id_seq_no().parts().is_none()); - } - - #[test] - fn test_schema_id_parts_for_id_as_unqualified() { - let (did, _, _) = _schema_id_unqualified().parts().unwrap(); - assert_eq!(_did(), did); - } - - #[test] - fn test_schema_id_parts_for_id_as_qualified() { - let (did, _, _) = _schema_id_qualified().parts().unwrap(); - assert_eq!(_did_qualified(), did); - } - - #[test] - fn test_schema_id_parts_for_invalid_unqualified() { - assert!(_schema_id_invalid().parts().is_none()); - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/cache.rs b/aries/misc/legacy/libvdrtools/src/domain/cache.rs deleted file mode 100644 index deea3c64be..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/cache.rs +++ /dev/null @@ -1,15 +0,0 @@ -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PurgeOptions { - pub max_age: Option, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct GetCacheOptions { - pub no_cache: Option, // Skip usage of cache, - pub no_update: Option, // Use only cached data, do not try to update. - pub no_store: Option, // Skip storing fresh data if updated - pub min_fresh: Option, /* Return cached data if not older than this many seconds. -1 - * means do not check age. */ -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs deleted file mode 100644 index 9c48c7cf08..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/did.rs +++ /dev/null @@ -1,128 +0,0 @@ -use indy_api_types::errors::{IndyError, IndyErrorKind, IndyResult}; - -use crate::utils::qualifier; - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] -pub struct DidMethod(pub String); - -#[derive(Serialize, Deserialize, Clone, Debug, Default)] -pub struct MyDidInfo { - pub did: Option, - pub seed: Option, - pub crypto_type: Option, - pub cid: Option, - pub method_name: Option, - pub ledger_type: Option, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct TheirDidInfo { - pub did: DidValue, - pub verkey: Option, -} - -impl TheirDidInfo { - pub fn new(did: DidValue, verkey: Option) -> TheirDidInfo { - TheirDidInfo { did, verkey } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Did { - pub did: DidValue, - pub verkey: String, -} - -impl Did { - pub fn new(did: DidValue, verkey: String) -> Did { - Did { did, verkey } - } -} - -qualifiable_type!(DidValue); - -impl DidValue { - pub const PREFIX: &'static str = "did"; - - pub fn new(did: &str, ledger_type: Option<&str>, method: Option<&str>) -> IndyResult { - match (ledger_type, method) { - (Some(ledger_type_), Some(method_)) => { - Ok(DidValue(did.to_string()).set_ledger_and_method(ledger_type_, method_)) - } - (None, Some(method_)) => Ok(DidValue(did.to_string()).set_method(method_)), - (None, None) => Ok(DidValue(did.to_string())), - (Some(_), None) => Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Ledger type can not be specified if DID method is undefined", - )), - } - } - - pub fn to_short(&self) -> ShortDidValue { - ShortDidValue(self.to_unqualified().0) - } - - pub fn qualify(&self, method: &str) -> DidValue { - self.set_method(method) - } - - pub fn to_unqualified(&self) -> DidValue { - DidValue(qualifier::to_unqualified(&self.0)) - } - - pub fn is_abbreviatable(&self) -> bool { - match self.get_method() { - Some(ref method) if method.starts_with("sov") || 
method.starts_with("indy") => true, - Some(_) => false, - None => true, - } - } -} - -qualifiable_type!(ShortDidValue); - -impl ShortDidValue { - pub const PREFIX: &'static str = "did"; - - pub fn qualify(&self, method: Option) -> DidValue { - match method { - Some(method_) => DidValue(self.set_method(&method_).0), - None => DidValue(self.0.to_string()), - } - } -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct DidMetadata { - pub value: String, -} - -#[derive(Serialize, Clone, Debug)] -#[serde(rename_all = "camelCase")] -pub struct DidWithMeta { - pub did: DidValue, - pub verkey: String, - pub temp_verkey: Option, - pub metadata: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct TheirDid { - pub did: DidValue, - pub verkey: String, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct TemporaryDid { - pub did: DidValue, - pub verkey: String, -} - -impl From for Did { - fn from(temp_did: TemporaryDid) -> Self { - Did { - did: temp_did.did, - verkey: temp_did.verkey, - } - } -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs deleted file mode 100644 index 050b96e448..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/key.rs +++ /dev/null @@ -1,38 +0,0 @@ -extern crate zeroize; - -use self::zeroize::Zeroize; - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Key { - pub verkey: String, - pub signkey: String, -} - -impl Key { - pub fn new(verkey: String, signkey: String) -> Key { - Key { verkey, signkey } - } -} - -impl Zeroize for Key { - fn zeroize(&mut self) { - self.signkey.zeroize(); - } -} - -impl Drop for Key { - fn drop(&mut self) { - self.signkey.zeroize(); - } -} - -#[derive(Serialize, Deserialize, Debug, Default)] -pub struct KeyInfo { - pub seed: Option, - pub crypto_type: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct KeyMetadata { - pub value: String, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs deleted file mode 100644 index b529c85c74..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -pub mod did; -pub mod key; -pub mod pack; - -#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Clone)] -pub enum CryptoTypes { - #[serde(rename = "ed25519")] - Ed25519, - #[serde(rename = "secp256k1")] - Secp256k1, -} diff --git a/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs b/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs deleted file mode 100644 index 195c8b1c98..0000000000 --- a/aries/misc/legacy/libvdrtools/src/domain/crypto/pack.rs +++ /dev/null @@ -1,40 +0,0 @@ -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct JWE { - pub protected: String, - pub iv: String, - pub ciphertext: String, - pub tag: String, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Recipient { - pub encrypted_key: String, - pub header: Header, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Header { - pub kid: String, - #[serde(default)] - #[serde(skip_serializing_if = "Option::is_none")] - pub iv: Option, - #[serde(default)] - #[serde(skip_serializing_if = "Option::is_none")] - pub sender: Option, -} - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct Protected { - pub enc: String, - pub typ: String, - pub alg: String, - pub recipients: Vec, -} - -#[derive(Serialize, Deserialize, Debug, Clone, 
Eq, PartialEq)]
-pub struct UnpackMessage {
-    pub message: String,
-    pub recipient_verkey: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sender_verkey: Option,
-}
diff --git a/aries/misc/legacy/libvdrtools/src/domain/mod.rs b/aries/misc/legacy/libvdrtools/src/domain/mod.rs
deleted file mode 100644
index 1386661e11..0000000000
--- a/aries/misc/legacy/libvdrtools/src/domain/mod.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-pub mod anoncreds;
-pub mod cache;
-pub mod crypto;
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct IndyConfig {
-    pub crypto_thread_pool_size: Option,
-    pub collect_backtrace: Option,
-}
diff --git a/aries/misc/legacy/libvdrtools/src/lib.rs b/aries/misc/legacy/libvdrtools/src/lib.rs
deleted file mode 100644
index 006e3b81f4..0000000000
--- a/aries/misc/legacy/libvdrtools/src/lib.rs
+++ /dev/null
@@ -1,109 +0,0 @@
-#[macro_use]
-extern crate log;
-
-#[macro_use]
-extern crate serde_derive;
-
-#[macro_use]
-extern crate indy_utils;
-
-pub use indy_api_types as types;
-pub use indy_wallet;
-
-#[macro_use]
-mod utils;
-
-#[macro_use]
-mod controllers;
-pub mod domain;
-mod services;
-
-use std::sync::Arc;
-
-pub use domain::{
-    anoncreds::{
-        credential::{AttributeValues, Credential, CredentialValues},
-        credential_definition::{
-            CredentialDefinition, CredentialDefinitionCorrectnessProof, CredentialDefinitionData,
-            CredentialDefinitionId, CredentialDefinitionPrivateKey, CredentialDefinitionV1,
-            SignatureType,
-        },
-        credential_offer::CredentialOffer,
-        credential_request::{CredentialRequest, CredentialRequestMetadata},
-        master_secret::MasterSecret,
-        revocation_registry::{RevocationRegistry, RevocationRegistryV1},
-        revocation_registry_definition::{
-            IssuanceType, RegistryType, RevocationRegistryConfig, RevocationRegistryDefinition,
-            RevocationRegistryDefinitionPrivate, RevocationRegistryDefinitionV1,
-            RevocationRegistryDefinitionValue, RevocationRegistryDefinitionValuePublicKeys,
-            RevocationRegistryId, RevocationRegistryInfo,
-        },
-        revocation_registry_delta::{RevocationRegistryDelta, RevocationRegistryDeltaV1},
-        schema::{AttributeNames, Schema, SchemaId, SchemaV1},
-    },
-    crypto::{
-        did::{DidMethod, DidValue, MyDidInfo},
-        key::KeyInfo,
-        pack::JWE,
-    },
-};
-pub use indy_api_types::{
-    CommandHandle, IndyError, SearchHandle, WalletHandle, INVALID_COMMAND_HANDLE,
-    INVALID_SEARCH_HANDLE, INVALID_WALLET_HANDLE,
-};
-pub use indy_wallet::WalletRecord;
-use lazy_static::lazy_static;
-
-use crate::{
-    controllers::{CryptoController, DidController, NonSecretsController, WalletController},
-    services::{CryptoService, WalletService},
-};
-
-// Global (lazy inited) instance of Locator
-lazy_static!
{ - static ref LOCATOR: Locator = Locator::new(); -} - -pub struct Locator { - pub crypto_controller: CryptoController, - pub did_controller: DidController, - pub wallet_controller: WalletController, - pub non_secret_controller: NonSecretsController, -} - -impl Locator { - pub fn instance() -> &'static Locator { - &LOCATOR - } - - fn new() -> Locator { - info!("new >"); - - let crypto_service = Arc::new(CryptoService::new()); - let wallet_service = Arc::new(WalletService::new()); - - let crypto_controller = - CryptoController::new(wallet_service.clone(), crypto_service.clone()); - - let did_controller = DidController::new(wallet_service.clone(), crypto_service.clone()); - - let wallet_controller = WalletController::new(wallet_service.clone(), crypto_service); - let non_secret_controller = NonSecretsController::new(wallet_service); - - let res = Locator { - crypto_controller, - did_controller, - wallet_controller, - non_secret_controller, - }; - - info!("new <"); - res - } -} - -impl Drop for Locator { - fn drop(&mut self) { - info!(target: "Locator", "drop <>"); - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs b/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs deleted file mode 100644 index 00c3ecbb96..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/crypto/ed25519.rs +++ /dev/null @@ -1,98 +0,0 @@ -use indy_api_types::errors::IndyError; -use indy_utils::crypto::{ed25519_box, ed25519_sign, sealedbox}; - -use super::CryptoType; - -pub struct ED25519CryptoType {} - -impl ED25519CryptoType { - pub fn new() -> ED25519CryptoType { - ED25519CryptoType {} - } -} - -impl CryptoType for ED25519CryptoType { - fn crypto_box( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> Result, IndyError> { - ed25519_box::encrypt( - &ed25519_sign::sk_to_curve25519(sk)?, - &ed25519_sign::vk_to_curve25519(vk)?, - doc, - nonce, - ) - } - - fn crypto_box_open( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> Result, IndyError> { - ed25519_box::decrypt( - &ed25519_sign::sk_to_curve25519(sk)?, - &ed25519_sign::vk_to_curve25519(vk)?, - doc, - nonce, - ) - } - - fn gen_nonce(&self) -> ed25519_box::Nonce { - ed25519_box::gen_nonce() - } - - fn create_key( - &self, - seed: Option<&ed25519_sign::Seed>, - ) -> Result<(ed25519_sign::PublicKey, ed25519_sign::SecretKey), IndyError> { - ed25519_sign::create_key_pair_for_signature(seed) - } - - fn sign( - &self, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> Result { - ed25519_sign::sign(sk, doc) - } - - fn verify( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - signature: &ed25519_sign::Signature, - ) -> Result { - ed25519_sign::verify(vk, doc, signature) - } - - fn crypto_box_seal( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - ) -> Result, IndyError> { - sealedbox::encrypt(&ed25519_sign::vk_to_curve25519(vk)?, doc) - } - - fn crypto_box_seal_open( - &self, - vk: &ed25519_sign::PublicKey, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> Result, IndyError> { - sealedbox::decrypt( - &ed25519_sign::vk_to_curve25519(vk)?, - &ed25519_sign::sk_to_curve25519(sk)?, - doc, - ) - } - - fn validate_key(&self, _vk: &ed25519_sign::PublicKey) -> Result<(), IndyError> { - // TODO: FIXME: Validate key - Ok(()) - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs deleted file mode 
100644 index af71363f61..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/crypto/mod.rs +++ /dev/null @@ -1,1183 +0,0 @@ -mod ed25519; - -use std::{collections::HashMap, str}; - -use async_std::sync::RwLock; -use ed25519::ED25519CryptoType; -use hex::FromHex; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::{ - base64, chacha20poly1305_ietf, chacha20poly1305_ietf::gen_nonce_and_encrypt_detached, - ed25519_box, ed25519_sign, -}; - -use crate::{ - domain::crypto::{ - did::{Did, DidValue, MyDidInfo, TheirDid, TheirDidInfo}, - key::{Key, KeyInfo}, - }, - utils::crypto::{ - base58::{DecodeBase58, ToBase58}, - verkey_builder::{build_full_verkey, split_verkey, verkey_get_cryptoname}, - }, -}; - -const DEFAULT_CRYPTO_TYPE: &str = "ed25519"; - -//TODO fix this crypto trait so it matches the functions below -//TODO create a second crypto trait for additional functions -trait CryptoType: Send + Sync { - fn crypto_box( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> IndyResult>; - - fn crypto_box_open( - &self, - sk: &ed25519_sign::SecretKey, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - nonce: &ed25519_box::Nonce, - ) -> IndyResult>; - - fn gen_nonce(&self) -> ed25519_box::Nonce; - - fn create_key( - &self, - seed: Option<&ed25519_sign::Seed>, - ) -> IndyResult<(ed25519_sign::PublicKey, ed25519_sign::SecretKey)>; - - fn validate_key(&self, _vk: &ed25519_sign::PublicKey) -> IndyResult<()>; - - fn sign(&self, sk: &ed25519_sign::SecretKey, doc: &[u8]) - -> IndyResult; - - fn verify( - &self, - vk: &ed25519_sign::PublicKey, - doc: &[u8], - signature: &ed25519_sign::Signature, - ) -> IndyResult; - - fn crypto_box_seal(&self, vk: &ed25519_sign::PublicKey, doc: &[u8]) -> IndyResult>; - - fn crypto_box_seal_open( - &self, - vk: &ed25519_sign::PublicKey, - sk: &ed25519_sign::SecretKey, - doc: &[u8], - ) -> IndyResult>; -} - -pub struct CryptoService { - crypto_types: RwLock>>, -} - -impl CryptoService { - pub(crate) fn new() -> CryptoService { - let crypto_types = { - let mut types = HashMap::<&'static str, Box>::new(); - types.insert(DEFAULT_CRYPTO_TYPE, Box::new(ED25519CryptoType::new())); - RwLock::new(types) - }; - - CryptoService { crypto_types } - } - - pub(crate) fn defualt_crypto_type() -> &'static str { - DEFAULT_CRYPTO_TYPE - } - - pub(crate) async fn create_key(&self, key_info: &KeyInfo) -> IndyResult { - trace!("create_key > key_info {:?}", secret!(key_info)); - - let crypto_type_name = key_info - .crypto_type - .as_deref() - .unwrap_or(DEFAULT_CRYPTO_TYPE); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("KeyInfo contains unknown crypto: {}", crypto_type_name), - ) - })?; - - let seed = self.convert_seed(key_info.seed.as_ref().map(String::as_ref))?; - let (vk, sk) = crypto_type.create_key(seed.as_ref())?; - let mut vk = vk[..].to_base58(); - let sk = sk[..].to_base58(); - - if !crypto_type_name.eq(DEFAULT_CRYPTO_TYPE) { - // Use suffix with crypto type name to store crypto type inside of vk - vk = format!("{}:{}", vk, crypto_type_name); - } - - let key = Key::new(vk, sk); - - let res = Ok(key); - trace!("create_key < {:?}", res); - res - } - - pub(crate) async fn create_my_did(&self, my_did_info: &MyDidInfo) -> IndyResult<(Did, Key)> { - trace!("create_my_did > my_did_info {:?}", secret!(my_did_info)); - - let crypto_type_name = my_did_info - .crypto_type - 
.as_deref() - .unwrap_or(DEFAULT_CRYPTO_TYPE); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("MyDidInfo contains unknown crypto: {}", crypto_type_name), - ) - })?; - - let seed = self.convert_seed(my_did_info.seed.as_ref().map(String::as_ref))?; - let (vk, sk) = crypto_type.create_key(seed.as_ref())?; - let did = match my_did_info.did { - Some(ref did) => did.clone(), - _ if my_did_info.cid == Some(true) => DidValue::new( - &vk[..].to_vec().to_base58(), - my_did_info.ledger_type.as_deref(), - my_did_info - .method_name - .as_ref() - .map(|method| method.0.as_str()), - )?, - _ => DidValue::new( - &vk[0..16].to_vec().to_base58(), - my_did_info.ledger_type.as_deref(), - my_did_info - .method_name - .as_ref() - .map(|method| method.0.as_str()), - )?, - }; - - let mut vk = vk[..].to_base58(); - let sk = sk[..].to_base58(); - - if !crypto_type_name.eq(DEFAULT_CRYPTO_TYPE) { - // Use suffix with crypto type name to store crypto type inside of vk - vk = format!("{}:{}", vk, crypto_type_name); - } - - let did = (Did::new(did, vk.clone()), Key::new(vk, sk)); - - let res = Ok(did); - trace!("create_my_did < {:?}", res); - res - } - - pub(crate) async fn create_their_did( - &self, - their_did_info: &TheirDidInfo, - ) -> IndyResult { - trace!("create_their_did > their_did_info {:?}", their_did_info); - - // Check did is correct Base58 - self.validate_did(&their_did_info.did)?; - - let verkey = build_full_verkey( - &their_did_info.did.to_unqualified().0, - their_did_info.verkey.as_deref(), - )?; - - self.validate_key(&verkey).await?; - - let did = TheirDid { - did: their_did_info.did.clone(), - verkey, - }; - - let res = Ok(did); - trace!("create_their_did < {:?}", res); - res - } - - pub(crate) async fn sign(&self, my_key: &Key, doc: &[u8]) -> IndyResult> { - trace!("sign > my_key {:?} doc {:?}", my_key, doc); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to sign message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let signature = crypto_type.sign(&my_sk, doc)?[..].to_vec(); - - let res = Ok(signature); - trace!("sign < {:?}", res); - res - } - - pub(crate) async fn verify( - &self, - their_vk: &str, - msg: &[u8], - signature: &[u8], - ) -> IndyResult { - trace!( - "verify > their_vk {:?} msg {:?} signature {:?}", - their_vk, - msg, - signature - ); - - let (their_vk, crypto_type_name) = split_verkey(their_vk); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to verify message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let their_vk = ed25519_sign::PublicKey::from_slice(&their_vk.decode_base58()?)?; - let signature = ed25519_sign::Signature::from_slice(signature)?; - - let valid = crypto_type.verify(&their_vk, msg, &signature)?; - - let res = Ok(valid); - trace!("verify < {:?}", res); - res - } - - pub(crate) async fn crypto_box( - &self, - my_key: &Key, - their_vk: &str, - doc: &[u8], - ) -> IndyResult<(Vec, Vec)> { - trace!( - "crypto_box > my_key {:?} their_vk {:?} doc 
{:?}", - my_key, - their_vk, - doc - ); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - - let (their_vk, their_crypto_type_name) = split_verkey(their_vk); - - if !crypto_type_name.eq(their_crypto_type_name) { - // TODO: FIXME: Use dedicated error code - return Err(err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "My key crypto type is incompatible with their key crypto type: {} {}", - crypto_type_name, their_crypto_type_name - ), - )); - } - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let nonce = crypto_type.gen_nonce(); - - let encrypted_doc = crypto_type.crypto_box(&my_sk, &their_vk, doc, &nonce)?; - let nonce = nonce[..].to_vec(); - - let res = Ok((encrypted_doc, nonce)); - trace!("crypto_box < {:?}", res); - res - } - - pub(crate) async fn crypto_box_open( - &self, - my_key: &Key, - their_vk: &str, - doc: &[u8], - nonce: &[u8], - ) -> IndyResult> { - trace!( - "crypto_box_open > my_key {:?} their_vk {:?} doc {:?} nonce {:?}", - my_key, - their_vk, - doc, - nonce - ); - - let crypto_type_name = verkey_get_cryptoname(&my_key.verkey); - let (their_vk, their_crypto_type_name) = split_verkey(their_vk); - - if !crypto_type_name.eq(their_crypto_type_name) { - // TODO: FIXME: Use dedicated error code - return Err(err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "My key crypto type is incompatible with their key crypto type: {} {}", - crypto_type_name, their_crypto_type_name - ), - )); - } - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box_open message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_sk = - ed25519_sign::SecretKey::from_slice(my_key.signkey.decode_base58()?.as_slice())?; - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let nonce = ed25519_box::Nonce::from_slice(nonce)?; - - let decrypted_doc = crypto_type.crypto_box_open(&my_sk, &their_vk, doc, &nonce)?; - - let res = Ok(decrypted_doc); - trace!("crypto_box_open < {:?}", res); - res - } - - pub(crate) async fn crypto_box_seal(&self, their_vk: &str, doc: &[u8]) -> IndyResult> { - trace!("crypto_box_seal > their_vk {:?} doc {:?}", their_vk, doc); - - let (their_vk, crypto_type_name) = split_verkey(their_vk); - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to encrypt sealed message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; - let encrypted_doc = crypto_type.crypto_box_seal(&their_vk, doc)?; - - let res = Ok(encrypted_doc); - trace!("crypto_box_seal < {:?}", res); - res - } - - pub(crate) async fn crypto_box_seal_open( - &self, - my_key: &Key, - doc: &[u8], - ) -> IndyResult> { - trace!("crypto_box_seal_open > my_key {:?} doc {:?}", my_key, doc); - - let (my_vk, crypto_type_name) = 
split_verkey(&my_key.verkey); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to crypto_box_open sealed message with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - let my_vk = ed25519_sign::PublicKey::from_slice(my_vk.decode_base58()?.as_slice())?; - - let my_sk = ed25519_sign::SecretKey::from_slice( - my_key.signkey.as_str().decode_base58()?.as_slice(), - )?; - - let decrypted_doc = crypto_type.crypto_box_seal_open(&my_vk, &my_sk, doc)?; - - let res = Ok(decrypted_doc); - trace!("crypto_box_seal_open < {:?}", res); - res - } - - pub(crate) fn convert_seed( - &self, - seed: Option<&str>, - ) -> IndyResult> { - trace!("convert_seed > seed {:?}", secret!(seed)); - - if seed.is_none() { - trace!("convert_seed <<< res: None"); - return Ok(None); - } - - let seed = seed.unwrap(); - - let bytes = if seed.as_bytes().len() == ed25519_sign::SEEDBYTES { - // is acceptable seed length - seed.as_bytes().to_vec() - } else if seed.ends_with('=') { - // is base64 string - let decoded = base64::decode(seed).to_indy( - IndyErrorKind::InvalidStructure, - "Can't deserialize Seed from Base64 string", - )?; - if decoded.len() == ed25519_sign::SEEDBYTES { - decoded - } else { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Trying to use invalid base64 encoded `seed`. The number of bytes must be \ - {} ", - ed25519_sign::SEEDBYTES - ), - )); - } - } else if seed.as_bytes().len() == ed25519_sign::SEEDBYTES * 2 { - // is hex string - Vec::from_hex(seed).to_indy(IndyErrorKind::InvalidStructure, "Seed is invalid hex")? - } else { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Trying to use invalid `seed`. It can be either {} bytes string or base64 \ - string or {} bytes HEX string", - ed25519_sign::SEEDBYTES, - ed25519_sign::SEEDBYTES * 2 - ), - )); - }; - - let seed = ed25519_sign::Seed::from_slice(bytes.as_slice())?; - - let res = Ok(Some(seed)); - trace!("convert_seed < {:?}", secret!(&res)); - res - } - - pub(crate) async fn validate_key(&self, vk: &str) -> IndyResult<()> { - trace!("validate_key > vk {:?}", vk); - - let (vk, crypto_type_name) = split_verkey(vk); - - let crypto_types = self.crypto_types.read().await; - - let crypto_type = crypto_types.get(crypto_type_name).ok_or_else(|| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!( - "Trying to use key with unknown crypto: {}", - crypto_type_name - ), - ) - })?; - - if let Some(vk) = vk.strip_prefix('~') { - let _ = vk.decode_base58()?; // TODO: proper validate abbreviated verkey - } else { - let vk = ed25519_sign::PublicKey::from_slice(vk.decode_base58()?.as_slice())?; - crypto_type.validate_key(&vk)?; - }; - - let res = Ok(()); - trace!("validate_key < {:?}", res); - res - } - - pub(crate) fn validate_did(&self, did: &DidValue) -> IndyResult<()> { - trace!("validate_did > did {:?}", did); - // Useful method, huh? 
- // Soon some state did validation will be put here - - let res = Ok(()); - trace!("validate_did < {:?}", res); - res - } - - pub(crate) fn encrypt_plaintext( - &self, - plaintext: Vec, - aad: &str, - cek: &chacha20poly1305_ietf::Key, - ) -> (String, String, String) { - //encrypt message with aad - let (ciphertext, iv, tag) = - gen_nonce_and_encrypt_detached(plaintext.as_slice(), aad.as_bytes(), cek); - - //base64 url encode data - let iv_encoded = base64::encode_urlsafe(&iv[..]); - let ciphertext_encoded = base64::encode_urlsafe(ciphertext.as_slice()); - let tag_encoded = base64::encode_urlsafe(&tag[..]); - - (ciphertext_encoded, iv_encoded, tag_encoded) - } - - /* ciphertext helper functions */ - pub(crate) fn decrypt_ciphertext( - &self, - ciphertext: &str, - aad: &str, - iv: &str, - tag: &str, - cek: &chacha20poly1305_ietf::Key, - ) -> Result { - //convert ciphertext to bytes - let ciphertext_as_vec = base64::decode_urlsafe(ciphertext).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode ciphertext {}", err), - ) - })?; - - let ciphertext_as_bytes = ciphertext_as_vec.as_ref(); - - //convert IV from &str to &Nonce - let nonce_as_vec = base64::decode_urlsafe(iv).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode IV {}", err), - ) - })?; - - let nonce_as_slice = nonce_as_vec.as_slice(); - - let nonce = chacha20poly1305_ietf::Nonce::from_slice(nonce_as_slice).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert IV to Nonce type {}", err), - ) - })?; - - //convert tag from &str to &Tag - let tag_as_vec = base64::decode_urlsafe(tag).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to decode tag {}", err), - ) - })?; - let tag_as_slice = tag_as_vec.as_slice(); - let tag = chacha20poly1305_ietf::Tag::from_slice(tag_as_slice).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert tag to Tag type {}", err), - ) - })?; - - //decrypt message - let plaintext_bytes = chacha20poly1305_ietf::decrypt_detached( - ciphertext_as_bytes, - cek, - &nonce, - &tag, - Some(aad.as_bytes()), - ) - .map_err(|err| { - err_msg( - IndyErrorKind::UnknownCrypto, - format!("Failed to decrypt ciphertext {}", err), - ) - })?; - - //convert message to readable (UTF-8) string - String::from_utf8(plaintext_bytes).map_err(|err| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Failed to convert message to UTF-8 {}", err), - ) - }) - } -} - -#[cfg(test)] -mod tests { - use indy_utils::crypto::chacha20poly1305_ietf::gen_key; - - use super::*; - use crate::domain::crypto::did::MyDidInfo; - - #[async_std::test] - async fn create_my_did_with_works_for_empty_info() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let my_did = service.create_my_did(&did_info).await; - assert!(my_did.is_ok()); - } - - #[async_std::test] - async fn create_my_did_works_for_passed_did() { - let service = CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let did_info = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, _) = service.create_my_did(&did_info).await.unwrap(); - assert_eq!(did, my_did.did); - } - - #[async_std::test] - async fn create_my_did_not_works_for_invalid_crypto_type() { - let service = 
CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let crypto_type = Some("type".to_string()); - - let did_info = MyDidInfo { - did: Some(did), - cid: None, - seed: None, - crypto_type, - method_name: None, - ledger_type: None, - }; - - assert!(service.create_my_did(&did_info).await.is_err()); - } - - #[async_std::test] - async fn create_my_did_works_for_seed() { - let service = CryptoService::new(); - - let did = DidValue("NcYxiDXkpYi6ov5FcYDi1e".to_string()); - let seed = Some("00000000000000000000000000000My1".to_string()); - - let did_info_with_seed = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let did_info_without_seed = MyDidInfo { - did: Some(did.clone()), - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (did_with_seed, _) = service.create_my_did(&did_info_with_seed).await.unwrap(); - let (did_without_seed, _) = service.create_my_did(&did_info_without_seed).await.unwrap(); - - assert_ne!(did_with_seed.verkey, did_without_seed.verkey) - } - - #[async_std::test] - async fn create_their_did_works_without_verkey() { - let service = CryptoService::new(); - let did = DidValue("CnEDk9HrMnmiHXEV1WFgbVCRteYnPqsJwrTdcZaNhFVW".to_string()); - - let their_did_info = TheirDidInfo::new(did.clone(), None); - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - assert_eq!(did.0, their_did.verkey); - } - - #[async_std::test] - async fn create_their_did_works_for_full_verkey() { - let service = CryptoService::new(); - let did = DidValue("8wZcEriaNLNKtteJvx7f8i".to_string()); - let verkey = "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp"; - - let their_did_info = TheirDidInfo::new(did.clone(), Some(verkey.to_string())); - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - assert_eq!(verkey, their_did.verkey); - } - - #[async_std::test] - async fn create_their_did_works_for_abbreviated_verkey() { - let service = CryptoService::new(); - let did = DidValue("8wZcEriaNLNKtteJvx7f8i".to_string()); - - let their_did_info = - TheirDidInfo::new(did.clone(), Some("~NcYxiDXkpYi6ov5FcYDi1e".to_string())); - - let their_did = service.create_their_did(&their_did_info).await.unwrap(); - - assert_eq!(did, their_did.did); - - assert_eq!( - "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp", - their_did.verkey - ); - } - - #[async_std::test] - async fn sign_works() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let message = r#"message"#; - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let sig = service.sign(&my_key, message.as_bytes()).await; - - assert!(sig.is_ok()); - } - - #[async_std::test] - async fn sign_works_for_invalid_signkey() { - let service = CryptoService::new(); - let message = r#"message"#; - let my_key = Key::new( - "8wZcEriaNLNKtteJvx7f8i".to_string(), - "5L2HBnzbu6Auh2pkDRbFt5f4prvgE2LzknkuYLsKkacp".to_string(), - ); - assert!(service.sign(&my_key, message.as_bytes()).await.is_err()); - } - - #[async_std::test] - async fn sign_verify_works() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (my_did, 
my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - - let valid = service - .verify(&my_did.verkey, message.as_bytes(), &signature) - .await - .unwrap(); - - assert!(valid); - } - - #[async_std::test] - async fn sign_verify_works_for_verkey_contained_crypto_type() { - let service = CryptoService::new(); - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = my_did.verkey + ":ed25519"; - let valid = service - .verify(&verkey, message.as_bytes(), &signature) - .await - .unwrap(); - assert!(valid); - } - - #[async_std::test] - async fn sign_verify_works_for_verkey_contained_invalid_crypto_type() { - let service = CryptoService::new(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let message = r#"message"#; - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = format!("crypto_type:{}", my_did.verkey); - - assert!(service - .verify(&verkey, message.as_bytes(), &signature) - .await - .is_err()); - } - - #[async_std::test] - async fn verify_not_works_for_invalid_verkey() { - let service = CryptoService::new(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let message = r#"message"#; - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let signature = service.sign(&my_key, message.as_bytes()).await.unwrap(); - let verkey = "AnnxV4t3LUHKZaxVQDWoVaG44NrGmeDYMA4Gz6C2tCZd"; - - let valid = service - .verify(verkey, message.as_bytes(), &signature) - .await - .unwrap(); - - assert!(!valid); - } - - #[async_std::test] - async fn crypto_box_works() { - let service = CryptoService::new(); - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (_, my_key) = service.create_my_did(&did_info).await.unwrap(); - let (their_did, _) = service.create_my_did(&did_info.clone()).await.unwrap(); - let their_did = Did::new(their_did.did, their_did.verkey); - - let encrypted_message = service - .crypto_box(&my_key, &their_did.verkey, msg.as_bytes()) - .await; - - assert!(encrypted_message.is_ok()); - } - - #[async_std::test] - async fn crypto_box_and_crypto_box_open_works() { - let service = CryptoService::new(); - - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - - let my_key_for_encrypt = my_key.clone(); - - let their_did_for_decrypt = Did::new(my_did.did, my_did.verkey); - - let (their_did, their_key) = service.create_my_did(&did_info.clone()).await.unwrap(); - - let my_key_for_decrypt = their_key.clone(); - - let their_did_for_encrypt = Did::new(their_did.did, their_did.verkey); - - let (encrypted_message, noce) = service - .crypto_box( - &my_key_for_encrypt, - &their_did_for_encrypt.verkey, - msg.as_bytes(), - ) - .await - .unwrap(); - 
- let decrypted_message = service - .crypto_box_open( - &my_key_for_decrypt, - &their_did_for_decrypt.verkey, - &encrypted_message, - &noce, - ) - .await - .unwrap(); - - assert_eq!(msg.as_bytes().to_vec(), decrypted_message); - } - - #[async_std::test] - async fn crypto_box_and_crypto_box_open_works_for_verkey_contained_crypto_type() { - let service = CryptoService::new(); - - let msg = "some message"; - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (my_did, my_key) = service.create_my_did(&did_info).await.unwrap(); - - let my_key_for_encrypt = my_key.clone(); - - let their_did_for_decrypt = Did::new(my_did.did, my_did.verkey); - - let (their_did, their_key) = service.create_my_did(&did_info.clone()).await.unwrap(); - let my_key_for_decrypt = their_key.clone(); - - let their_did_for_encrypt = Did::new(their_did.did, their_did.verkey); - - let (encrypted_message, noce) = service - .crypto_box( - &my_key_for_encrypt, - &their_did_for_encrypt.verkey, - msg.as_bytes(), - ) - .await - .unwrap(); - - let verkey = their_did_for_decrypt.verkey + ":ed25519"; - - let decrypted_message = service - .crypto_box_open(&my_key_for_decrypt, &verkey, &encrypted_message, &noce) - .await - .unwrap(); - - assert_eq!(msg.as_bytes().to_vec(), decrypted_message); - } - - #[async_std::test] - async fn crypto_box_seal_works() { - let service = CryptoService::new(); - let msg = "some message"; - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - let (did, _) = service.create_my_did(&did_info.clone()).await.unwrap(); - let did = Did::new(did.did, did.verkey); - let encrypted_message = service.crypto_box_seal(&did.verkey, msg.as_bytes()).await; - assert!(encrypted_message.is_ok()); - } - - #[async_std::test] - async fn crypto_box_seal_and_crypto_box_seal_open_works() { - let service = CryptoService::new(); - let msg = "some message".as_bytes(); - - let did_info = MyDidInfo { - did: None, - cid: None, - seed: None, - crypto_type: None, - method_name: None, - ledger_type: None, - }; - - let (did, key) = service.create_my_did(&did_info.clone()).await.unwrap(); - let encrypt_did = Did::new(did.did.clone(), did.verkey.clone()); - - let encrypted_message = service - .crypto_box_seal(&encrypt_did.verkey, msg) - .await - .unwrap(); - - let decrypted_message = service - .crypto_box_seal_open(&key, &encrypted_message) - .await - .unwrap(); - - assert_eq!(msg, decrypted_message.as_slice()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_works() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext.clone(), aad, &cek); - - let expected_plaintext = service - .decrypt_ciphertext(&expected_ciphertext, aad, &iv_encoded, &tag, &cek) - .unwrap(); - - assert_eq!(expected_plaintext.as_bytes().to_vec(), plaintext); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_empty_string_works() { - let service: CryptoService = CryptoService::new(); - let plaintext = "".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext.clone(), aad, &cek); - - let expected_plaintext = service - .decrypt_ciphertext(&expected_ciphertext, aad, &iv_encoded, &tag, &cek) - .unwrap(); - - assert_eq!(expected_plaintext.as_bytes().to_vec(), plaintext); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_bad_iv_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, _, tag) = service.encrypt_plaintext(plaintext, aad, &cek); - - //convert values to base64 encoded strings - let bad_iv_input = "invalid_iv"; - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, bad_iv_input, &tag, aad, &cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_decrypt_ciphertext_bad_ciphertext_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (_, iv_encoded, tag) = service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_ciphertext = base64::encode_urlsafe("bad_ciphertext".as_bytes()); - - let expected_error = - service.decrypt_ciphertext(&bad_ciphertext, &iv_encoded, &tag, aad, &cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_wrong_cek_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = chacha20poly1305_ietf::gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_cek = gen_key(); - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &tag, aad, &bad_cek); - - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_tag_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, _) = service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_tag = "bad_tag".to_string(); - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &bad_tag, aad, &cek); - assert!(expected_error.is_err()); - } - - #[async_std::test] - async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_aad_fails() { - let service: CryptoService = CryptoService::new(); - let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and - // alg Which the receiver MUST then check before decryption - let aad = "some protocol data input to the encryption"; - let cek = gen_key(); - - let (expected_ciphertext, iv_encoded, tag) = - service.encrypt_plaintext(plaintext, aad, &cek); - - let bad_aad = "bad aad"; - - let expected_error = - service.decrypt_ciphertext(&expected_ciphertext, &iv_encoded, &tag, bad_aad, &cek); - assert!(expected_error.is_err()); - } -} diff --git a/aries/misc/legacy/libvdrtools/src/services/mod.rs b/aries/misc/legacy/libvdrtools/src/services/mod.rs deleted file mode 100644 index 83962a3493..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod crypto; -mod wallet; - -pub use crypto::CryptoService; -pub(crate) use wallet::WalletService; diff --git a/aries/misc/legacy/libvdrtools/src/services/wallet.rs b/aries/misc/legacy/libvdrtools/src/services/wallet.rs deleted file mode 100644 index 6505049d65..0000000000 --- a/aries/misc/legacy/libvdrtools/src/services/wallet.rs +++ /dev/null @@ -1 +0,0 @@ -pub(crate) use indy_wallet::WalletService; diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs deleted file mode 100644 index 1f4948543a..0000000000 --- a/aries/misc/legacy/libvdrtools/src/utils/crypto/base58.rs +++ /dev/null @@ -1,26 +0,0 @@ -use bs58::{decode, decode::Result, encode}; - -pub trait ToBase58 { - fn to_base58(&self) -> String; -} -pub trait DecodeBase58 { - fn decode_base58(self) -> Result>; -} - -impl ToBase58 for [u8] { - fn to_base58(&self) -> String { - encode(self).into_string() - } -} - -impl DecodeBase58 for &[u8] { - fn decode_base58(self) -> Result> { - decode(self).into_vec() - } -} - -impl DecodeBase58 for &str { - fn decode_base58(self) -> Result> { - decode(self.as_bytes()).into_vec() - } -} diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs deleted file mode 100644 index df7b29719e..0000000000 --- 
a/aries/misc/legacy/libvdrtools/src/utils/crypto/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod base58; -pub mod verkey_builder; diff --git a/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs b/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs deleted file mode 100644 index b0b6fb2126..0000000000 --- a/aries/misc/legacy/libvdrtools/src/utils/crypto/verkey_builder.rs +++ /dev/null @@ -1,132 +0,0 @@ -use indy_api_types::errors::prelude::*; - -use crate::{ - services::CryptoService, - utils::crypto::base58::{DecodeBase58, ToBase58}, -}; - -pub fn build_full_verkey(dest: &str, verkey: Option<&str>) -> Result { - if let Some(verkey) = verkey { - let (verkey, crypto_type) = if verkey.contains(':') { - let splits: Vec<&str> = verkey.split(':').collect(); - (splits[0], Some(splits[1])) - } else { - (verkey, None) - }; - - let verkey = if let Some(verkey) = verkey.strip_prefix('~') { - let mut result = dest.decode_base58()?; - let mut end = verkey.decode_base58()?; - result.append(&mut end); - result.to_base58() - } else { - verkey.to_owned() - }; - - let verkey = if let Some(crypto_type) = crypto_type { - format!("{}:{}", verkey, crypto_type) - } else { - verkey - }; - - Ok(verkey) - } else { - // Cryptonym - Ok(dest.to_owned()) - } -} - -pub fn split_verkey(verkey: &str) -> (&str, &str) { - let position = verkey.find(':'); - match position { - Some(p) => { - let cryptoname = if p + 1 < verkey.len() { - verkey[p + 1..].as_ref() - } else { - CryptoService::defualt_crypto_type() - }; - let v = if p > 0 { verkey[..p].as_ref() } else { "" }; - (v, cryptoname) - } - None => (verkey, CryptoService::defualt_crypto_type()), - } -} - -pub fn verkey_get_cryptoname(verkey: &str) -> &str { - let position = verkey.find(':'); - match position { - Some(p) => { - if p + 1 < verkey.len() { - verkey[p + 1..].as_ref() - } else { - CryptoService::defualt_crypto_type() - } - } - None => CryptoService::defualt_crypto_type(), - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn split_verkey_empty() { - assert_eq!(split_verkey(""), ("", CryptoService::defualt_crypto_type())) - } - - #[test] - fn split_verkey_single_colon() { - assert_eq!( - split_verkey(":"), - ("", CryptoService::defualt_crypto_type()) - ) - } - - #[test] - fn split_verkey_ends_with_colon() { - assert_eq!( - split_verkey("foo:"), - ("foo", CryptoService::defualt_crypto_type()) - ) - } - - #[test] - fn split_verkey_starts_with_colon() { - assert_eq!(split_verkey(":bar"), ("", "bar")) - } - - #[test] - fn split_verkey_works() { - assert_eq!(split_verkey("foo:bar:baz"), ("foo", "bar:baz")) - } - - #[test] - fn verkey_get_cryptoname_empty() { - assert_eq!( - verkey_get_cryptoname(""), - CryptoService::defualt_crypto_type() - ) - } - - #[test] - fn verkey_get_cryptoname_single_colon() { - assert_eq!( - verkey_get_cryptoname(":"), - CryptoService::defualt_crypto_type() - ) - } - - #[test] - fn verkey_get_cryptoname_ends_with_colon() { - assert_eq!( - verkey_get_cryptoname("foo:"), - CryptoService::defualt_crypto_type() - ) - } - - #[test] - fn verkey_get_cryptoname_works() { - assert_eq!(verkey_get_cryptoname("foo:bar"), "bar") - } -} diff --git a/aries/misc/legacy/libvdrtools/src/utils/mod.rs b/aries/misc/legacy/libvdrtools/src/utils/mod.rs deleted file mode 100755 index c5c38aba13..0000000000 --- a/aries/misc/legacy/libvdrtools/src/utils/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub mod crypto; -pub use indy_utils::wql; -#[macro_use] -pub mod qualifier; diff --git 
a/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs b/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs deleted file mode 100644 index 6da00b58b8..0000000000 --- a/aries/misc/legacy/libvdrtools/src/utils/qualifier.rs +++ /dev/null @@ -1,94 +0,0 @@ -use lazy_static::lazy_static; -use regex::Regex; - -lazy_static! { - pub static ref REGEX: Regex = - Regex::new("^[a-z0-9]+(:(indy|cheqd))?(:[a-z0-9:]+)?:(.*)$").unwrap(); -} - -pub fn qualify(entity: &str, prefix: &str, method: &str) -> String { - format!("{}:{}:{}", prefix, method, entity) -} - -pub fn qualify_with_ledger(entity: &str, prefix: &str, method: &str, ledger_type: &str) -> String { - format!("{}:{}:{}:{}", prefix, method, ledger_type, entity) -} - -pub fn to_unqualified(entity: &str) -> String { - trace!("qualifier::to_unqualified >> {}", entity); - match REGEX.captures(entity) { - None => entity.to_string(), - Some(caps) => { - trace!("qualifier::to_unqualified: parts {:?}", caps); - caps.get(4) - .map(|m| m.as_str().to_string()) - .unwrap_or(entity.to_string()) - } - } -} - -pub fn method(entity: &str) -> Option { - match REGEX.captures(entity) { - None => None, - Some(caps) => { - trace!("qualifier::method: caps {:?}", caps); - match (caps.get(2), caps.get(3)) { - (Some(type_), Some(subnet)) => Some(type_.as_str().to_owned() + subnet.as_str()), - (Some(type_), None) => Some(type_.as_str().to_owned()), - _ => { - warn!( - "Unrecognized FQ method for {}, parsed items are (where 2nd is method \ - type, and 3rd is sub-method (namespace, ledger, type, etc){:?}", - entity, caps - ); - None - } - } - } - } -} - -pub fn is_fully_qualified(entity: &str) -> bool { - REGEX.is_match(entity) -} - -macro_rules! qualifiable_type (($newtype:ident) => ( - - #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] - pub struct $newtype(pub String); - - impl $newtype { - - #[allow(dead_code)] - pub fn get_method(&self) -> Option { - qualifier::method(&self.0) - } - - #[allow(dead_code)] - pub fn set_method(&self, method: &str) -> $newtype { - $newtype(qualifier::qualify(&self.0, $newtype::PREFIX, &method)) - } - - #[allow(dead_code)] - pub fn set_ledger_and_method(&self, ledger_type: &str, method: &str) -> $newtype { - $newtype(qualifier::qualify_with_ledger(&self.0, $newtype::PREFIX, method, ledger_type)) - } - - #[allow(dead_code)] - pub fn is_fully_qualified(&self) -> bool { - self.0.contains($newtype::PREFIX) && qualifier::is_fully_qualified(&self.0) - } - } - - impl From<&str> for $newtype { - fn from(value: &str) -> Self { - Self(value.to_owned()) - } - } - - impl From<&String> for $newtype { - fn from(value: &String) -> Self { - Self(value.clone()) - } - } -)); From cbb2117515aad92e5ac5c02d2d5280f86fc7eb06 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 08:56:28 +1000 Subject: [PATCH 06/15] remove credx Signed-off-by: gmulhearn --- Cargo.lock | 42 +- Cargo.toml | 1 - aries/agents/aries-vcx-agent/Cargo.toml | 2 +- .../aries-vcx-agent/src/agent/agent_struct.rs | 6 +- .../agents/aries-vcx-agent/src/agent/init.rs | 6 +- .../src/handlers/credential_definition.rs | 6 +- .../aries-vcx-agent/src/handlers/holder.rs | 6 +- .../aries-vcx-agent/src/handlers/issuer.rs | 6 +- .../aries-vcx-agent/src/handlers/prover.rs | 6 +- .../src/handlers/revocation_registry.rs | 6 +- .../aries-vcx-agent/src/handlers/schema.rs | 6 +- .../aries-vcx-agent/src/handlers/verifier.rs | 6 +- aries/aries_vcx/Cargo.toml | 6 - aries/aries_vcx/src/common/keys.rs | 2 +- aries/aries_vcx/tests/test_pool.rs | 3 - aries/aries_vcx_anoncreds/Cargo.toml 
| 2 - .../src/anoncreds/anoncreds/mod.rs | 2 +- .../src/anoncreds/credx_anoncreds/mod.rs | 1349 ----------------- .../credx_anoncreds/type_conversion.rs | 513 ------- .../aries_vcx_anoncreds/src/anoncreds/mod.rs | 3 +- .../src/errors/mapping_credx.rs | 35 - aries/aries_vcx_anoncreds/src/errors/mod.rs | 2 - aries/misc/test_utils/Cargo.toml | 3 +- aries/misc/test_utils/src/devsetup.rs | 10 +- .../wrappers/uniffi-aries-vcx/core/Cargo.toml | 3 +- .../core/src/core/profile/askar.rs | 11 +- .../core/src/core/profile/mod.rs | 4 +- 27 files changed, 44 insertions(+), 2003 deletions(-) delete mode 100644 aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/mod.rs delete mode 100644 aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/type_conversion.rs delete mode 100644 aries/aries_vcx_anoncreds/src/errors/mapping_credx.rs diff --git a/Cargo.lock b/Cargo.lock index 48049a63ce..708de0a90e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -588,7 +588,6 @@ dependencies = [ "async-trait", "bitvec", "did_parser_nom", - "indy-credx", "log", "serde", "serde_json", @@ -2869,45 +2868,6 @@ dependencies = [ "sha3", ] -[[package]] -name = "indy-credx" -version = "1.1.0" -source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.1.0#0260b93f76573613cedb486bc8836c75c47d4cf4" -dependencies = [ - "env_logger 0.10.2", - "ffi-support", - "indy-data-types 0.7.0", - "log", - "once_cell", - "rand", - "regex", - "serde", - "serde_json", - "sha2", - "zeroize", -] - -[[package]] -name = "indy-data-types" -version = "0.7.0" -source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.1.0#0260b93f76573613cedb486bc8836c75c47d4cf4" -dependencies = [ - "anoncreds-clsignatures 0.2.4", - "bs58", - "curve25519-dalek", - "ed25519-dalek", - "hex", - "once_cell", - "rand", - "regex", - "serde", - "serde_json", - "sha2", - "thiserror", - "x25519-dalek", - "zeroize", -] - [[package]] name = "indy-data-types" version = "0.7.1" @@ -2957,7 +2917,7 @@ dependencies = [ "futures-util", "hex", "indy-blssignatures", - "indy-data-types 0.7.1", + "indy-data-types", "log", "once_cell", "percent-encoding", diff --git a/Cargo.toml b/Cargo.toml index 715a48e0e6..8a22790358 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -77,7 +77,6 @@ indy-vdr = { git = "https://github.com/hyperledger/indy-vdr.git", tag = "v0.4.3" "log", ] } indy-vdr-proxy-client = { git = "https://github.com/hyperledger/indy-vdr.git", tag = "v0.4.3" } -indy-credx = { git = "https://github.com/hyperledger/indy-shared-rs", tag = "v1.1.0" } anoncreds = { git = "https://github.com/hyperledger/anoncreds-rs.git", tag = "v0.2.0" } aries-askar = { version = "0.3.1" } askar-crypto = { version = "0.3.1", default-features = false } diff --git a/aries/agents/aries-vcx-agent/Cargo.toml b/aries/agents/aries-vcx-agent/Cargo.toml index ffa46fd673..7921d4fc65 100644 --- a/aries/agents/aries-vcx-agent/Cargo.toml +++ b/aries/agents/aries-vcx-agent/Cargo.toml @@ -11,7 +11,7 @@ aries_vcx = { path = "../../aries_vcx" } aries_vcx_wallet = { path = "../../aries_vcx_wallet", features = [ "askar_wallet", ] } -aries_vcx_anoncreds = { path = "../../aries_vcx_anoncreds", features = ["credx"] } +aries_vcx_anoncreds = { path = "../../aries_vcx_anoncreds", features = ["anoncreds"] } aries_vcx_ledger = { path = "../../aries_vcx_ledger"} anoncreds_types = { path = "../../misc/anoncreds_types" } shared = { path = "../../misc/shared" } diff --git a/aries/agents/aries-vcx-agent/src/agent/agent_struct.rs b/aries/agents/aries-vcx-agent/src/agent/agent_struct.rs index b7e26b5307..3e221115b8 100644 --- 
a/aries/agents/aries-vcx-agent/src/agent/agent_struct.rs +++ b/aries/agents/aries-vcx-agent/src/agent/agent_struct.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::{DefaultIndyLedgerRead, DefaultIndyLedgerWrite}; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; @@ -16,7 +16,7 @@ pub struct Agent { pub(super) issuer_did: String, pub(super) ledger_read: Arc, pub(super) ledger_write: Arc, - pub(super) anoncreds: IndyCredxAnonCreds, + pub(super) anoncreds: Anoncreds, pub(super) wallet: Arc, pub(super) connections: Arc>, pub(super) schemas: Arc>, @@ -64,7 +64,7 @@ impl Agent { &self.ledger_write } - pub fn anoncreds(&self) -> &IndyCredxAnonCreds { + pub fn anoncreds(&self) -> &Anoncreds { &self.anoncreds } diff --git a/aries/agents/aries-vcx-agent/src/agent/init.rs b/aries/agents/aries-vcx-agent/src/agent/init.rs index ec6589881a..ecd39ca56a 100644 --- a/aries/agents/aries-vcx-agent/src/agent/init.rs +++ b/aries/agents/aries-vcx-agent/src/agent/init.rs @@ -11,7 +11,7 @@ use aries_vcx::{ }; use aries_vcx_anoncreds::{ self, - anoncreds::{base_anoncreds::BaseAnonCreds, credx_anoncreds::IndyCredxAnonCreds}, + anoncreds::{anoncreds::Anoncreds, base_anoncreds::BaseAnonCreds}, errors::error::VcxAnoncredsError, }; use aries_vcx_ledger::ledger::indy_vdr_ledger::{ @@ -62,7 +62,7 @@ pub async fn build_askar_wallet( let wallet = config_wallet.create_wallet().await.unwrap(); let config_issuer = wallet.configure_issuer(&issuer_seed).await.unwrap(); - let anoncreds = IndyCredxAnonCreds; + let anoncreds = Anoncreds; if let Err(err) = anoncreds .prover_create_link_secret(&wallet, &DEFAULT_LINK_SECRET_ALIAS.to_string()) @@ -123,7 +123,7 @@ impl Agent { genesis_file_path: genesis_path, }; - let anoncreds = IndyCredxAnonCreds; + let anoncreds = Anoncreds; let (ledger_read, ledger_write) = build_ledger_components(vcx_pool_config.clone()).unwrap(); let ledger_read = Arc::new(ledger_read); diff --git a/aries/agents/aries-vcx-agent/src/handlers/credential_definition.rs b/aries/agents/aries-vcx-agent/src/handlers/credential_definition.rs index 4a1406c411..1ef372f0bf 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/credential_definition.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/credential_definition.rs @@ -2,7 +2,7 @@ use std::sync::{Arc, Mutex}; use anoncreds_types::data_types::identifiers::schema_id::SchemaId; use aries_vcx::{common::primitives::credential_definition::CredentialDef, did_parser_nom::Did}; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::{DefaultIndyLedgerRead, DefaultIndyLedgerWrite}; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; @@ -14,7 +14,7 @@ use crate::{ pub struct ServiceCredentialDefinitions { ledger_read: Arc, ledger_write: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, cred_defs: AgentStorageInMem, } @@ -23,7 +23,7 @@ impl ServiceCredentialDefinitions { pub fn new( ledger_read: Arc, ledger_write: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, ) -> Self { Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/holder.rs b/aries/agents/aries-vcx-agent/src/handlers/holder.rs index 6ed60fea95..4c9b1dc929 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/holder.rs +++ 
b/aries/agents/aries-vcx-agent/src/handlers/holder.rs @@ -12,7 +12,7 @@ use aries_vcx::{ }, protocols::{issuance::holder::state_machine::HolderState, SendClosure}, }; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::DefaultIndyLedgerRead; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; @@ -40,7 +40,7 @@ impl HolderWrapper { pub struct ServiceCredentialsHolder { ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, creds_holder: AgentStorageInMem, service_connections: Arc>, @@ -49,7 +49,7 @@ pub struct ServiceCredentialsHolder { impl ServiceCredentialsHolder { pub fn new( ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, service_connections: Arc>, ) -> Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/issuer.rs b/aries/agents/aries-vcx-agent/src/handlers/issuer.rs index 2826363d43..50238216fa 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/issuer.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/issuer.rs @@ -11,7 +11,7 @@ use aries_vcx::{ }, protocols::{issuance::issuer::state_machine::IssuerState, SendClosure}, }; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; use crate::{ @@ -37,7 +37,7 @@ impl IssuerWrapper { } pub struct ServiceCredentialsIssuer { - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, creds_issuer: AgentStorageInMem, service_connections: Arc>, @@ -45,7 +45,7 @@ pub struct ServiceCredentialsIssuer { impl ServiceCredentialsIssuer { pub fn new( - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, service_connections: Arc>, ) -> Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/prover.rs b/aries/agents/aries-vcx-agent/src/handlers/prover.rs index 1d99a4d0d5..02914c06da 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/prover.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/prover.rs @@ -11,7 +11,7 @@ use aries_vcx::{ }, protocols::{proof_presentation::prover::state_machine::ProverState, SendClosure}, }; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::DefaultIndyLedgerRead; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; use serde_json::Value; @@ -40,7 +40,7 @@ impl ProverWrapper { pub struct ServiceProver { ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, provers: AgentStorageInMem, service_connections: Arc>, @@ -49,7 +49,7 @@ pub struct ServiceProver { impl ServiceProver { pub fn new( ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, service_connections: Arc>, ) -> Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/revocation_registry.rs b/aries/agents/aries-vcx-agent/src/handlers/revocation_registry.rs index 415fc01c6b..3960db0bf0 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/revocation_registry.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/revocation_registry.rs @@ -5,7 +5,7 @@ use std::{ use anoncreds_types::data_types::identifiers::cred_def_id::CredentialDefinitionId; use aries_vcx::{common::primitives::revocation_registry::RevocationRegistry, did_parser_nom::Did}; -use 
aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::{DefaultIndyLedgerRead, DefaultIndyLedgerWrite}; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; @@ -17,7 +17,7 @@ use crate::{ pub struct ServiceRevocationRegistries { ledger_write: Arc, ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, issuer_did: Did, rev_regs: AgentStorageInMem, @@ -27,7 +27,7 @@ impl ServiceRevocationRegistries { pub fn new( ledger_write: Arc, ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, issuer_did: String, ) -> Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/schema.rs b/aries/agents/aries-vcx-agent/src/handlers/schema.rs index 6aaa6e7f89..3c5511a922 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/schema.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/schema.rs @@ -1,7 +1,7 @@ use std::sync::{Arc, Mutex}; use aries_vcx::{common::primitives::credential_schema::Schema, did_parser_nom::Did}; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::{ base_ledger::AnoncredsLedgerRead, indy_vdr_ledger::{DefaultIndyLedgerRead, DefaultIndyLedgerWrite}, @@ -16,7 +16,7 @@ use crate::{ pub struct ServiceSchemas { ledger_read: Arc, ledger_write: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, issuer_did: Did, schemas: AgentStorageInMem, @@ -26,7 +26,7 @@ impl ServiceSchemas { pub fn new( ledger_read: Arc, ledger_write: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, issuer_did: String, ) -> Self { diff --git a/aries/agents/aries-vcx-agent/src/handlers/verifier.rs b/aries/agents/aries-vcx-agent/src/handlers/verifier.rs index 9fb3a6d672..7fd262b1a2 100644 --- a/aries/agents/aries-vcx-agent/src/handlers/verifier.rs +++ b/aries/agents/aries-vcx-agent/src/handlers/verifier.rs @@ -16,7 +16,7 @@ use aries_vcx::{ SendClosure, }, }; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::indy_vdr_ledger::DefaultIndyLedgerRead; use aries_vcx_wallet::wallet::base_wallet::BaseWallet; @@ -44,7 +44,7 @@ impl VerifierWrapper { pub struct ServiceVerifier { ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, verifiers: AgentStorageInMem, service_connections: Arc>, @@ -53,7 +53,7 @@ pub struct ServiceVerifier { impl ServiceVerifier { pub fn new( ledger_read: Arc, - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet: Arc, service_connections: Arc>, ) -> Self { diff --git a/aries/aries_vcx/Cargo.toml b/aries/aries_vcx/Cargo.toml index 985fa750d9..12d6ffb1fd 100644 --- a/aries/aries_vcx/Cargo.toml +++ b/aries/aries_vcx/Cargo.toml @@ -11,16 +11,10 @@ path = "src/lib.rs" doctest = false [features] -credx = [ - "aries_vcx_anoncreds/credx", - "test_utils/askar_wallet", - "test_utils/credx" -] anoncreds = ["aries_vcx_anoncreds/anoncreds", "test_utils/anoncreds", "test_utils/askar_wallet"] vdr_proxy_ledger = [ "aries_vcx_wallet/askar_wallet", "test_utils/vdr_proxy_ledger", - "credx", ] backtrace_errors = ["backtrace"] diff --git a/aries/aries_vcx/src/common/keys.rs b/aries/aries_vcx/src/common/keys.rs index 9fd14a97f1..d96ce249de 100644 --- a/aries/aries_vcx/src/common/keys.rs +++ b/aries/aries_vcx/src/common/keys.rs 
@@ -111,7 +111,7 @@ pub async fn get_verkey_from_ledger( // // #[tokio::test] // #[ignore] -// #[cfg(all(not(feature = "vdr_proxy_ledger"), not(feature = "credx"),))] +// #[cfg(all(not(feature = "vdr_proxy_ledger")))] // async fn test_pool_rotate_verkey_fails() { // use super::*; // diff --git a/aries/aries_vcx/tests/test_pool.rs b/aries/aries_vcx/tests/test_pool.rs index e993567ae7..f6302820b3 100644 --- a/aries/aries_vcx/tests/test_pool.rs +++ b/aries/aries_vcx/tests/test_pool.rs @@ -427,9 +427,6 @@ async fn test_pool_rev_reg_def_fails_for_cred_def_created_without_revocation( ) .await; - #[cfg(feature = "credx")] - assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidState); - #[cfg(not(feature = "credx"))] assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidInput); Ok(()) } diff --git a/aries/aries_vcx_anoncreds/Cargo.toml b/aries/aries_vcx_anoncreds/Cargo.toml index b9a29f9030..4a8028f9b9 100644 --- a/aries/aries_vcx_anoncreds/Cargo.toml +++ b/aries/aries_vcx_anoncreds/Cargo.toml @@ -8,12 +8,10 @@ edition.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [features] -credx = ["dep:indy-credx"] anoncreds = ["dep:anoncreds"] legacy_proof = [] [dependencies] -indy-credx = { workspace = true, optional = true } anoncreds = { workspace = true, optional = true } aries_vcx_wallet = { path = "../aries_vcx_wallet" } anoncreds_types = { path = "../misc/anoncreds_types" } diff --git a/aries/aries_vcx_anoncreds/src/anoncreds/anoncreds/mod.rs b/aries/aries_vcx_anoncreds/src/anoncreds/anoncreds/mod.rs index 4e4b3aeeda..d32a9114cd 100644 --- a/aries/aries_vcx_anoncreds/src/anoncreds/anoncreds/mod.rs +++ b/aries/aries_vcx_anoncreds/src/anoncreds/anoncreds/mod.rs @@ -165,7 +165,7 @@ fn from_revocation_status_list_to_revocation_registry_delta( }) } -#[derive(Debug)] +#[derive(Debug, Copy, Clone)] pub struct Anoncreds; #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/mod.rs b/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/mod.rs deleted file mode 100644 index afe0796c95..0000000000 --- a/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/mod.rs +++ /dev/null @@ -1,1349 +0,0 @@ -mod type_conversion; - -use std::{ - borrow::Borrow, - collections::{HashMap, HashSet}, - path::Path, - sync::Arc, -}; - -use anoncreds_types::data_types::{ - identifiers::{ - cred_def_id::CredentialDefinitionId, rev_reg_def_id::RevocationRegistryDefinitionId, - schema_id::SchemaId, - }, - ledger::{ - cred_def::CredentialDefinition, - rev_reg::RevocationRegistry, - rev_reg_def::RevocationRegistryDefinition, - rev_reg_delta::RevocationRegistryDelta, - schema::{AttributeNames, Schema}, - }, - messages::{ - cred_definition_config::CredentialDefinitionConfig, - cred_offer::CredentialOffer, - cred_request::{CredentialRequest, CredentialRequestMetadata}, - cred_selection::{RetrievedCredentialInfo, RetrievedCredentials}, - credential::{Credential, CredentialValues}, - nonce::Nonce, - pres_request::PresentationRequest, - presentation::{Presentation, RequestedCredentials}, - revocation_state::CredentialRevocationState, - }, -}; -use aries_vcx_wallet::{ - errors::error::VcxWalletResult, - wallet::{ - base_wallet::{ - record::{AllRecords, Record}, - record_category::RecordCategory, - record_wallet::RecordWallet, - BaseWallet, - }, - record_tags::{RecordTag, RecordTags}, - }, -}; -use async_trait::async_trait; -use credx::{ - anoncreds_clsignatures::{bn::BigNumber, LinkSecret as 
ClLinkSecret}, - tails::{TailsFileReader, TailsFileWriter}, - types::{ - Credential as CredxCredential, CredentialDefinition as CredxCredentialDefinition, - CredentialDefinitionId as CredxCredentialDefinitionId, - CredentialOffer as CredxCredentialOffer, CredentialRequest as CredxCredentialRequest, - CredentialRequestMetadata as CredxCredentialRequestMetadata, CredentialRevocationConfig, - CredentialRevocationState as CredxCredentialRevocationState, - CredentialValues as CredxCredentialValues, IssuanceType, LinkSecret, PresentCredentials, - Presentation as CredxPresentation, PresentationRequest as CredxPresentationRequest, - RegistryType, RevocationRegistry as CredxRevocationRegistry, - RevocationRegistryDefinition as CredxRevocationRegistryDefinition, - RevocationRegistryDelta as CredxRevocationRegistryDelta, - RevocationRegistryId as CredxRevocationRegistryId, Schema as CredxSchema, - SchemaId as CredxSchemaId, - }, -}; -use did_parser_nom::Did; -use indy_credx as credx; -use log::warn; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use serde_json::{json, Value}; -use type_conversion::Convert; -use uuid::Uuid; - -use super::base_anoncreds::{ - BaseAnonCreds, CredentialDefinitionsMap, CredentialId, LinkSecretId, RevocationRegistriesMap, - RevocationRegistryDefinitionsMap, RevocationStatesMap, SchemasMap, -}; -use crate::{ - errors::error::{VcxAnoncredsError, VcxAnoncredsResult}, - utils::{constants::ATTRS, json::AsTypeOrDeserializationError}, -}; - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RevocationRegistryInfo { - pub id: CredxRevocationRegistryId, - pub curr_id: u32, - pub used_ids: HashSet, -} - -/// Adapter used so that credx does not depend strictly on the vdrtools-wallet -/// Will get removed when the wallet and anoncreds interfaces are de-coupled. -#[derive(Debug)] -struct WalletAdapter(Arc); - -#[async_trait] -#[allow(dead_code)] -impl RecordWallet for WalletAdapter { - async fn all_records(&self) -> VcxWalletResult> { - self.0.all_records().await - } - - async fn add_record(&self, record: Record) -> VcxWalletResult<()> { - self.0.add_record(record).await - } - - async fn get_record(&self, category: RecordCategory, name: &str) -> VcxWalletResult { - self.0.get_record(category, name).await - } - - async fn update_record_tags( - &self, - category: RecordCategory, - name: &str, - new_tags: RecordTags, - ) -> VcxWalletResult<()> { - self.0.update_record_tags(category, name, new_tags).await - } - - async fn update_record_value( - &self, - category: RecordCategory, - name: &str, - new_value: &str, - ) -> VcxWalletResult<()> { - self.0.update_record_value(category, name, new_value).await - } - - async fn delete_record(&self, category: RecordCategory, name: &str) -> VcxWalletResult<()> { - self.0.delete_record(category, name).await - } - - async fn search_record( - &self, - category: RecordCategory, - search_filter: Option, - ) -> VcxWalletResult> { - self.0.search_record(category, search_filter).await - } -} - -#[derive(Debug, Copy, Clone)] -pub struct IndyCredxAnonCreds; - -impl IndyCredxAnonCreds { - async fn get_wallet_record_value( - wallet: &impl BaseWallet, - category: RecordCategory, - id: &str, - ) -> VcxAnoncredsResult - where - T: DeserializeOwned, - { - let str_record = wallet.get_record(category, id).await?; - Ok(serde_json::from_str(str_record.value())?) 
- } - - async fn get_link_secret( - wallet: &impl BaseWallet, - link_secret_id: &LinkSecretId, - ) -> VcxAnoncredsResult { - let record = wallet - .get_record(RecordCategory::LinkSecret, link_secret_id) - .await?; - - let ms_bn: BigNumber = BigNumber::from_dec(record.value()).map_err(|err| { - VcxAnoncredsError::UrsaError(format!( - "Failed to create BigNumber, UrsaErrorKind: {:?}", - err.kind() - )) - })?; - let ursa_ms: ClLinkSecret = serde_json::from_value(json!({ "ms": ms_bn }))?; - - Ok(LinkSecret { value: ursa_ms }) - } - - async fn _get_credential( - wallet: &impl BaseWallet, - credential_id: &str, - ) -> VcxAnoncredsResult { - let cred_record = wallet - .get_record(RecordCategory::Cred, credential_id) - .await?; - - let credential: CredxCredential = serde_json::from_str(cred_record.value())?; - - Ok(credential) - } - - async fn _get_credentials( - wallet: &impl BaseWallet, - wql: &str, - ) -> VcxAnoncredsResult> { - let records = wallet - .search_record(RecordCategory::Cred, Some(wql.into())) - .await?; - - let id_cred_tuple_list: VcxAnoncredsResult> = records - .into_iter() - .map(|record| { - let credential: CredxCredential = serde_json::from_str(record.value())?; - - Ok((record.name().into(), credential)) - }) - .collect(); - - id_cred_tuple_list - } - - async fn _get_credentials_for_proof_req_for_attr_name( - &self, - wallet: &impl BaseWallet, - restrictions: Option<&Value>, - attr_names: Vec, - ) -> VcxAnoncredsResult> { - let mut attrs = Vec::new(); - - for name in attr_names { - let attr_marker_tag_name = _format_attribute_as_marker_tag_name(&name); - - let wql_attr_query = json!({ - attr_marker_tag_name: "1" - }); - - attrs.push(wql_attr_query); - } - - let restrictions = restrictions.map(|x| x.to_owned()); - - let wql_query = if let Some(restrictions) = restrictions { - match restrictions { - Value::Array(restrictions) => { - let restrictions_wql = json!({ "$or": restrictions }); - attrs.push(restrictions_wql); - json!({ "$and": attrs }) - } - Value::Object(restriction) => { - attrs.push(Value::Object(restriction)); - json!({ "$and": attrs }) - } - Value::Null => { - json!({ "$and": attrs }) - } - _ => Err(VcxAnoncredsError::InvalidInput( - "Invalid attribute restrictions (must be array or an object)".into(), - ))?, - } - } else { - json!({ "$and": attrs }) - }; - - let wql_query = serde_json::to_string(&wql_query)?; - - Self::_get_credentials(wallet, &wql_query).await - } -} - -#[async_trait] -impl BaseAnonCreds for IndyCredxAnonCreds { - async fn verifier_verify_proof( - &self, - proof_req_json: PresentationRequest, - proof_json: Presentation, - schemas_json: SchemasMap, - credential_defs_json: CredentialDefinitionsMap, - rev_reg_defs_json: Option, - rev_regs_json: Option, - ) -> VcxAnoncredsResult { - let presentation: CredxPresentation = proof_json.convert(())?; - let pres_req: CredxPresentationRequest = proof_req_json.convert(())?; - - let schemas: HashMap = schemas_json.convert(())?; - - let cred_defs: HashMap = - credential_defs_json.convert(())?; - - let rev_reg_defs: Option< - HashMap, - > = rev_reg_defs_json.map(|v| v.convert(())).transpose()?; - - let rev_regs: Option< - HashMap>, - > = rev_regs_json.map(|v| v.convert(())).transpose()?; - let rev_regs: Option< - HashMap>, - > = rev_regs.as_ref().map(|regs| { - let mut new_regs: HashMap< - CredxRevocationRegistryId, - HashMap, - > = HashMap::new(); - for (k, v) in regs { - new_regs.insert(k.clone(), hashmap_as_ref(v)); - } - new_regs - }); - - let output = credx::verifier::verify_presentation( - &presentation, - 
&pres_req, - &hashmap_as_ref(&schemas), - &hashmap_as_ref(&cred_defs), - rev_reg_defs.as_ref().map(hashmap_as_ref).as_ref(), - rev_regs.as_ref(), - )?; - - #[cfg(feature = "legacy_proof")] - let output = output - || credx::verifier::verify_presentation_legacy( - &presentation, - &pres_req, - &hashmap_as_ref(&schemas), - &hashmap_as_ref(&cred_defs), - rev_reg_defs.as_ref().map(hashmap_as_ref).as_ref(), - rev_regs.as_ref(), - )?; - - Ok(output) - } - - async fn issuer_create_and_store_revoc_reg( - &self, - wallet: &impl BaseWallet, - issuer_did: &Did, - cred_def_id: &CredentialDefinitionId, - tails_dir: &Path, - max_creds: u32, - tag: &str, - ) -> VcxAnoncredsResult<( - RevocationRegistryDefinitionId, - RevocationRegistryDefinition, - RevocationRegistry, - )> { - let issuer_did = issuer_did.convert(())?; - - let mut tails_writer = TailsFileWriter::new(Some(tails_dir.to_str().unwrap().to_string())); - - let cred_def = - Self::get_wallet_record_value(wallet, RecordCategory::CredDef, &cred_def_id.0).await?; - - let rev_reg_id = credx::issuer::make_revocation_registry_id( - &issuer_did, - &cred_def, - tag, - RegistryType::CL_ACCUM, - )?; - - let res_rev_reg = - Self::get_wallet_record_value(wallet, RecordCategory::RevReg, &rev_reg_id.0).await; - let res_rev_reg_def = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegDef, &rev_reg_id.0).await; - - if let (Ok(rev_reg), Ok(rev_reg_def)) = (res_rev_reg, res_rev_reg_def) { - return Ok((rev_reg_id.to_string().try_into()?, rev_reg, rev_reg_def)); - } - - let (rev_reg_def, rev_reg_def_priv, rev_reg, _rev_reg_delta) = - credx::issuer::create_revocation_registry( - &issuer_did, - &cred_def, - tag, - RegistryType::CL_ACCUM, - IssuanceType::ISSUANCE_BY_DEFAULT, - max_creds, - &mut tails_writer, - )?; - - // Store stuff in wallet - let rev_reg_info = RevocationRegistryInfo { - id: rev_reg_id.clone(), - curr_id: 0, - used_ids: HashSet::new(), - }; - - let str_rev_reg_info = serde_json::to_string(&rev_reg_info)?; - let record = Record::builder() - .name(rev_reg_id.0.clone()) - .category(RecordCategory::RevRegInfo) - .value(str_rev_reg_info) - .build(); - wallet.add_record(record).await?; - - let str_rev_reg_def = serde_json::to_string(&rev_reg_def)?; - let record = Record::builder() - .name(rev_reg_id.0.clone()) - .category(RecordCategory::RevRegDef) - .value(str_rev_reg_def.clone()) - .build(); - wallet.add_record(record).await?; - - let str_rev_reg_def_priv = serde_json::to_string(&rev_reg_def_priv)?; - let record = Record::builder() - .name(rev_reg_id.0.clone()) - .category(RecordCategory::RevRegDefPriv) - .value(str_rev_reg_def_priv) - .build(); - wallet.add_record(record).await?; - - let str_rev_reg = serde_json::to_string(&rev_reg)?; - let record = Record::builder() - .name(rev_reg_id.0.clone()) - .category(RecordCategory::RevReg) - .value(str_rev_reg.clone()) - .build(); - wallet.add_record(record).await?; - - Ok(( - rev_reg_id.to_string().try_into()?, - rev_reg_def.convert(())?, - rev_reg.convert(())?, - )) - } - - async fn issuer_create_and_store_credential_def( - &self, - wallet: &impl BaseWallet, - issuer_did: &Did, - _schema_id: &SchemaId, - schema_json: Schema, - config_json: CredentialDefinitionConfig, - ) -> VcxAnoncredsResult { - let issuer_did = issuer_did.to_owned(); - - let CredentialDefinitionConfig { - signature_type, - tag, - .. 
- } = config_json.clone(); - - let schema_seq_no = schema_json.seq_no; - let schema = schema_json.clone().convert(())?; - - let cred_def_id = credx::issuer::make_credential_definition_id( - &issuer_did.convert(())?, - schema.id(), - schema_seq_no, - &tag, - signature_type.convert(())?, - )?; - - // If cred def already exists, return it - if let Ok(cred_def) = - Self::get_wallet_record_value(wallet, RecordCategory::CredDef, &cred_def_id.0).await - { - // TODO: Perform conversion - return Ok(cred_def); - } - - // Otherwise, create cred def - let (cred_def, cred_def_priv, cred_key_correctness_proof) = - credx::issuer::create_credential_definition( - &issuer_did.convert(())?, - &schema, - &tag, - signature_type.convert(())?, - config_json.convert(())?, - )?; - - let str_cred_def = serde_json::to_string(&cred_def)?; - let record = Record::builder() - .name(cred_def_id.0.clone()) - .category(RecordCategory::CredDef) - .value(str_cred_def.clone()) - .build(); - wallet.add_record(record).await?; - - let str_cred_def_priv = serde_json::to_string(&cred_def_priv)?; - let record = Record::builder() - .name(cred_def_id.0.clone()) - .category(RecordCategory::CredDefPriv) - .value(str_cred_def_priv) - .build(); - wallet.add_record(record).await?; - - let str_cred_key_proof = serde_json::to_string(&cred_key_correctness_proof)?; - let record = Record::builder() - .name(cred_def_id.0.clone()) - .category(RecordCategory::CredKeyCorrectnessProof) - .value(str_cred_key_proof) - .build(); - wallet.add_record(record).await?; - - let record = Record::builder() - .name(schema.id().to_string()) - .category(RecordCategory::CredSchema) - .value(serde_json::to_string(&schema_json)?) - .build(); - let store_schema_res = wallet.add_record(record).await; - - if let Err(e) = store_schema_res { - warn!( - "Storing schema {schema_json:?} failed - {e}. It's possible it is already stored." - ) - } - - let record = Record::builder() - .name(cred_def_id.0.clone()) - .category(RecordCategory::CredMapSchemaId) - .value(schema.id().0.clone()) - .build(); - wallet.add_record(record).await?; - - Ok(cred_def.convert((issuer_did.to_string(),))?) - } - - async fn issuer_create_credential_offer( - &self, - wallet: &impl BaseWallet, - cred_def_id: &CredentialDefinitionId, - ) -> VcxAnoncredsResult { - let cred_def = - Self::get_wallet_record_value(wallet, RecordCategory::CredDef, &cred_def_id.0).await?; - - let correctness_proof = Self::get_wallet_record_value( - wallet, - RecordCategory::CredKeyCorrectnessProof, - &cred_def_id.0, - ) - .await?; - - let schema = wallet - .get_record(RecordCategory::CredMapSchemaId, &cred_def_id.0) - .await?; - - let schema_id = CredxSchemaId(schema.value().into()); - - // If cred_def contains schema ID, why take it as an argument here...? - let offer = - credx::issuer::create_credential_offer(&schema_id, &cred_def, &correctness_proof)?; - - Ok(offer.convert(())?) 
- } - - async fn issuer_create_credential( - &self, - wallet: &impl BaseWallet, - cred_offer_json: CredentialOffer, - cred_req_json: CredentialRequest, - cred_values_json: CredentialValues, - rev_reg_id: Option<&RevocationRegistryDefinitionId>, - tails_dir: Option<&Path>, - ) -> VcxAnoncredsResult<(Credential, Option)> { - let rev_reg_id = rev_reg_id.map(ToString::to_string); - let cred_offer: CredxCredentialOffer = cred_offer_json.convert(())?; - let cred_request: CredxCredentialRequest = cred_req_json.convert(())?; - let cred_values: CredxCredentialValues = cred_values_json.convert(())?; - - // TODO: Might need to qualify with offer method or something - look into how vdrtools does - // it - let cred_def_id = &cred_offer.cred_def_id.0; - - let cred_def = - Self::get_wallet_record_value(wallet, RecordCategory::CredDef, cred_def_id).await?; - - let cred_def_private = - Self::get_wallet_record_value(wallet, RecordCategory::CredDefPriv, cred_def_id).await?; - - let mut revocation_config_parts = match &rev_reg_id { - Some(rev_reg_id) => { - let rev_reg_def = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegDef, rev_reg_id) - .await?; - - let rev_reg_def_priv = Self::get_wallet_record_value( - wallet, - RecordCategory::RevRegDefPriv, - rev_reg_id, - ) - .await?; - - let rev_reg = - Self::get_wallet_record_value(wallet, RecordCategory::RevReg, rev_reg_id) - .await?; - let rev_reg_info: RevocationRegistryInfo = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegInfo, rev_reg_id) - .await?; - - Some((rev_reg_def, rev_reg_def_priv, rev_reg, rev_reg_info)) - } - None => { - warn!( - "Missing revocation config params: tails_dir: {tails_dir:?} - {rev_reg_id:?}; \ - Issuing non revokable credential" - ); - None - } - }; - - let revocation_config = match &mut revocation_config_parts { - Some((rev_reg_def, rev_reg_def_priv, rev_reg, rev_reg_info)) => { - rev_reg_info.curr_id += 1; - - let CredxRevocationRegistryDefinition::RevocationRegistryDefinitionV1( - rev_reg_def_v1, - ) = rev_reg_def; - - if rev_reg_info.curr_id > rev_reg_def_v1.value.max_cred_num { - return Err(VcxAnoncredsError::ActionNotSupported( - "The revocation registry is full".into(), - )); - } - - if rev_reg_def_v1.value.issuance_type == IssuanceType::ISSUANCE_ON_DEMAND { - rev_reg_info.used_ids.insert(rev_reg_info.curr_id); - } - - let revocation_config = CredentialRevocationConfig { - reg_def: rev_reg_def, - reg_def_private: rev_reg_def_priv, - registry: rev_reg, - registry_idx: rev_reg_info.curr_id, - registry_used: &rev_reg_info.used_ids, - }; - - Some(revocation_config) - } - None => None, - }; - - let (cred, rev_reg, _rev_reg_delta) = credx::issuer::create_credential( - &cred_def, - &cred_def_private, - &cred_offer, - &cred_request, - cred_values, - revocation_config, - )?; - - let str_rev_reg = rev_reg.as_ref().map(serde_json::to_string).transpose()?; - - let cred_rev_id = - if let (Some(rev_reg_id), Some(str_rev_reg), Some((_, _, _, rev_reg_info))) = - (rev_reg_id, &str_rev_reg, revocation_config_parts) - { - let cred_rev_id = rev_reg_info.curr_id; - let str_rev_reg_info = serde_json::to_string(&rev_reg_info)?; - - wallet - .update_record_value(RecordCategory::RevReg, &rev_reg_id, str_rev_reg) - .await?; - - wallet - .update_record_value(RecordCategory::RevRegInfo, &rev_reg_id, &str_rev_reg_info) - .await?; - - Some(cred_rev_id) - } else { - None - }; - - Ok((cred.convert(())?, cred_rev_id)) - } - - async fn prover_create_proof( - &self, - wallet: &impl BaseWallet, - proof_req_json: PresentationRequest, - 
requested_credentials_json: RequestedCredentials, - link_secret_id: &LinkSecretId, - schemas_json: SchemasMap, - credential_defs_json: CredentialDefinitionsMap, - revoc_states_json: Option, - ) -> VcxAnoncredsResult { - let pres_req: CredxPresentationRequest = proof_req_json.convert(())?; - - let requested_attributes = requested_credentials_json.requested_attributes; - let requested_predicates = requested_credentials_json.requested_predicates; - let self_attested_attributes = requested_credentials_json.self_attested_attributes; - - let schemas: HashMap = schemas_json.convert(())?; - - let mut present_credentials: PresentCredentials = PresentCredentials::new(); - - let mut proof_details_by_cred_id: HashMap< - String, - ( - CredxCredential, - Option, - Option, - Vec<(String, bool)>, - Vec, - ), - > = HashMap::new(); - - // add cred data and referent details for each requested attribute - for (reft, detail) in requested_attributes { - let cred_id = &detail.cred_id; - let revealed = detail.revealed; - - if let Some((_, _, _, req_attr_refts_revealed, _)) = - proof_details_by_cred_id.get_mut(cred_id) - { - // mapping made for this credential already, add reft and its revealed status - req_attr_refts_revealed.push((reft.to_string(), revealed)); - } else { - let credential = Self::_get_credential(wallet, cred_id).await?; - - let (timestamp, rev_state) = get_rev_state( - cred_id, - &credential, - detail.timestamp, - revoc_states_json.as_ref(), - )?; - - proof_details_by_cred_id.insert( - cred_id.to_string(), - ( - credential, - timestamp, - rev_state.map(|v| v.convert(())).transpose()?, - vec![(reft.to_string(), revealed)], - vec![], - ), - ); - } - } - - // add cred data and referent details for each requested predicate - for (reft, detail) in requested_predicates { - let cred_id = &detail.cred_id; - - if let Some((_, _, _, _, req_preds_refts)) = proof_details_by_cred_id.get_mut(cred_id) { - // mapping made for this credential already, add reft - req_preds_refts.push(reft.to_string()); - } else { - let credential = Self::_get_credential(wallet, cred_id).await?; - - let (timestamp, rev_state) = get_rev_state( - cred_id, - &credential, - detail.timestamp, - revoc_states_json.as_ref(), - )?; - - proof_details_by_cred_id.insert( - cred_id.to_string(), - ( - credential, - timestamp, - rev_state.map(|v| v.convert(())).transpose()?, - vec![], - vec![reft.to_string()], - ), - ); - } - } - - // add all accumulated requested attributes and requested predicates to credx - // [PresentCredential] object - for ( - _cred_id, - (credential, timestamp, rev_state, req_attr_refts_revealed, req_preds_refts), - ) in proof_details_by_cred_id.iter() - { - let mut add_cred = - present_credentials.add_credential(credential, *timestamp, rev_state.as_ref()); - - for (referent, revealed) in req_attr_refts_revealed { - add_cred.add_requested_attribute(referent, *revealed); - } - - for referent in req_preds_refts { - add_cred.add_requested_predicate(referent); - } - } - - let link_secret = Self::get_link_secret(wallet, link_secret_id).await?; - - let presentation = credx::prover::create_presentation( - &pres_req, - present_credentials, - Some(self_attested_attributes), - &link_secret, - &hashmap_as_ref(&schemas), - &hashmap_as_ref(&credential_defs_json.convert(())?), - )?; - - Ok(presentation.convert(())?) 
- } - - async fn prover_get_credential( - &self, - wallet: &impl BaseWallet, - cred_id: &CredentialId, - ) -> VcxAnoncredsResult { - let cred = Self::_get_credential(wallet, cred_id).await?; - - _make_cred_info(cred_id, &cred) - } - - async fn prover_get_credentials( - &self, - wallet: &impl BaseWallet, - filter_json: Option<&str>, - ) -> VcxAnoncredsResult> { - // filter_json should map to WQL query directly - // TODO - future - may wish to validate the filter_json for more accurate error reporting - - let creds_wql = filter_json.map_or("{}", |x| x); - let creds = Self::_get_credentials(wallet, creds_wql).await?; - - creds - .iter() - .map(|(credential_id, cred)| _make_cred_info(credential_id, cred)) - .collect() - } - - async fn prover_get_credentials_for_proof_req( - &self, - wallet: &impl BaseWallet, - proof_req: PresentationRequest, - ) -> VcxAnoncredsResult { - let proof_req_v: Value = serde_json::to_value(proof_req) - .map_err(|e| VcxAnoncredsError::InvalidProofRequest(e.to_string()))?; - - let requested_attributes = proof_req_v.get("requested_attributes"); - let requested_attributes = if let Some(requested_attributes) = requested_attributes { - Some(requested_attributes.try_as_object()?.clone()) - } else { - None - }; - let requested_predicates = proof_req_v.get("requested_predicates"); - let requested_predicates = if let Some(requested_predicates) = requested_predicates { - Some(requested_predicates.try_as_object()?.clone()) - } else { - None - }; - - // handle special case of "empty because json is bad" vs "empty because no attributes - // sepected" - if requested_attributes.is_none() && requested_predicates.is_none() { - return Err(VcxAnoncredsError::InvalidAttributesStructure( - "Invalid Json Parsing of Requested Attributes Retrieved From Libindy".into(), - )); - } - - let mut referents: HashSet = HashSet::new(); - if let Some(requested_attributes) = &requested_attributes { - requested_attributes.iter().for_each(|(k, _)| { - referents.insert(k.to_string()); - }) - }; - if let Some(requested_predicates) = &requested_predicates { - requested_predicates.iter().for_each(|(k, _)| { - referents.insert(k.to_string()); - }); - } - - let mut cred_by_attr: Value = json!({}); - - for reft in referents { - let requested_val = requested_attributes - .as_ref() - .and_then(|req_attrs| req_attrs.get(&reft)) - .or_else(|| { - requested_predicates - .as_ref() - .and_then(|req_preds| req_preds.get(&reft)) - }) - .ok_or( - // should not happen - VcxAnoncredsError::InvalidState(format!("Unknown referent: {}", reft)), - )?; - - let name = requested_val.get("name"); - let names = requested_val.get("names").and_then(|v| v.as_array()); - - let attr_names = match (name, names) { - (Some(name), None) => vec![_normalize_attr_name(name.try_as_str()?)], - (None, Some(names)) => names - .iter() - .map(|v| v.try_as_str().map(_normalize_attr_name)) - .collect::>()?, - _ => Err(VcxAnoncredsError::InvalidInput( - "exactly one of 'name' or 'names' must be present".into(), - ))?, - }; - - let non_revoked = requested_val.get("non_revoked"); // note that aca-py askar fetches from proof_req json - let restrictions = requested_val.get("restrictions"); - - let credx_creds = self - ._get_credentials_for_proof_req_for_attr_name(wallet, restrictions, attr_names) - .await?; - - let mut credentials_json = vec![]; - - for (cred_id, credx_cred) in credx_creds { - credentials_json.push(json!({ - "cred_info": _make_cred_info(&cred_id, &credx_cred)?, - "interval": non_revoked - })) - } - - cred_by_attr[ATTRS][reft] = 
Value::Array(credentials_json); - } - - Ok(serde_json::from_value(cred_by_attr)?) - } - - async fn prover_create_credential_req( - &self, - wallet: &impl BaseWallet, - prover_did: &Did, - cred_offer_json: CredentialOffer, - credential_def_json: CredentialDefinition, - link_secret_id: &LinkSecretId, - ) -> VcxAnoncredsResult<(CredentialRequest, CredentialRequestMetadata)> { - let prover_did = prover_did.convert(())?; - let cred_def: CredxCredentialDefinition = credential_def_json.convert(())?; - let credential_offer: CredxCredentialOffer = cred_offer_json.convert(())?; - let link_secret = Self::get_link_secret(wallet, link_secret_id).await?; - - let (cred_req, cred_req_metadata) = credx::prover::create_credential_request( - &prover_did, - &cred_def, - &link_secret, - link_secret_id, - &credential_offer, - )?; - - Ok((cred_req.convert(())?, cred_req_metadata.convert(())?)) - } - - async fn create_revocation_state( - &self, - tails_dir: &Path, - rev_reg_def_json: RevocationRegistryDefinition, - rev_reg_delta_json: RevocationRegistryDelta, - timestamp: u64, - cred_rev_id: u32, - ) -> VcxAnoncredsResult { - let revoc_reg_def: CredxRevocationRegistryDefinition = rev_reg_def_json.convert(())?; - let tails_file_hash = match revoc_reg_def.borrow() { - CredxRevocationRegistryDefinition::RevocationRegistryDefinitionV1(r) => { - &r.value.tails_hash - } - }; - - let mut tails_file_path = std::path::PathBuf::new(); - tails_file_path.push(tails_dir); - tails_file_path.push(tails_file_hash); - - let tails_path = tails_file_path.to_str().ok_or_else(|| { - VcxAnoncredsError::InvalidOption("tails file is not an unicode string".into()) - })?; - - let tails_reader = TailsFileReader::new(tails_path); - let rev_reg_delta: CredxRevocationRegistryDelta = rev_reg_delta_json.convert(())?; - - let rev_state = credx::prover::create_or_update_revocation_state( - tails_reader, - &revoc_reg_def, - &rev_reg_delta, - cred_rev_id, - timestamp, - None, - )?; - - Ok(rev_state.convert(())?) - } - - async fn prover_store_credential( - &self, - wallet: &impl BaseWallet, - cred_req_meta: CredentialRequestMetadata, - cred_json: Credential, - cred_def_json: CredentialDefinition, - rev_reg_def_json: Option, - ) -> VcxAnoncredsResult { - let mut credential: CredxCredential = cred_json.convert(())?; - let cred_request_metadata: CredxCredentialRequestMetadata = cred_req_meta.convert(())?; - let link_secret_id = &cred_request_metadata.master_secret_name; - let link_secret = Self::get_link_secret(wallet, link_secret_id).await?; - let cred_def: CredxCredentialDefinition = cred_def_json.convert(())?; - let rev_reg_def: Option = - if let Some(rev_reg_def_json) = rev_reg_def_json { - Some(rev_reg_def_json.convert(())?) 
- } else { - None - }; - - credx::prover::process_credential( - &mut credential, - &cred_request_metadata, - &link_secret, - &cred_def, - rev_reg_def.as_ref(), - )?; - - let schema_id = &credential.schema_id; - let (_schema_method, schema_issuer_did, schema_name, schema_version) = - schema_id.parts().ok_or(VcxAnoncredsError::InvalidSchema( - "Could not process credential.schema_id as parts.".into(), - ))?; - - let cred_def_id = &credential.cred_def_id; - let (_cred_def_method, issuer_did, _signature_type, _schema_id, _tag) = - cred_def_id.parts().ok_or(VcxAnoncredsError::InvalidSchema( - "Could not process credential.cred_def_id as parts.".into(), - ))?; - - let mut tags = RecordTags::new(vec![ - RecordTag::new("schema_id", &schema_id.0), - RecordTag::new("schema_issuer_did", &schema_issuer_did.0), - RecordTag::new("schema_name", &schema_name), - RecordTag::new("schema_version", &schema_version), - RecordTag::new("issuer_did", &issuer_did.0), - RecordTag::new("cred_def_id", &cred_def_id.0), - ]); - - if let Some(rev_reg_id) = &credential.rev_reg_id { - tags.add(RecordTag::new("rev_reg_id", &rev_reg_id.0)); - } - - for (raw_attr_name, attr_value) in credential.values.0.iter() { - let attr_name = _normalize_attr_name(raw_attr_name); - // add attribute name and raw value pair - let value_tag_name = _format_attribute_as_value_tag_name(&attr_name); - tags.add(RecordTag::new(&value_tag_name, &attr_value.raw)); - - // add attribute name and marker (used for checking existent) - let marker_tag_name = _format_attribute_as_marker_tag_name(&attr_name); - tags.add(RecordTag::new(&marker_tag_name, "1")) - } - - let credential_id = Uuid::new_v4().to_string(); - - let record_value = serde_json::to_string(&credential)?; - - let record = Record::builder() - .name(credential_id.clone()) - .category(RecordCategory::Cred) - .value(record_value) - .tags(tags) - .build(); - - wallet.add_record(record).await?; - - Ok(credential_id) - } - - async fn prover_create_link_secret( - &self, - wallet: &impl BaseWallet, - link_secret_id: &LinkSecretId, - ) -> VcxAnoncredsResult<()> { - let existing_record = wallet - .get_record(RecordCategory::LinkSecret, link_secret_id) - .await - .ok(); // ignore error, as we only care about whether it exists or not - - if existing_record.is_some() { - return Err(VcxAnoncredsError::DuplicationMasterSecret(format!( - "Master secret id: {} already exists in wallet.", - link_secret_id - ))); - } - - let secret = credx::prover::create_link_secret()?; - let ms_decimal = secret - .value - .value() - .map_err(|err| { - VcxAnoncredsError::UrsaError(format!( - "failed to get BigNumber from master secret, UrsaErrorKind: {:?}", - err.kind() - )) - })? - .to_dec() - .map_err(|err| { - VcxAnoncredsError::UrsaError(format!( - "Failed convert BigNumber to decimal string, UrsaErrorKind: {:?}", - err.kind() - )) - })?; - - let record = Record::builder() - .name(link_secret_id.into()) - .category(RecordCategory::LinkSecret) - .value(ms_decimal) - .build(); - - wallet.add_record(record).await?; - Ok(()) - } - - async fn prover_delete_credential( - &self, - wallet: &impl BaseWallet, - cred_id: &CredentialId, - ) -> VcxAnoncredsResult<()> { - Ok(wallet.delete_record(RecordCategory::Cred, cred_id).await?) - } - - async fn issuer_create_schema( - &self, - issuer_did: &Did, - name: &str, - version: &str, - attrs: AttributeNames, - ) -> VcxAnoncredsResult { - Ok(credx::issuer::create_schema( - &issuer_did.convert(())?, - name, - version, - attrs.convert(())?, - None, - )? 
- .convert((issuer_did.to_string(),))?) - } - - async fn revoke_credential_local( - &self, - wallet: &impl BaseWallet, - rev_reg_id: &RevocationRegistryDefinitionId, - cred_rev_id: u32, - _rev_reg_delta_json: RevocationRegistryDelta, - ) -> VcxAnoncredsResult<()> { - let rev_reg_id_str = &rev_reg_id.to_string(); - - let rev_reg = - Self::get_wallet_record_value(wallet, RecordCategory::RevReg, rev_reg_id_str).await?; - - let rev_reg_def = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegDef, rev_reg_id_str) - .await?; - - let rev_reg_priv = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegDefPriv, rev_reg_id_str) - .await?; - - let mut rev_reg_info: RevocationRegistryInfo = - Self::get_wallet_record_value(wallet, RecordCategory::RevRegInfo, rev_reg_id_str) - .await?; - - let (issuance_type, cred_def_id) = match &rev_reg_def { - CredxRevocationRegistryDefinition::RevocationRegistryDefinitionV1(r) => { - (r.value.issuance_type, r.cred_def_id.0.as_str()) - } - }; - - let cred_def = - Self::get_wallet_record_value(wallet, RecordCategory::CredDef, cred_def_id).await?; - - match issuance_type { - IssuanceType::ISSUANCE_ON_DEMAND => { - if !rev_reg_info.used_ids.remove(&cred_rev_id) { - return Err(VcxAnoncredsError::InvalidInput(format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_rev_id - ))); - }; - } - IssuanceType::ISSUANCE_BY_DEFAULT => { - if !rev_reg_info.used_ids.insert(cred_rev_id) { - return Err(VcxAnoncredsError::InvalidInput(format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_rev_id - ))); - } - } - }; - - let str_rev_reg_info = serde_json::to_string(&rev_reg_info)?; - - let (rev_reg, new_rev_reg_delta) = credx::issuer::revoke_credential( - &cred_def, - &rev_reg_def, - &rev_reg_priv, - &rev_reg, - cred_rev_id, - )?; - - let old_str_rev_reg_delta = self.get_rev_reg_delta(wallet, rev_reg_id).await?; - - let rev_reg_delta = old_str_rev_reg_delta - .to_owned() - .map(|v| v.convert(())) - .transpose()? - .map(|rev_reg_delta: CredxRevocationRegistryDelta| { - credx::issuer::merge_revocation_registry_deltas(&rev_reg_delta, &new_rev_reg_delta) - }) - .transpose()? - .unwrap_or(new_rev_reg_delta); - - let str_rev_reg = serde_json::to_string(&rev_reg)?; - let str_rev_reg_delta = serde_json::to_string(&rev_reg_delta)?; - - wallet - .update_record_value(RecordCategory::RevReg, rev_reg_id_str, &str_rev_reg) - .await?; - - wallet - .update_record_value( - RecordCategory::RevRegInfo, - rev_reg_id_str, - &str_rev_reg_info, - ) - .await?; - - match old_str_rev_reg_delta { - Some(_) => { - wallet - .update_record_value( - RecordCategory::RevRegDelta, - rev_reg_id_str, - &str_rev_reg_delta, - ) - .await? - } - None => { - let record = Record::builder() - .name(rev_reg_id_str.into()) - .category(RecordCategory::RevRegDelta) - .value(str_rev_reg_delta) - .build(); - wallet.add_record(record).await? 
- } - } - - Ok(()) - } - - async fn get_rev_reg_delta( - &self, - wallet: &impl BaseWallet, - rev_reg_id: &RevocationRegistryDefinitionId, - ) -> VcxAnoncredsResult> { - let res_rev_reg_delta = Self::get_wallet_record_value::( - wallet, - RecordCategory::RevRegDelta, - &rev_reg_id.to_string(), - ) - .await; - - if let Err(err) = &res_rev_reg_delta { - warn!( - "get_rev_reg_delta >> Unable to get rev_reg_delta cache for rev_reg_id: {}, \ - error: {}", - rev_reg_id, err - ); - } - - Ok(res_rev_reg_delta.ok()) - } - - async fn clear_rev_reg_delta( - &self, - wallet: &impl BaseWallet, - rev_reg_id: &RevocationRegistryDefinitionId, - ) -> VcxAnoncredsResult<()> { - if self.get_rev_reg_delta(wallet, rev_reg_id).await?.is_some() { - wallet - .delete_record(RecordCategory::RevRegDelta, &rev_reg_id.to_string()) - .await?; - } - - Ok(()) - } - - async fn generate_nonce(&self) -> VcxAnoncredsResult { - Ok(Nonce::from_dec(credx::verifier::generate_nonce()?.as_ref()).unwrap()) - } -} - -fn get_rev_state( - cred_id: &str, - credential: &CredxCredential, - timestamp: Option, - rev_states: Option<&RevocationStatesMap>, -) -> VcxAnoncredsResult<(Option, Option)> { - let cred_rev_reg_id = credential.rev_reg_id.as_ref().map(|id| id.0.to_string()); - let rev_state = if let (Some(timestamp), Some(cred_rev_reg_id)) = (timestamp, cred_rev_reg_id) { - let rev_state = rev_states - .as_ref() - .and_then(|_rev_states| _rev_states.get(&cred_rev_reg_id)); - let rev_state = rev_state.ok_or(VcxAnoncredsError::InvalidJson(format!( - "No revocation states provided for credential '{}' with rev_reg_id '{}'", - cred_id, cred_rev_reg_id - )))?; - - let rev_state = rev_state - .get(×tamp) - .ok_or(VcxAnoncredsError::InvalidJson(format!( - "No revocation states provided for credential '{}' with rev_reg_id '{}' at \ - timestamp '{}'", - cred_id, cred_rev_reg_id, timestamp - )))?; - - Some(rev_state.clone()) - } else { - None - }; - - Ok((timestamp, rev_state)) -} - -fn _normalize_attr_name(name: &str) -> String { - // "name": string, // attribute name, (case insensitive and ignore spaces) - name.replace(' ', "").to_lowercase() -} - -fn _make_cred_info( - credential_id: &str, - cred: &CredxCredential, -) -> VcxAnoncredsResult { - let cred_sig = serde_json::to_value(&cred.signature)?; - - let rev_info = cred_sig.get("r_credential"); - - let cred_rev_id: Option = rev_info - .and_then(|x| x.get("i")) - .and_then(|i| i.as_u64().map(|i| i as u32)); - - let mut attributes = HashMap::new(); - for (x, y) in cred.values.0.iter() { - attributes.insert(x.to_string(), y.raw.to_string()); - } - - Ok(RetrievedCredentialInfo { - referent: credential_id.to_string(), - attributes, - schema_id: SchemaId::try_from(cred.schema_id.to_string())?, - cred_def_id: cred.cred_def_id.clone().convert(())?, - rev_reg_id: cred.rev_reg_id.as_ref().map(|x| x.0.to_string()), - cred_rev_id, - }) -} - -fn _format_attribute_as_value_tag_name(attribute_name: &str) -> String { - format!("attr::{attribute_name}::value") -} - -fn _format_attribute_as_marker_tag_name(attribute_name: &str) -> String { - format!("attr::{attribute_name}::marker") -} - -// common transformation requirement in credx -fn hashmap_as_ref(map: &HashMap) -> HashMap -where - T: std::hash::Hash, - T: std::cmp::Eq, - T: std::clone::Clone, -{ - let mut new_map: HashMap = HashMap::new(); - for (k, v) in map.iter() { - new_map.insert(k.clone(), v); - } - - new_map -} diff --git a/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/type_conversion.rs 
b/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/type_conversion.rs deleted file mode 100644 index 79e214986a..0000000000 --- a/aries/aries_vcx_anoncreds/src/anoncreds/credx_anoncreds/type_conversion.rs +++ /dev/null @@ -1,513 +0,0 @@ -use std::collections::HashMap; - -use anoncreds_types::data_types::{ - identifiers::{ - cred_def_id::CredentialDefinitionId as OurCredentialDefinitionId, - issuer_id::IssuerId as OurIssuerId, - rev_reg_def_id::RevocationRegistryDefinitionId as OurRevocationRegistryDefinitionId, - schema_id::SchemaId as OurSchemaId, - }, - ledger::{ - cred_def::{ - CredentialDefinition as OurCredentialDefinition, SignatureType as OurSignatureType, - }, - rev_reg::RevocationRegistry as OurRevocationRegistry, - rev_reg_def::{ - RevocationRegistryDefinition as OurRevocationRegistryDefinition, - RevocationRegistryDefinitionValue as OurRevocationRegistryDefinitionValue, - }, - rev_reg_delta::RevocationRegistryDelta as OurRevocationRegistryDelta, - schema::{AttributeNames as OurAttributeNames, Schema as OurSchema}, - }, - messages::{ - cred_definition_config::CredentialDefinitionConfig as OurCredentialDefinitionConfig, - cred_offer::CredentialOffer as OurCredentialOffer, - cred_request::{ - CredentialRequest as OurCredentialRequest, - CredentialRequestMetadata as OurCredentialRequestMetadata, - }, - credential::{Credential as OurCredential, CredentialValues as OurCredentialValues}, - pres_request::PresentationRequest as OurPresentationRequest, - presentation::Presentation as OurPresentation, - revocation_state::CredentialRevocationState as OurCredentialRevocationState, - }, -}; -use did_parser_nom::Did; -use indy_credx::{ - issuer::create_schema, - types::{ - AttributeNames as CredxAttributeNames, Credential as CredxCredential, - CredentialDefinition as CredxCredentialDefinition, - CredentialDefinitionConfig as CredxCredentialDefinitionConfig, - CredentialDefinitionId as CredxCredentialDefinitionId, - CredentialOffer as CredxCredentialOffer, CredentialRequest as CredxCredentialRequest, - CredentialRequestMetadata as CredxCredentialRequestMetadata, - CredentialRevocationState as CredxCredentialRevocationState, - CredentialValues as CredxCredentialValues, DidValue, Presentation as CredxPresentation, - PresentationRequest as CredxPresentationRequest, - RevocationRegistry as CredxRevocationRegistry, - RevocationRegistryDefinition as CredxRevocationRegistryDefinition, - RevocationRegistryDelta as CredxRevocationRegistryDelta, - RevocationRegistryId as CredxRevocationRegistryId, Schema as CredxSchema, - SchemaId as CredxSchemaId, SignatureType as CredxSignatureType, - }, -}; - -pub trait Convert { - type Args; - type Target; - type Error; - - fn convert(self, args: Self::Args) -> Result; -} - -fn serde_convert(arg: T) -> Result> -where - S: serde::Serialize + serde::de::DeserializeOwned, - T: serde::Serialize + serde::de::DeserializeOwned, -{ - Ok(serde_json::from_value(serde_json::to_value(arg)?)?) -} - -impl Convert for OurSchema { - type Args = (); - type Target = CredxSchema; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(create_schema( - &DidValue::new(&self.issuer_id.to_string(), None), - &self.name, - &self.version, - CredxAttributeNames(self.attr_names.into()), - self.seq_no, - )?) 
- } -} - -impl Convert for CredxSchema { - type Args = (String,); - type Target = OurSchema; - type Error = Box; - - fn convert(self, (issuer_id,): Self::Args) -> Result { - match self { - CredxSchema::SchemaV1(schema) => Ok(OurSchema { - id: OurSchemaId::new(schema.id.to_string())?, - seq_no: schema.seq_no, - name: schema.name, - version: schema.version, - attr_names: OurAttributeNames(schema.attr_names.0.into_iter().collect()), - issuer_id: OurIssuerId::new(issuer_id)?, - }), - } - } -} - -impl Convert for &Did { - type Args = (); - type Target = DidValue; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(DidValue::new(&self.to_string(), None)) - } -} - -impl Convert for CredxCredentialDefinitionId { - type Args = (); - type Target = OurCredentialDefinitionId; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(OurCredentialDefinitionId::new(self.0)?) - } -} - -impl Convert for CredxCredentialDefinition { - type Args = (String,); - type Target = OurCredentialDefinition; - type Error = Box; - - fn convert(self, (issuer_id,): Self::Args) -> Result { - match self { - CredxCredentialDefinition::CredentialDefinitionV1(cred_def) => { - Ok(OurCredentialDefinition { - id: cred_def.id.convert(())?, - schema_id: OurSchemaId::new_unchecked(cred_def.schema_id.0), - signature_type: OurSignatureType::CL, - tag: cred_def.tag, - // credx doesn't expose CredentialDefinitionData - value: serde_convert(cred_def.value)?, - issuer_id: OurIssuerId::new(issuer_id)?, - }) - } - } - } -} - -impl Convert for OurCredentialDefinition { - type Args = (); - type Target = CredxCredentialDefinition; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(CredxCredentialDefinition::CredentialDefinitionV1( - serde_convert(self)?, - )) - } -} - -impl Convert for OurCredentialOffer { - type Args = (); - type Target = CredxCredentialOffer; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for CredxCredentialOffer { - type Args = (); - type Target = OurCredentialOffer; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurCredentialRequest { - type Args = (); - type Target = CredxCredentialRequest; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for CredxCredentialRequest { - type Args = (); - type Target = OurCredentialRequest; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for CredxCredentialRequestMetadata { - type Args = (); - type Target = OurCredentialRequestMetadata; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurCredentialRequestMetadata { - type Args = (); - type Target = CredxCredentialRequestMetadata; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(CredxCredentialRequestMetadata { - master_secret_blinding_data: serde_convert(self.link_secret_blinding_data)?, - nonce: serde_convert(self.nonce)?, - master_secret_name: self.link_secret_name, - }) - } -} - -impl Convert for HashMap { - type Args = (); - type Target = HashMap; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - self.into_iter() - .map(|(id, def)| { - Ok(( - CredxCredentialDefinitionId::from(id.to_string()), - def.convert(())?, - )) - }) - .collect() - } -} - -impl Convert for OurRevocationRegistryDefinition { - type Args = 
(); - type Target = CredxRevocationRegistryDefinition; - type Error = Box; - - fn convert(self, (): Self::Args) -> Result { - let mut rev_reg_def = serde_json::to_value(self)?; - rev_reg_def["value"] - .as_object_mut() - .unwrap() - .insert("issuanceType".to_string(), "ISSUANCE_BY_DEFAULT".into()); - Ok( - CredxRevocationRegistryDefinition::RevocationRegistryDefinitionV1( - serde_json::from_value(rev_reg_def)?, - ), - ) - } -} - -impl Convert for CredxRevocationRegistryDefinition { - type Args = (); - type Target = OurRevocationRegistryDefinition; - type Error = Box; - - fn convert(self, (): Self::Args) -> Result { - match self { - CredxRevocationRegistryDefinition::RevocationRegistryDefinitionV1(rev_reg_def) => { - Ok(OurRevocationRegistryDefinition { - id: OurRevocationRegistryDefinitionId::new(rev_reg_def.id.to_string())?, - revoc_def_type: - anoncreds_types::data_types::ledger::rev_reg_def::RegistryType::CL_ACCUM, - tag: rev_reg_def.tag, - cred_def_id: OurCredentialDefinitionId::new( - rev_reg_def.cred_def_id.to_string(), - )?, - value: OurRevocationRegistryDefinitionValue { - max_cred_num: rev_reg_def.value.max_cred_num, - public_keys: serde_convert(rev_reg_def.value.public_keys)?, - tails_hash: rev_reg_def.value.tails_hash, - tails_location: rev_reg_def.value.tails_location, - }, - }) - } - } - } -} - -impl Convert for HashMap { - type Args = (); - type Target = HashMap; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - self.into_iter() - .map(|(id, def)| { - Ok(( - CredxRevocationRegistryId::from(id.to_string()), - def.convert(())?, - )) - }) - .collect() - } -} - -impl Convert for OurRevocationRegistry { - type Args = (); - type Target = CredxRevocationRegistry; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(CredxRevocationRegistry::RevocationRegistryV1( - serde_convert(self)?, - )) - } -} - -impl Convert for CredxRevocationRegistry { - type Args = (); - type Target = OurRevocationRegistry; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - match self { - CredxRevocationRegistry::RevocationRegistryV1(rev_reg) => Ok(OurRevocationRegistry { - value: serde_convert(rev_reg.value)?, - }), - } - } -} - -impl Convert for HashMap> { - type Args = (); - type Target = HashMap>; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - self.into_iter() - .map(|(id, defs)| { - Ok(( - CredxRevocationRegistryId::from(id.to_string()), - defs.into_iter() - .map(|(seq_no, def)| Ok((seq_no, def.convert(())?))) - .collect::, Self::Error>>()?, - )) - }) - .collect() - } -} - -impl Convert for OurRevocationRegistryDelta { - type Args = (); - type Target = CredxRevocationRegistryDelta; - type Error = Box; - - fn convert(self, _: Self::Args) -> Result { - Ok(CredxRevocationRegistryDelta::RevocationRegistryDeltaV1( - serde_convert(self)?, - )) - } -} - -impl Convert for HashMap { - type Args = (); - type Target = HashMap; - type Error = Box; - - fn convert(self, (): Self::Args) -> Result { - self.into_iter() - .map(|(id, schema)| Ok((CredxSchemaId::from(id.to_string()), schema.convert(())?))) - .collect() - } -} - -impl Convert for OurCredential { - type Args = (); - type Target = CredxCredential; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - Ok(CredxCredential { - schema_id: CredxSchemaId::from(self.schema_id.to_string()), - cred_def_id: CredxCredentialDefinitionId::from(self.cred_def_id.to_string()), - rev_reg_id: self - .rev_reg_id - .as_ref() - .map(ToString::to_string) - 
.map(CredxRevocationRegistryId::try_from) - .transpose()?, - values: serde_convert(self.values)?, - signature: serde_convert(self.signature)?, - signature_correctness_proof: serde_convert(self.signature_correctness_proof)?, - rev_reg: serde_convert(self.rev_reg)?, - witness: serde_convert(self.witness)?, - }) - } -} - -impl Convert for CredxCredential { - type Args = (); - type Target = OurCredential; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - Ok(OurCredential { - schema_id: OurSchemaId::new_unchecked(self.schema_id.0), - cred_def_id: OurCredentialDefinitionId::new(self.cred_def_id.0)?, - rev_reg_id: self - .rev_reg_id - .map(|id| OurRevocationRegistryDefinitionId::new(id.0)) - .transpose()?, - values: serde_convert(self.values)?, - signature: serde_convert(self.signature)?, - signature_correctness_proof: serde_convert(self.signature_correctness_proof)?, - rev_reg: serde_convert(self.rev_reg)?, - witness: serde_convert(self.witness)?, - }) - } -} - -impl Convert for OurPresentationRequest { - type Args = (); - type Target = CredxPresentationRequest; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurPresentation { - type Args = (); - type Target = CredxPresentation; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for CredxPresentation { - type Args = (); - type Target = OurPresentation; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurCredentialValues { - type Args = (); - type Target = CredxCredentialValues; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for CredxCredentialRevocationState { - type Args = (); - type Target = OurCredentialRevocationState; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurCredentialRevocationState { - type Args = (); - type Target = CredxCredentialRevocationState; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - serde_convert(self) - } -} - -impl Convert for OurCredentialDefinitionConfig { - type Args = (); - type Target = CredxCredentialDefinitionConfig; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - Ok(CredxCredentialDefinitionConfig { - support_revocation: self.support_revocation, - }) - } -} - -impl Convert for OurSignatureType { - type Args = (); - type Target = CredxSignatureType; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - match self { - OurSignatureType::CL => Ok(CredxSignatureType::CL), - } - } -} - -impl Convert for OurAttributeNames { - type Args = (); - type Target = CredxAttributeNames; - type Error = Box; - - fn convert(self, _args: Self::Args) -> Result { - Ok(self.0.into()) - } -} diff --git a/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs b/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs index c5f139a4da..6dddd121a0 100644 --- a/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs +++ b/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs @@ -2,5 +2,4 @@ #[cfg(feature = "anoncreds")] pub mod anoncreds; pub mod base_anoncreds; -#[cfg(feature = "credx")] -pub mod credx_anoncreds; + diff --git a/aries/aries_vcx_anoncreds/src/errors/mapping_credx.rs b/aries/aries_vcx_anoncreds/src/errors/mapping_credx.rs deleted file mode 100644 index 60065cbb3a..0000000000 --- 
a/aries/aries_vcx_anoncreds/src/errors/mapping_credx.rs +++ /dev/null @@ -1,35 +0,0 @@ -use indy_credx::Error as CredxError; - -use super::error::VcxAnoncredsError; - -impl From for VcxAnoncredsError { - fn from(err: CredxError) -> Self { - // Credx will occasionally wrap the real error within the `cause` of an ErrorKind::Input - // error type So we use this cause error if the cause exists and can be downcast to - // an credxerror - let cause = if err.kind() == indy_credx::ErrorKind::Input { - err.cause - .as_ref() - .and_then(|x| x.downcast_ref::()) - } else { - None - }; - let e = cause.unwrap_or(&err); - - match e.kind() { - indy_credx::ErrorKind::Input | indy_credx::ErrorKind::InvalidUserRevocId => { - VcxAnoncredsError::InvalidInput(err.to_string()) - } - indy_credx::ErrorKind::IOError => VcxAnoncredsError::IOError(err.to_string()), - indy_credx::ErrorKind::InvalidState - | indy_credx::ErrorKind::RevocationRegistryFull - | indy_credx::ErrorKind::CredentialRevoked => { - VcxAnoncredsError::InvalidState(err.to_string()) - } - indy_credx::ErrorKind::Unexpected => VcxAnoncredsError::UnknownError(err.to_string()), - indy_credx::ErrorKind::ProofRejected => { - VcxAnoncredsError::ProofRejected(err.to_string()) - } - } - } -} diff --git a/aries/aries_vcx_anoncreds/src/errors/mod.rs b/aries/aries_vcx_anoncreds/src/errors/mod.rs index 4ce0a78edb..ec48b69794 100644 --- a/aries/aries_vcx_anoncreds/src/errors/mod.rs +++ b/aries/aries_vcx_anoncreds/src/errors/mod.rs @@ -1,6 +1,4 @@ pub mod error; #[cfg(feature = "anoncreds")] mod mapping_anoncreds; -#[cfg(feature = "credx")] -mod mapping_credx; mod mapping_others; diff --git a/aries/misc/test_utils/Cargo.toml b/aries/misc/test_utils/Cargo.toml index 518b9ea0aa..d9f027d2ac 100644 --- a/aries/misc/test_utils/Cargo.toml +++ b/aries/misc/test_utils/Cargo.toml @@ -12,11 +12,10 @@ edition.workspace = true askar_wallet = ["aries_vcx_wallet/askar_wallet"] vdr_proxy_ledger = [ "aries_vcx_ledger/vdr_proxy_ledger", - "credx", + "anoncreds", "dep:indy-ledger-response-parser", "dep:indy-vdr-proxy-client", ] -credx = ["aries_vcx_anoncreds/credx"] anoncreds = ["aries_vcx_anoncreds/anoncreds"] [dependencies] diff --git a/aries/misc/test_utils/src/devsetup.rs b/aries/misc/test_utils/src/devsetup.rs index eab64c63ea..770d847f3e 100644 --- a/aries/misc/test_utils/src/devsetup.rs +++ b/aries/misc/test_utils/src/devsetup.rs @@ -169,19 +169,13 @@ pub async fn dev_build_featured_indy_ledger( #[allow(clippy::needless_return)] pub async fn dev_build_featured_anoncreds() -> impl BaseAnonCreds { - #[cfg(feature = "credx")] - { - use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; - return IndyCredxAnonCreds; - } - - #[cfg(all(not(feature = "credx"), feature = "anoncreds"))] + #[cfg(feature = "anoncreds")] { use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; return Anoncreds; } - #[cfg(all(not(feature = "credx"), not(feature = "anoncreds")))] + #[cfg(not(feature = "anoncreds"))] { use crate::mockdata::mock_anoncreds::MockAnoncreds; return MockAnoncreds; diff --git a/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml b/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml index c7ef7dcb10..7732d59334 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml +++ b/aries/wrappers/uniffi-aries-vcx/core/Cargo.toml @@ -18,7 +18,8 @@ path = "uniffi-bindgen.rs" [dependencies] uniffi = { version = "0.23.0", features = ["cli"] } aries_vcx = { path = "../../../aries_vcx", features = [ - "credx", + "anoncreds", + "askar_wallet" ] } aries_vcx_ledger = { path = 
"../../../aries_vcx_ledger" } aries_vcx_anoncreds = { path = "../../../aries_vcx_anoncreds" } diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/askar.rs b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/askar.rs index 0601494b59..ef272e99aa 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/askar.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/askar.rs @@ -1,14 +1,13 @@ use std::sync::Arc; use aries_vcx::{ - aries_vcx_anoncreds::anoncreds::{ - base_anoncreds::BaseAnonCreds, credx_anoncreds::IndyCredxAnonCreds, - }, + aries_vcx_anoncreds::anoncreds::base_anoncreds::BaseAnonCreds, aries_vcx_wallet::wallet::{ askar::{askar_wallet_config::AskarWalletConfig, AskarWallet}, base_wallet::ManageWallet, }, }; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::{ indy_vdr_ledger::{indyvdr_build_ledger_read, IndyVdrLedgerRead}, request_submitter::vdr_ledger::{IndyVdrLedgerPool, IndyVdrSubmitter}, @@ -23,7 +22,7 @@ use crate::{ #[derive(Debug)] pub struct UniffiProfile { pub wallet: AskarWallet, - pub anoncreds: IndyCredxAnonCreds, + pub anoncreds: Anoncreds, pub ledger_read: IndyVdrLedgerRead, } @@ -37,7 +36,7 @@ pub fn new_indy_profile( block_on(async { let wallet = wallet_config.create_wallet().await?; - let anoncreds = IndyCredxAnonCreds; + let anoncreds = Anoncreds; anoncreds .prover_create_link_secret(&wallet, &"main".to_string()) @@ -53,7 +52,7 @@ pub fn new_indy_profile( let request_submitter = IndyVdrSubmitter::new(ledger_pool); let ledger_read = indyvdr_build_ledger_read(request_submitter, cache_config)?; let profile = UniffiProfile { - anoncreds: IndyCredxAnonCreds, + anoncreds: Anoncreds, wallet, ledger_read, }; diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs index 44292a2e1d..332460c3e7 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/src/core/profile/mod.rs @@ -1,5 +1,5 @@ use aries_vcx::errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}; -use aries_vcx_anoncreds::anoncreds::credx_anoncreds::IndyCredxAnonCreds; +use aries_vcx_anoncreds::anoncreds::anoncreds::Anoncreds; use aries_vcx_ledger::ledger::{ base_ledger::TxnAuthrAgrmtOptions, indy_vdr_ledger::IndyVdrLedgerRead, request_submitter::vdr_ledger::IndyVdrSubmitter, @@ -17,7 +17,7 @@ impl UniffiProfile { &self.ledger_read } - pub fn anoncreds(&self) -> &IndyCredxAnonCreds { + pub fn anoncreds(&self) -> &Anoncreds { &self.anoncreds } From 925fb35bdda6e5285c7e47dae8262c60793271d3 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 10:50:32 +1000 Subject: [PATCH 07/15] fix several askar crypto issues Signed-off-by: gmulhearn --- .github/workflows/main.yml | 6 +- .../src/utils/encryption_envelope.rs | 17 +++--- .../src/wallet/askar/askar_did_wallet.rs | 55 ++++++++++--------- .../src/wallet/askar/askar_utils.rs | 28 +++++++++- .../aries_vcx_wallet/src/wallet/askar/pack.rs | 3 +- .../src/wallet/askar/unpack.rs | 33 ++++++----- justfile | 24 ++------ 7 files changed, 92 insertions(+), 74 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f95f1d1beb..8b3f574c5a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -101,7 +101,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - wallet: ["vdrtools_wallet", "askar_wallet"] + wallet: ["askar_wallet"] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -123,7 +123,7 
@@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - backend: ["credx,vdrtools_wallet", "vdr_proxy_ledger,vdrtools_wallet"] + backend: ["anoncreds,askar_wallet", "vdr_proxy_ledger,askar_wallet"] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -332,7 +332,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - wallet: ["vdrtools_wallet,credx", "askar_wallet,credx"] + wallet: ["askar_wallet,anoncreds"] steps: - name: "Git checkout" uses: actions/checkout@v3 diff --git a/aries/aries_vcx/src/utils/encryption_envelope.rs b/aries/aries_vcx/src/utils/encryption_envelope.rs index dcd86a953f..9a522e2f1e 100644 --- a/aries/aries_vcx/src/utils/encryption_envelope.rs +++ b/aries/aries_vcx/src/utils/encryption_envelope.rs @@ -336,25 +336,24 @@ pub mod unit_tests { .await .unwrap(); + let sender_vk = sender_data.verkey().base58(); + let recipient_vk = recipient_data.verkey().base58(); + let data_original = "foobar"; let envelope = EncryptionEnvelope::create_from_keys( &setup.wallet, data_original.as_bytes(), - Some(&sender_data.verkey().base58()), - recipient_data.verkey().base58(), + Some(&sender_vk), + recipient_vk.clone(), [].to_vec(), ) .await .unwrap(); - let data_unpacked = EncryptionEnvelope::auth_unpack( - &setup.wallet, - envelope.0, - &sender_data.verkey().base58(), - ) - .await - .unwrap(); + let data_unpacked = EncryptionEnvelope::auth_unpack(&setup.wallet, envelope.0, &sender_vk) + .await + .unwrap(); assert_eq!(data_original, data_unpacked); } diff --git a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs index c175b3c8cf..4c5824c12f 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs @@ -6,7 +6,7 @@ use async_trait::async_trait; use public_key::Key; use super::{ - askar_utils::{local_key_to_public_key, seed_from_opt}, + askar_utils::{local_key_to_public_key, public_key_to_local_key, seed_from_opt}, pack::Pack, rng_method::RngMethod, sig_type::SigType, @@ -38,7 +38,7 @@ impl DidWallet for AskarWallet { _did_method_name: Option<&str>, ) -> VcxWalletResult { let mut tx = self.transaction().await?; - let (did, local_key) = self + let (_vk, local_key) = self .insert_key( &mut tx, KeyAlg::Ed25519, @@ -48,16 +48,29 @@ impl DidWallet for AskarWallet { .await?; let verkey = local_key_to_public_key(&local_key)?; + + // construct NYM from first half of verkey as expected output from this method + let nym = { + let pk = verkey.key(); + if pk.len() != 32 { + return Err(VcxWalletError::InvalidInput(format!( + "Invalid key length: {}", + pk.len() + ))); + } + bs58::encode(&pk[0..16]).into_string() + }; + self.insert_did( &mut tx, - &did, + &nym, &RecordCategory::Did.to_string(), &verkey, None, ) .await?; tx.commit().await?; - Ok(DidData::new(&did, &verkey)) + Ok(DidData::new(&nym, &verkey)) } async fn key_for_did(&self, did: &str) -> VcxWalletResult { @@ -131,36 +144,28 @@ impl DidWallet for AskarWallet { } async fn sign(&self, key: &Key, msg: &[u8]) -> VcxWalletResult> { - if let Some(key) = self + let Some(key) = self .session() .await? .fetch_key(&key.base58(), false) .await? - { - let local_key = key.load_local_key()?; - let key_alg = SigType::try_from_key_alg(local_key.algorithm())?; - Ok(local_key.sign_message(msg, Some(key_alg.into()))?) 
- } else { - Err(VcxWalletError::record_not_found_from_details( + else { + return Err(VcxWalletError::record_not_found_from_details( RecordCategory::Key, &key.base58(), - )) - } + )); + }; + + let local_key = key.load_local_key()?; + let key_alg = SigType::try_from_key_alg(local_key.algorithm())?; + Ok(local_key.sign_message(msg, Some(key_alg.into()))?) } async fn verify(&self, key: &Key, msg: &[u8], signature: &[u8]) -> VcxWalletResult { - if let Some(key) = self - .session() - .await? - .fetch_key(&key.base58(), false) - .await? - { - let local_key = key.load_local_key()?; - let key_alg = SigType::try_from_key_alg(local_key.algorithm())?; - Ok(local_key.verify_signature(msg, signature, Some(key_alg.into()))?) - } else { - Ok(false) - } + let local_key = public_key_to_local_key(&key)?; + + let sig_alg = SigType::try_from_key_alg(local_key.algorithm())?; + Ok(local_key.verify_signature(msg, signature, Some(sig_alg.into()))?) } async fn pack_message( diff --git a/aries/aries_vcx_wallet/src/wallet/askar/askar_utils.rs b/aries/aries_vcx_wallet/src/wallet/askar/askar_utils.rs index 3296557063..eebbf6f6db 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/askar_utils.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/askar_utils.rs @@ -1,12 +1,13 @@ use aries_askar::{ + crypto::alg::{BlsCurves, EcCurves, KeyAlg}, entry::Entry, - kms::{KeyAlg, LocalKey}, + kms::LocalKey, }; use public_key::{Key, KeyType}; use serde::Deserialize; use crate::{ - errors::error::VcxWalletResult, + errors::error::{VcxWalletError, VcxWalletResult}, wallet::{base_wallet::base58_string::Base58String, utils::random_seed}, }; @@ -25,6 +26,29 @@ pub fn local_key_to_public_key(local_key: &LocalKey) -> VcxWalletResult { )?) } +pub fn public_key_to_local_key(key: &Key) -> VcxWalletResult { + let alg = public_key_type_to_askar_key_alg(key.key_type())?; + Ok(LocalKey::from_public_bytes(alg, key.key())?) +} + +pub fn public_key_type_to_askar_key_alg(value: &KeyType) -> VcxWalletResult { + let alg = match value { + KeyType::Ed25519 => KeyAlg::Ed25519, + KeyType::X25519 => KeyAlg::X25519, + KeyType::Bls12381g1g2 => KeyAlg::Bls12_381(BlsCurves::G1G2), + KeyType::Bls12381g1 => KeyAlg::Bls12_381(BlsCurves::G1), + KeyType::Bls12381g2 => KeyAlg::Bls12_381(BlsCurves::G2), + KeyType::P256 => KeyAlg::EcCurve(EcCurves::Secp256r1), + KeyType::P384 => KeyAlg::EcCurve(EcCurves::Secp384r1), + _ => { + return Err(VcxWalletError::Unimplemented(format!( + "Unsupported key type: {value:?}" + ))) + } + }; + Ok(alg) +} + pub fn ed25519_to_x25519(local_key: &LocalKey) -> VcxWalletResult { Ok(local_key.convert_key(KeyAlg::X25519)?) 
} diff --git a/aries/aries_vcx_wallet/src/wallet/askar/pack.rs b/aries/aries_vcx_wallet/src/wallet/askar/pack.rs index d660e373b7..647248c105 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/pack.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/pack.rs @@ -72,9 +72,10 @@ fn pack_authcrypt_recipients( &nonce, )?; + let sender_ed25519_pk = sender_local_key.to_public_bytes()?; let enc_sender = crypto_box_seal( &recipient_converted_key, - bytes_to_bs58(&sender_local_key.to_public_bytes()?).as_bytes(), + bytes_to_bs58(&sender_ed25519_pk).as_bytes(), )?; encrypted_recipients.push(Recipient::new_authcrypt( diff --git a/aries/aries_vcx_wallet/src/wallet/askar/unpack.rs b/aries/aries_vcx_wallet/src/wallet/askar/unpack.rs index 3b7894b369..c8a24c8d4e 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/unpack.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/unpack.rs @@ -43,6 +43,7 @@ pub async fn unpack(jwe: Jwe, session: &mut Session) -> VcxWalletResult VcxWalletResult { ) } +/// Returns the shared encryption key, and the sender key fn unpack_authcrypt( local_key: &LocalKey, recipient: &AuthcryptRecipient, ) -> VcxWalletResult<(LocalKey, Option)> { - let recipient_key = ed25519_to_x25519(local_key)?; - let sender_vk = crypto_box_seal_open(&recipient_key, &recipient.header.sender.decode()?)?; - let sender_key = ed25519_to_x25519(&LocalKey::from_public_bytes( - Ed25519, - &bs58_to_bytes(&sender_vk.clone())?, + let recipient_x25519_key = ed25519_to_x25519(local_key)?; + + // "sender" : base64URLencode(libsodium.crypto_box_seal(their_vk, base58encode(sender_vk)), + let encrypted_sender_vk = recipient.header.sender.decode()?; + let sender_vk = bs58_to_bytes(&crypto_box_seal_open( + &recipient_x25519_key, + &encrypted_sender_vk, )?)?; + let sender_x25519_key = ed25519_to_x25519(&LocalKey::from_public_bytes(Ed25519, &sender_vk)?)?; let secret = crypto_box_open( - &recipient_key, - &sender_key, + &recipient_x25519_key, + &sender_x25519_key, &recipient.encrypted_key.decode()?, &recipient.header.iv.decode()?, )?; - Ok(( - LocalKey::from_secret_bytes(KeyAlg::Chacha20(Chacha20Types::C20P), &secret)?, - Some(Key::new(sender_vk.to_vec(), KeyType::Ed25519)?), - )) + let shared_enc_key = + LocalKey::from_secret_bytes(KeyAlg::Chacha20(Chacha20Types::C20P), &secret)?; + let sender_ed25519_pk = Key::new(sender_vk, KeyType::Ed25519)?; + Ok((shared_enc_key, Some(sender_ed25519_pk))) } fn unpack_anoncrypt( @@ -103,10 +108,8 @@ fn unpack_anoncrypt( let recipient_key = ed25519_to_x25519(local_key)?; let key = crypto_box_seal_open(&recipient_key, &recipient.encrypted_key.decode()?)?; - Ok(( - LocalKey::from_secret_bytes(KeyAlg::Chacha20(Chacha20Types::C20P), &key)?, - None, - )) + let shared_enc_key = LocalKey::from_secret_bytes(KeyAlg::Chacha20(Chacha20Types::C20P), &key)?; + Ok((shared_enc_key, None)) } async fn find_recipient_key<'a>( diff --git a/justfile b/justfile index cd4d455b9d..b65ce1c0b3 100644 --- a/justfile +++ b/justfile @@ -8,7 +8,7 @@ fmt-check: cargo +nightly-2023-05-08 fmt --check clippy-workspace wallet: - cargo clippy --examples --tests --no-default-features -F credx,anoncreds,vdr_proxy_ledger,legacy_proof,{{wallet}} + cargo clippy --examples --tests --no-default-features -F anoncreds,vdr_proxy_ledger,legacy_proof,{{wallet}} clippy-aries-vcx features: cargo clippy -p aries_vcx --features legacy_proof --features {{features}} --no-default-features @@ -16,32 +16,18 @@ clippy-aries-vcx features: check-workspace: cargo check --tests --all-features +# TODO - this is failing check-aries-vcx-anoncreds: - 
cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdrtools_wallet,anoncreds --tests - -check-aries-vcx-credx: - cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdrtools_wallet,credx --tests + cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F askar_wallet,anoncreds --tests test-unit test_name="": - RUST_TEST_THREADS=1 cargo test --workspace --lib --exclude aries-vcx-agent --exclude libvdrtools --exclude wallet_migrator --exclude mediator {{test_name}} -F did_doc/jwk -F public_key/jwk - -test-compatibility-aries-vcx-wallet: - cargo test --manifest-path="aries/aries_vcx_wallet/Cargo.toml" -F vdrtools_wallet,askar_wallet wallet_compatibility_ - -test-wallet-migrator: - cargo test --manifest-path="aries/misc/wallet_migrator/Cargo.toml" -F vdrtools_wallet,askar_wallet + RUST_TEST_THREADS=1 cargo test --workspace --lib --exclude aries-vcx-agent --exclude mediator {{test_name}} -F did_doc/jwk -F public_key/jwk test-integration-aries-vcx features test_name="": cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F {{features}} -- --ignored {{test_name}} -test-integration-aries-vcx-anoncreds-rs test_name="": - cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F anoncreds --test test_revocations --test test_proof_presentation --test test_anoncreds --test test_verifier -- --ignored {{test_name}} - -test-integration-aries-vcx-mysql test_name="": - cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdrtools_wallet test_mysql -- --include-ignored {{test_name}} - test-integration-aries-vcx-vdrproxy test_name="": - cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdr_proxy_ledger,credx -- --ignored {{test_name}} + cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdr_proxy_ledger,anoncreds -- --ignored {{test_name}} test-integration-did-crate test_name="": cargo test --examples -p did_doc -p did_parser_nom -p did_resolver -p did_resolver_registry -p did_resolver_sov -p did_resolver_web -p did_key -p did_peer -F did_doc/jwk --test "*" From 14a9d0186328ea970ea1f5b57e22a21053a37544 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 11:05:48 +1000 Subject: [PATCH 08/15] remove some old tests Signed-off-by: gmulhearn --- .github/workflows/main.yml | 64 ------------------- .../src/wallet/askar/askar_did_wallet.rs | 2 +- justfile | 1 - 3 files changed, 1 insertion(+), 66 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8b3f574c5a..f07e601724 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -293,40 +293,6 @@ jobs: - name: "Run workspace unit tests" run: just test-unit - test-wallet-migrator: - needs: workflow-setup - runs-on: ubuntu-20.04 - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup rust testing environment" - uses: ./.github/actions/setup-testing-rust - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSION }} - default: true - skip-docker-setup: true - - name: "Install just" - run: sudo snap install --edge --classic just - - name: "Run wallet migration tests" - run: just test-wallet-migrator - - test-compatibility-aries-vcx-wallet: - needs: workflow-setup - runs-on: ubuntu-20.04 - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup rust testing environment" - uses: ./.github/actions/setup-testing-rust - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSION }} - default: true - skip-docker-setup: true - - name: "Install just" - run: sudo snap install --edge --classic just - - name: "Run 
aries-vcx-wallet compatibility tests" - run: just test-compatibility-aries-vcx-wallet - test-integration-aries-vcx: needs: workflow-setup runs-on: ubuntu-20.04 @@ -345,36 +311,6 @@ jobs: - name: "Run aries-vcx integration tests" run: just test-integration-aries-vcx ${{ matrix.wallet }} - test-integration-aries-vcx-anoncreds-rs: - needs: workflow-setup - runs-on: ubuntu-20.04 - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup rust testing environment" - uses: ./.github/actions/setup-testing-rust - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSION }} - - name: "Install just" - run: sudo snap install --edge --classic just - - name: "Run anoncreds-rs integration tests" - run: just test-integration-aries-vcx-anoncreds-rs - - test-integration-aries-vcx-mysql: - needs: workflow-setup - runs-on: ubuntu-20.04 - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup rust testing environment" - uses: ./.github/actions/setup-testing-rust - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSION }} - - name: "Install just" - run: sudo snap install --edge --classic just - - name: "Run aries_vcx tests: mysql_test" - run: just test-integration-aries-vcx-mysql - test-integration-aries-vcx-vdrproxy: needs: [workflow-setup, build-docker-vdrproxy] runs-on: ubuntu-20.04 diff --git a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs index 4c5824c12f..051c81145f 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs @@ -162,7 +162,7 @@ impl DidWallet for AskarWallet { } async fn verify(&self, key: &Key, msg: &[u8], signature: &[u8]) -> VcxWalletResult { - let local_key = public_key_to_local_key(&key)?; + let local_key = public_key_to_local_key(key)?; let sig_alg = SigType::try_from_key_alg(local_key.algorithm())?; Ok(local_key.verify_signature(msg, signature, Some(sig_alg.into()))?) 
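A side note on the verkey handling in the DID-wallet changes above: the new `create_and_store_my_did` derives the unqualified DID (NYM) by base58-encoding the first 16 bytes of the Ed25519 verkey, and the `expand_abbreviated_verkey` helper added in a later patch reverses that split when an Indy ledger returns a `~`-abbreviated verkey. The following is a minimal illustrative sketch (not part of the patch), assuming only the `bs58` crate these patches already use and a made-up placeholder key rather than a real one:

```rust
// Illustrative only; `verkey` is a placeholder, not a real Ed25519 public key.
fn main() {
    let verkey: [u8; 32] = [7u8; 32];
    let verkey_b58 = bs58::encode(verkey).into_string();

    // NYM (unqualified DID) = base58 of the first half (16 bytes) of the verkey.
    let nym = bs58::encode(&verkey[..16]).into_string();

    // An abbreviated ledger verkey ("~...") covers only the second half;
    // expanding it means prepending the decoded NYM bytes again.
    let abbreviated = bs58::encode(&verkey[16..]).into_string();
    let mut expanded = bs58::decode(&nym).into_vec().unwrap();
    expanded.extend(bs58::decode(&abbreviated).into_vec().unwrap());
    assert_eq!(bs58::encode(expanded).into_string(), verkey_b58);

    println!("nym: {nym}, verkey: {verkey_b58}");
}
```

The final assertion is just the inverse of the split: decode the NYM, decode the abbreviated part, concatenate, and re-encode to recover the full verkey, which is essentially what the later `expand_abbreviated_verkey` change does.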
diff --git a/justfile b/justfile index b65ce1c0b3..832f8bd117 100644 --- a/justfile +++ b/justfile @@ -16,7 +16,6 @@ clippy-aries-vcx features: check-workspace: cargo check --tests --all-features -# TODO - this is failing check-aries-vcx-anoncreds: cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F askar_wallet,anoncreds --tests From 440f868b9596377eb044cea6d1d57aede55f0cb9 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 14:35:56 +1000 Subject: [PATCH 09/15] rerun Signed-off-by: gmulhearn --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f07e601724..276634dd93 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -367,7 +367,6 @@ jobs: - workflow-setup - test-unit-workspace - test-integration-aries-vcx - - test-integration-aries-vcx-mysql if: ${{ needs.workflow-setup.outputs.RELEASE == 'true' || needs.workflow-setup.outputs.PRERELEASE == 'true' }} outputs: RELEASE_UPLOAD_URL: ${{ steps.create-release.outputs.upload_url }} From 96dad34c0a6ab87eb08187d4552b043e7473ffb8 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 14:38:47 +1000 Subject: [PATCH 10/15] fmt Signed-off-by: gmulhearn --- aries/agents/mediator/src/aries_agent/mod.rs | 8 ++++---- aries/aries_vcx_anoncreds/src/anoncreds/mod.rs | 1 - .../protocols/cred_issuance/v1/mod.rs | 17 ++++++++--------- .../protocols/cred_issuance/v2/mod.rs | 17 ++++++++--------- .../test_utils/src/devsetup/askar_wallet.rs | 1 - .../src/peer_did/numalgos/numalgo2/helpers.rs | 14 ++++++-------- 6 files changed, 26 insertions(+), 32 deletions(-) diff --git a/aries/agents/mediator/src/aries_agent/mod.rs b/aries/agents/mediator/src/aries_agent/mod.rs index a7f6fad089..2b1312bfea 100644 --- a/aries/agents/mediator/src/aries_agent/mod.rs +++ b/aries/agents/mediator/src/aries_agent/mod.rs @@ -8,7 +8,9 @@ use aries_vcx::{ use aries_vcx_wallet::{ errors::error::VcxWalletError, wallet::{ - askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod}, base_wallet::{BaseWallet, ManageWallet}, structs_io::UnpackMessageOutput + askar::{askar_wallet_config::AskarWalletConfig, key_method::KeyMethod}, + base_wallet::{BaseWallet, ManageWallet}, + structs_io::UnpackMessageOutput, }, }; use diddoc_legacy::aries::{diddoc::AriesDidDoc, service::AriesService}; @@ -245,9 +247,7 @@ mod test { pub async fn test_pack_unpack() { let message: Value = serde_json::from_str("{}").unwrap(); let message_bytes = serde_json::to_vec(&message).unwrap(); - let mut agent = AgentBuilder::::new_demo_agent() - .await - .unwrap(); + let mut agent = AgentBuilder::::new_demo_agent().await.unwrap(); agent .init_service( vec![], diff --git a/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs b/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs index 6dddd121a0..7efa1779e0 100644 --- a/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs +++ b/aries/aries_vcx_anoncreds/src/anoncreds/mod.rs @@ -2,4 +2,3 @@ #[cfg(feature = "anoncreds")] pub mod anoncreds; pub mod base_anoncreds; - diff --git a/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs b/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs index 95b75f4194..feb1b0df59 100644 --- a/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs +++ b/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs @@ -63,16 +63,15 @@ impl DelayedSerde for CredentialIssuanceV1 { D: Deserializer<'de>, { let (protocol, kind_str) = msg_type; - let kind = match protocol { - 
CredentialIssuanceKind::V1(CredentialIssuanceTypeV1::V1_0(kind)) => { - kind.kind_from_str(kind_str) - } - CredentialIssuanceKind::V2(_) => { - return Err(D::Error::custom( + let kind = + match protocol { + CredentialIssuanceKind::V1(CredentialIssuanceTypeV1::V1_0(kind)) => { + kind.kind_from_str(kind_str) + } + CredentialIssuanceKind::V2(_) => return Err(D::Error::custom( "Cannot deserialize issue-credential-v2 message type into issue-credential-v1", - )) - } - }; + )), + }; match kind.map_err(D::Error::custom)? { CredentialIssuanceTypeV1_0::OfferCredential => { diff --git a/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs b/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs index 200ff03b91..2253562b76 100644 --- a/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs +++ b/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs @@ -62,16 +62,15 @@ impl DelayedSerde for CredentialIssuanceV2 { D: Deserializer<'de>, { let (protocol, kind_str) = msg_type; - let kind = match protocol { - CredentialIssuanceKind::V2(CredentialIssuanceTypeV2::V2_0(kind)) => { - kind.kind_from_str(kind_str) - } - CredentialIssuanceKind::V1(_) => { - return Err(D::Error::custom( + let kind = + match protocol { + CredentialIssuanceKind::V2(CredentialIssuanceTypeV2::V2_0(kind)) => { + kind.kind_from_str(kind_str) + } + CredentialIssuanceKind::V1(_) => return Err(D::Error::custom( "Cannot deserialize issue-credential-v1 message type into issue-credential-v2", - )) - } - }; + )), + }; match kind.map_err(D::Error::custom)? { CredentialIssuanceTypeV2_0::OfferCredential => { diff --git a/aries/misc/test_utils/src/devsetup/askar_wallet.rs b/aries/misc/test_utils/src/devsetup/askar_wallet.rs index 33d6065eee..0c17f139cb 100644 --- a/aries/misc/test_utils/src/devsetup/askar_wallet.rs +++ b/aries/misc/test_utils/src/devsetup/askar_wallet.rs @@ -5,7 +5,6 @@ use aries_vcx_wallet::wallet::{ use log::info; use uuid::Uuid; - pub async fn dev_setup_wallet_askar(key_seed: &str) -> (String, AskarWallet) { info!("dev_setup_wallet_askar >>"); // TODO - actually impl this diff --git a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs index 1e19b3b089..b6a47a6eed 100644 --- a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs +++ b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs @@ -144,11 +144,10 @@ mod tests { #[test] fn test_process_elements_with_multiple_elements() { - let did: Did = - "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ + let did: Did = "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ SeyJpZCI6IiNzZXJ2aWNlLTAiLCJ0IjoiZG0iLCJzIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9lbmRwb2ludCJ9" - .parse() - .unwrap(); + .parse() + .unwrap(); let did_doc = diddoc_from_peerdid2_elements( DidDocument::new(did.clone()), @@ -164,12 +163,11 @@ mod tests { #[test] fn test_process_elements_error_on_invalid_element() { - let did: Did = - "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ + let did: Did = "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ SeyJpZCI6IiNzZXJ2aWNlLTAiLCJ0IjoiZG0iLCJzIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9lbmRwb2ludCJ9.\ Xinvalid" - .parse() - .unwrap(); + .parse() + .unwrap(); match diddoc_from_peerdid2_elements( DidDocument::new(did.clone()), From 3a78e21dd3a16c63a4b18ca400d7e230056a693f Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 15:09:34 +1000 Subject: [PATCH 11/15] 
change askar key from seed to match indy/acapy etc, tolerate unexpanded verkeys in legacy verkey resolution Signed-off-by: gmulhearn --- aries/aries_vcx/src/common/keys.rs | 33 +++++++++++++++++-- .../src/wallet/askar/askar_did_wallet.rs | 15 ++------- .../aries_vcx_wallet/src/wallet/askar/mod.rs | 8 ++--- 3 files changed, 35 insertions(+), 21 deletions(-) diff --git a/aries/aries_vcx/src/common/keys.rs b/aries/aries_vcx/src/common/keys.rs index d96ce249de..fe7cb6105c 100644 --- a/aries/aries_vcx/src/common/keys.rs +++ b/aries/aries_vcx/src/common/keys.rs @@ -94,13 +94,42 @@ pub async fn get_verkey_from_ledger( ), ) })?; - Ok(nym_data["verkey"] + let unparsed_verkey = nym_data["verkey"] .as_str() .ok_or(AriesVcxError::from_msg( AriesVcxErrorKind::SerializationError, format!("Cannot deserialize {:?} into String", nym_data["verkey"]), ))? - .to_string()) + .to_string(); + + expand_abbreviated_verkey(did.id(), &unparsed_verkey) +} + +/// Indy ledgers may return abbreviated verkeys, where the abbreviation only makes sense +/// with the context of the NYM, this function expands them to full verkeys +fn expand_abbreviated_verkey(nym: &str, verkey: &str) -> VcxResult { + let Some(stripped_verkey) = verkey.strip_prefix('~') else { + // expansion not needed + return Ok(verkey.to_string()); + }; + let mut decoded_nym = bs58::decode(nym).into_vec().map_err(|e| { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidLedgerResponse, + format!("Failed to decode did from base58: {} (error: {})", nym, e), + ) + })?; + let decoded_stripped_verkey = bs58::decode(stripped_verkey).into_vec().map_err(|e| { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidLedgerResponse, + format!( + "Failed to decode verkey from base58: {} (error: {})", + stripped_verkey, e + ), + ) + })?; + decoded_nym.extend(&decoded_stripped_verkey); + + Ok(bs58::encode(decoded_nym).into_string()) } // todo: was originally written for vdrtool ledger implementation, ideally we should moc out diff --git a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs index 051c81145f..0a9ffd0db4 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/askar_did_wallet.rs @@ -8,7 +8,6 @@ use public_key::Key; use super::{ askar_utils::{local_key_to_public_key, public_key_to_local_key, seed_from_opt}, pack::Pack, - rng_method::RngMethod, sig_type::SigType, unpack::unpack, AskarWallet, @@ -39,12 +38,7 @@ impl DidWallet for AskarWallet { ) -> VcxWalletResult { let mut tx = self.transaction().await?; let (_vk, local_key) = self - .insert_key( - &mut tx, - KeyAlg::Ed25519, - seed_from_opt(seed).as_bytes(), - RngMethod::RandomDet, - ) + .insert_key(&mut tx, KeyAlg::Ed25519, seed_from_opt(seed).as_bytes()) .await?; let verkey = local_key_to_public_key(&local_key)?; @@ -92,12 +86,7 @@ impl DidWallet for AskarWallet { let mut tx = self.transaction().await?; if self.find_current_did(&mut tx, did).await?.is_some() { let (_, local_key) = self - .insert_key( - &mut tx, - KeyAlg::Ed25519, - seed_from_opt(seed).as_bytes(), - RngMethod::RandomDet, - ) + .insert_key(&mut tx, KeyAlg::Ed25519, seed_from_opt(seed).as_bytes()) .await?; let verkey = local_key_to_public_key(&local_key)?; diff --git a/aries/aries_vcx_wallet/src/wallet/askar/mod.rs b/aries/aries_vcx_wallet/src/wallet/askar/mod.rs index 15cbb113dc..dcde92610d 100644 --- a/aries/aries_vcx_wallet/src/wallet/askar/mod.rs +++ b/aries/aries_vcx_wallet/src/wallet/askar/mod.rs @@ 
-6,10 +6,7 @@ use aries_askar::{ use async_trait::async_trait; use public_key::Key; -use self::{ - askar_utils::local_key_to_bs58_public_key, askar_wallet_config::AskarWalletConfig, - rng_method::RngMethod, -}; +use self::{askar_utils::local_key_to_bs58_public_key, askar_wallet_config::AskarWalletConfig}; use super::{ base_wallet::{ did_value::DidValue, key_value::KeyValue, record_category::RecordCategory, BaseWallet, @@ -144,9 +141,8 @@ impl AskarWallet { session: &mut Session, alg: KeyAlg, seed: &[u8], - rng_method: RngMethod, ) -> VcxWalletResult<(String, LocalKey)> { - let key = LocalKey::from_seed(alg, seed, rng_method.into())?; + let key = LocalKey::from_secret_bytes(alg, seed)?; let key_name = local_key_to_bs58_public_key(&key)?.into_inner(); session .insert_key(&key_name, &key, None, None, None) From cd5784dbc38101b65395c7dd1ba163ca273ef3b2 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 15:26:59 +1000 Subject: [PATCH 12/15] small test fixes Signed-off-by: gmulhearn --- aries/aries_vcx/tests/test_pool.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/aries/aries_vcx/tests/test_pool.rs b/aries/aries_vcx/tests/test_pool.rs index f6302820b3..87a44db890 100644 --- a/aries/aries_vcx/tests/test_pool.rs +++ b/aries/aries_vcx/tests/test_pool.rs @@ -23,7 +23,7 @@ use aries_vcx::{ }; use aries_vcx_anoncreds::anoncreds::base_anoncreds::BaseAnonCreds; use aries_vcx_ledger::ledger::{ - base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite}, + base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerWrite}, indy::pool::test_utils::get_temp_file_path, }; use aries_vcx_wallet::wallet::base_wallet::{did_wallet::DidWallet, BaseWallet}; @@ -296,6 +296,12 @@ async fn test_pool_multiple_service_formats() -> Result<(), Box> { let setup = build_setup_profile().await; let did = setup.institution_did.clone(); + // clear all + let c = json!({ "service": serde_json::Value::Null }).to_string(); + setup.ledger_write.add_attr(&setup.wallet, &did, &c).await?; + let c = json!({ "endpoint": serde_json::Value::Null }).to_string(); + setup.ledger_write.add_attr(&setup.wallet, &did, &c).await?; + // Write legacy service format let service_1 = AriesService::create() .set_service_endpoint("https://example1.org".parse()?) 
@@ -427,7 +433,7 @@ async fn test_pool_rev_reg_def_fails_for_cred_def_created_without_revocation( ) .await; - assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidInput); + assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidState); Ok(()) } From 24dc152032c8bc289ab7616b37632e66afd28008 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 15:30:20 +1000 Subject: [PATCH 13/15] fmt with nightly Signed-off-by: gmulhearn --- .../protocols/cred_issuance/v1/mod.rs | 17 +++++++++-------- .../protocols/cred_issuance/v2/mod.rs | 17 +++++++++-------- aries/wrappers/uniffi-aries-vcx/core/src/lib.rs | 10 ++++++---- .../src/peer_did/numalgos/numalgo2/helpers.rs | 14 ++++++++------ 4 files changed, 32 insertions(+), 26 deletions(-) diff --git a/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs b/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs index feb1b0df59..95b75f4194 100644 --- a/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs +++ b/aries/messages/src/msg_fields/protocols/cred_issuance/v1/mod.rs @@ -63,15 +63,16 @@ impl DelayedSerde for CredentialIssuanceV1 { D: Deserializer<'de>, { let (protocol, kind_str) = msg_type; - let kind = - match protocol { - CredentialIssuanceKind::V1(CredentialIssuanceTypeV1::V1_0(kind)) => { - kind.kind_from_str(kind_str) - } - CredentialIssuanceKind::V2(_) => return Err(D::Error::custom( + let kind = match protocol { + CredentialIssuanceKind::V1(CredentialIssuanceTypeV1::V1_0(kind)) => { + kind.kind_from_str(kind_str) + } + CredentialIssuanceKind::V2(_) => { + return Err(D::Error::custom( "Cannot deserialize issue-credential-v2 message type into issue-credential-v1", - )), - }; + )) + } + }; match kind.map_err(D::Error::custom)? { CredentialIssuanceTypeV1_0::OfferCredential => { diff --git a/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs b/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs index 2253562b76..200ff03b91 100644 --- a/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs +++ b/aries/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs @@ -62,15 +62,16 @@ impl DelayedSerde for CredentialIssuanceV2 { D: Deserializer<'de>, { let (protocol, kind_str) = msg_type; - let kind = - match protocol { - CredentialIssuanceKind::V2(CredentialIssuanceTypeV2::V2_0(kind)) => { - kind.kind_from_str(kind_str) - } - CredentialIssuanceKind::V1(_) => return Err(D::Error::custom( + let kind = match protocol { + CredentialIssuanceKind::V2(CredentialIssuanceTypeV2::V2_0(kind)) => { + kind.kind_from_str(kind_str) + } + CredentialIssuanceKind::V1(_) => { + return Err(D::Error::custom( "Cannot deserialize issue-credential-v1 message type into issue-credential-v2", - )), - }; + )) + } + }; match kind.map_err(D::Error::custom)? 
{ CredentialIssuanceTypeV2_0::OfferCredential => { diff --git a/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs b/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs index 0899e03716..0703e7f664 100644 --- a/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs +++ b/aries/wrappers/uniffi-aries-vcx/core/src/lib.rs @@ -5,11 +5,13 @@ pub mod errors; pub mod handlers; pub mod runtime; -use aries_vcx::aries_vcx_wallet::wallet::askar::{ - askar_wallet_config::AskarWalletConfig, - key_method::{ArgonLevel, AskarKdfMethod, KeyMethod}, +use aries_vcx::{ + aries_vcx_wallet::wallet::askar::{ + askar_wallet_config::AskarWalletConfig, + key_method::{ArgonLevel, AskarKdfMethod, KeyMethod}, + }, + protocols::connection::pairwise_info::PairwiseInfo, }; -use aries_vcx::protocols::connection::pairwise_info::PairwiseInfo; use handlers::{connection::*, holder::*}; use crate::{ diff --git a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs index b6a47a6eed..1e19b3b089 100644 --- a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs +++ b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/helpers.rs @@ -144,10 +144,11 @@ mod tests { #[test] fn test_process_elements_with_multiple_elements() { - let did: Did = "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ + let did: Did = + "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ SeyJpZCI6IiNzZXJ2aWNlLTAiLCJ0IjoiZG0iLCJzIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9lbmRwb2ludCJ9" - .parse() - .unwrap(); + .parse() + .unwrap(); let did_doc = diddoc_from_peerdid2_elements( DidDocument::new(did.clone()), @@ -163,11 +164,12 @@ mod tests { #[test] fn test_process_elements_error_on_invalid_element() { - let did: Did = "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ + let did: Did = + "did:peer:2.Vz6MkqRYqQiSgvZQdnBytw86Qbs2ZWUkGv22od935YF4s8M7V.\ SeyJpZCI6IiNzZXJ2aWNlLTAiLCJ0IjoiZG0iLCJzIjoiaHR0cHM6Ly9leGFtcGxlLmNvbS9lbmRwb2ludCJ9.\ Xinvalid" - .parse() - .unwrap(); + .parse() + .unwrap(); match diddoc_from_peerdid2_elements( DidDocument::new(did.clone()), From dbbf23db295793e963a376324ea2bae9f32fee08 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 16:30:58 +1000 Subject: [PATCH 14/15] fix mediator and clean some todos Signed-off-by: gmulhearn --- .../agents/aries-vcx-agent/src/agent/init.rs | 3 ++- aries/agents/mediator/src/bin/mediator.rs | 26 ++++++++++++------- .../test_utils/src/devsetup/askar_wallet.rs | 10 +------ 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/aries/agents/aries-vcx-agent/src/agent/init.rs b/aries/agents/aries-vcx-agent/src/agent/init.rs index ecd39ca56a..01e0a39af5 100644 --- a/aries/agents/aries-vcx-agent/src/agent/init.rs +++ b/aries/agents/aries-vcx-agent/src/agent/init.rs @@ -52,7 +52,8 @@ pub async fn build_askar_wallet( _wallet_config: WalletInitConfig, issuer_seed: String, ) -> (AskarWallet, IssuerConfig) { - // TODO - actually impl this + // TODO - use actual config with storage path etc + // simple in-memory wallet let config_wallet = AskarWalletConfig::new( "sqlite://:memory:", KeyMethod::Unprotected, diff --git a/aries/agents/mediator/src/bin/mediator.rs b/aries/agents/mediator/src/bin/mediator.rs index 885cc39f4a..05734c6db2 100644 --- a/aries/agents/mediator/src/bin/mediator.rs +++ b/aries/agents/mediator/src/bin/mediator.rs @@ -1,6 +1,9 @@ -use aries_vcx_wallet::wallet::askar::{askar_wallet_config::AskarWalletConfig, AskarWallet}; +use 
aries_vcx_wallet::wallet::askar::{ + askar_wallet_config::AskarWalletConfig, key_method::KeyMethod, AskarWallet, +}; use log::info; use mediator::aries_agent::AgentBuilder; +use uuid::Uuid; #[tokio::main] async fn main() { @@ -9,16 +12,19 @@ async fn main() { info!("Starting up mediator! ⚙️⚙️"); let endpoint_root = std::env::var("ENDPOINT_ROOT").unwrap_or("127.0.0.1:8005".into()); info!("Mediator endpoint root address: {}", endpoint_root); - let wallet_config_json = std::env::var("INDY_WALLET_CONFIG").unwrap_or( - "{ - \"wallet_name\": \"demo-wallet\", - \"wallet_key\" : \"8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY\", - \"wallet_key_derivation\": \"RAW\" - }" - .to_string(), + // default wallet config for a dummy in-memory wallet + let default_wallet_config = AskarWalletConfig::new( + "sqlite://:memory:", + KeyMethod::Unprotected, + "", + &Uuid::new_v4().to_string(), ); - // TODO - actually impl this - let wallet_config: AskarWalletConfig = serde_json::from_str(&wallet_config_json).unwrap(); + let wallet_config_json = std::env::var("INDY_WALLET_CONFIG"); + let wallet_config = wallet_config_json + .ok() + .map_or(default_wallet_config, |json| { + serde_json::from_str(&json).unwrap() + }); info!("Wallet Config: {:?}", wallet_config); let mut agent = AgentBuilder::::new_from_wallet_config(wallet_config) .await diff --git a/aries/misc/test_utils/src/devsetup/askar_wallet.rs b/aries/misc/test_utils/src/devsetup/askar_wallet.rs index 0c17f139cb..dc3878b1f2 100644 --- a/aries/misc/test_utils/src/devsetup/askar_wallet.rs +++ b/aries/misc/test_utils/src/devsetup/askar_wallet.rs @@ -7,21 +7,13 @@ use uuid::Uuid; pub async fn dev_setup_wallet_askar(key_seed: &str) -> (String, AskarWallet) { info!("dev_setup_wallet_askar >>"); - // TODO - actually impl this + // simple in-memory wallet let config_wallet = AskarWalletConfig::new( "sqlite://:memory:", KeyMethod::Unprotected, "", &Uuid::new_v4().to_string(), ); - // wallet_name: format!("wallet_{}", uuid::Uuid::new_v4()), - // wallet_key: DEFAULT_WALLET_KEY.into(), - // wallet_key_derivation: WALLET_KDF_RAW.into(), - // wallet_type: None, - // storage_config: None, - // storage_credentials: None, - // rekey: None, - // rekey_derivation_method: None, let wallet = config_wallet.create_wallet().await.unwrap(); From 70eacc7c2ceae391abe28d688952840bbf0dffe6 Mon Sep 17 00:00:00 2001 From: gmulhearn Date: Sat, 12 Oct 2024 18:27:23 +1000 Subject: [PATCH 15/15] try target specific rust ver with mediator image Signed-off-by: gmulhearn --- aries/agents/aath-backchannel/readme.md | 6 +++--- aries/agents/mediator/Dockerfile | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/aries/agents/aath-backchannel/readme.md b/aries/agents/aath-backchannel/readme.md index 6e6ea699c2..366165663d 100644 --- a/aries/agents/aath-backchannel/readme.md +++ b/aries/agents/aath-backchannel/readme.md @@ -16,12 +16,12 @@ A general rule of thumb, is to make sure the AATH backchannel works against itse Use the following to run 2 VCX AATH instances and test them with specific test suites in the AATH: 1. clone the [AATH repo](https://github.com/hyperledger/aries-agent-test-harness/tree/main) -2. in the root of the AATH repo, start the standard AATH services (ledger, DID resolver, tails server): `./manage start` +2. in the root of the AATH repo, start the standard AATH services (ledger, DID resolver, tails server): `./manage start`. If this fails due to von service, you may have to build von seperately first: `./manage service build von-network` 3. 
from within this directory (aries/agent/aath-backchannel), run the server twice, on port 9020 and 9030, with config to use the AATH components (in two different terminals, leave them running): 1. `LEDGER_URL=http://localhost:9000 GENESIS_FILE=resource/indypool.txn cargo run -- -p 9020`, 2. `LEDGER_URL=http://localhost:9000 GENESIS_FILE=resource/indypool.txn cargo run -- -p 9030` 4. cd into `aries-test-harness`, create a python venv (e.g. `python3 -m venv venv`) and enter it (e.g. `source venv/bin/activate`) -5. install deps: `pip install -r requirements.txt` (if step 5 fails, also install `aiohttp`: `pip3 install aiohttp`, and perhaps `setuptools`: `pip3 install setuptools`) +5. install deps: `pip install -r requirements.txt` (if step 6 fails, also install `aiohttp`: `pip3 install aiohttp`, and perhaps `setuptools`: `pip3 install setuptools`) 6. run specific tests between the two agents, using the `behave` CLI with it's tagging system. e.g. `behave -D Faber=http://0.0.0.0:9020 -D Acme=http://0.0.0.0:9020 -D Bob=http://0.0.0.0:9030 -t @T001-RFC0160` to run the first RFC0160 (connection) test. Check behave docs for more details. 1. e.g. run a test with ledger operations: `behave -D Faber=http://0.0.0.0:9020 -D Acme=http://0.0.0.0:9020 -D Bob=http://0.0.0.0:9030 -t @T001-RFC0036` 2. e.g. to simulate the ariesvcx-ariesvcx "runset" defined in the aath test suite `behave -D Faber=http://0.0.0.0:9020 -D Acme=http://0.0.0.0:9020 -D Bob=http://0.0.0.0:9030 -t @RFC0036,@RFC0037,@RFC0160,@RFC0023,@RFC0793 -t ~@wip -t ~@RFC0434 -t ~@RFC0453 -t ~@RFC0211 -t ~@DIDExchangeConnection -t ~@Transport_Ws`. See the `TEST_SCOPE` of [test-harness-ariesvcx-ariesvcx.yml](https://github.com/hyperledger/aries-agent-test-harness/blob/main/.github/workflows/test-harness-ariesvcx-ariesvcx.yml) for the latest. @@ -29,7 +29,7 @@ Use the following to run 2 VCX AATH instances and test them with specific test s ## VCX AATH to ACAPy AATH Testing To test the a VCX AATH instance against another agent, such as ACApy, the following modified steps can be followed: 1. clone the [AATH repo](https://github.com/hyperledger/aries-agent-test-harness/tree/main) -2. in the root of the AATH repo, start the standard AATH services (ledger, DID resolver, tails server) AND an ACApy agent on port 9030 (Bob agent): `AGENT_PUBLIC_ENDPOINT=http://localhost:9032 ./manage start -b acapy-main` +2. in the root of the AATH repo, start the standard AATH services (ledger, DID resolver, tails server) AND an ACApy agent on port 9030 (Bob agent): `AGENT_PUBLIC_ENDPOINT=http://localhost:9032 ./manage start -b acapy-main`. If this fails, you may have to build acapy-main seperately first `./manage build -a acapy-main` 3. from within this directory (aries/agent/aath-backchannel), run the server on port 9020, with config to use the AATH components: 1. `DOCKERHOST=host.docker.internal LEDGER_URL=http://localhost:9000 GENESIS_FILE=resource/indypool.txn cargo run -- -p 9020` 2. may need to replace `DOCKERHOST` with your appropriate host dependent on OS. (check `./manage dockerhost`) diff --git a/aries/agents/mediator/Dockerfile b/aries/agents/mediator/Dockerfile index 66a4e17eab..c14991c19f 100644 --- a/aries/agents/mediator/Dockerfile +++ b/aries/agents/mediator/Dockerfile @@ -1,4 +1,4 @@ -FROM rust as builder +FROM rust:1.79.0 as builder RUN apt update && apt install -y libssl-dev libzmq3-dev cmake WORKDIR /usr/src/aries-vcx