diff --git a/.env.example b/.env.example index 1723f701..e6fc1eef 100644 --- a/.env.example +++ b/.env.example @@ -34,6 +34,15 @@ SQS_JOB_VERIFICATION_QUEUE_URL= AWS_S3_BUCKET_NAME= AWS_S3_BUCKET_REGION= + # Ethereum Settlement DEFAULT_SETTLEMENT_CLIENT_RPC= -DEFAULT_L1_CORE_CONTRACT_ADDRESS= \ No newline at end of file +DEFAULT_L1_CORE_CONTRACT_ADDRESS= + +# Sharp Services +SHARP_CUSTOMER_ID= +SHARP_USER_CRT= +SHARP_USER_KEY= +SHARP_SERVER_CRT= +SHARP_PROOF_LAYOUT= + diff --git a/.env.test b/.env.test index b7b73e2c..b4b17d6f 100644 --- a/.env.test +++ b/.env.test @@ -30,4 +30,13 @@ DEFAULT_SETTLEMENT_CLIENT_RPC="http://localhost:3000" # Ethereum Settlement DEFAULT_L1_CORE_CONTRACT_ADDRESS="0xc662c410C0ECf747543f5bA90660f6ABeBD9C8c4" SHOULD_IMPERSONATE_ACCOUNT="true" -TEST_DUMMY_CONTRACT_ADDRESS="0xE5b6F5e695BA6E4aeD92B68c4CC8Df1160D69A81" \ No newline at end of file +TEST_DUMMY_CONTRACT_ADDRESS="0xE5b6F5e695BA6E4aeD92B68c4CC8Df1160D69A81" + +# Sharp Services +SHARP_CUSTOMER_ID="sharp_consumer_id" +SHARP_URL="http://127.0.0.1:5000" +# [IMP!!!] 
These are test certificates (they don't work) +SHARP_USER_CRT="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUR4ekNDQXErZ0F3SUJBZ0lVTjBSK0xpb1MzL2ZadUZsK291RjZNNFk2RnRZd0RRWUpLb1pJaHZjTkFRRUwKQlFBd2N6RUxNQWtHQTFVRUJoTUNTVTR4RXpBUkJnTlZCQWdNQ2xOdmJXVXRVM1JoZEdVeElUQWZCZ05WQkFvTQpHRWx1ZEdWeWJtVjBJRmRwWkdkcGRITWdVSFI1SUV4MFpERU5NQXNHQTFVRUF3d0VVMVJTU3pFZE1Cc0dDU3FHClNJYjNEUUVKQVJZT1lXSmpRR3RoY201dmRDNTRlWG93SGhjTk1qUXdPREV6TVRNd05UTTBXaGNOTWpVd09ERXoKTVRNd05UTTBXakJ6TVFzd0NRWURWUVFHRXdKSlRqRVRNQkVHQTFVRUNBd0tVMjl0WlMxVGRHRjBaVEVoTUI4RwpBMVVFQ2d3WVNXNTBaWEp1WlhRZ1YybGtaMmwwY3lCUWRIa2dUSFJrTVEwd0N3WURWUVFEREFSVFZGSkxNUjB3Ckd3WUpLb1pJaHZjTkFRa0JGZzVoWW1OQWEyRnlibTkwTG5oNWVqQ0NBU0l3RFFZSktvWklodmNOQVFFQkJRQUQKZ2dFUEFEQ0NBUW9DZ2dFQkFOSEtaUGRqWSs4QWo4ZFV2V0xReEl5NTNrK1BHY001T2FlYnpTV3FER0xGSlBOdgpkVzJvWjFCSnNEb2hobWZFSCt5ZEFoQXEvbzc4NDljblg2VDJTOVhta25wdnNud2dRckU5Z3lqSmV3MUxBRzNHCm10U0lOMWJJSm9peWJ3QUR5NGxPd0xrVzUzdFdueHBSazVVVmZUU1hLYVRRTnlHd2o3Q2xMSGthcnlZYVk3OVkKOXlHMFJ2RkFkb1IzczBveWthNkFLV0d1WjhOdWd4NTY2bysyWllRenJteWVNU1NGYkhNdW1aUkxYb0hpazhBSgpLZXJ0bnNBRC9LMVJRYm80Y21ubHFoTVRhQktiTEFVVjVteFVvMlpveFBJVU9tREE5N3IyMmRTYkRkRlVjeC9kCjhQcDB6VXNycXdQckJlcW5SMXdLOE80MUlHajUzRnUzVmxDeS94MENBd0VBQWFOVE1GRXdIUVlEVlIwT0JCWUUKRkc0T0lvKzcvckJyZlR4S2FFMGx2L1dwRDJ3UE1COEdBMVVkSXdRWU1CYUFGRzRPSW8rNy9yQnJmVHhLYUUwbAp2L1dwRDJ3UE1BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFEMURDZkR3CnpoSXRGMWd5YVdhWURZRHErZjJSUHBFRWVaWk1BSDdJV0ZTajRrTzhmVHN1RnN6bFoyNXNlR3ZHYW4xQ3F4alQKYnJ3MXliVlJQeGZMUWgxRlZMMGhFeDZWYXhGditxMmtqUmlCQmZURFBxWGxYcmpaaUYrZTNPS3lKSVhnNkpIUAppbVpBV0dyRFBHNkorQi90bHRaQ3VLZVhLK1FUcnRSOVVCL29hOWVaQWc5RXNkOVJsZDRNeVo5b0NtdUNPU1hmCnk1THFkVlgrNENpTnJXQ3BwM1B2M2MyL28rZ0RMQjUzZ252R056RjR6Q1FIZ0RtN0RNZnpmZlY1TUMwV1MvWXkKVnpyUG11Sys0Y0tSK3dMOFZITVNEeC9ybTFhYnh0dEN2VW92MUw5dVZ1QUNGc29yNmdsR0N1RDNNQ0dIa0pNNgpxaS8rM1haeHhxeGw1Rzg9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K" 
+SHARP_USER_KEY="LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRRFJ5bVQzWTJQdkFJL0gKVkwxaTBNU011ZDVQanhuRE9UbW5tODBscWd4aXhTVHpiM1Z0cUdkUVNiQTZJWVpueEIvc25RSVFLdjZPL09QWApKMStrOWt2VjVwSjZiN0o4SUVLeFBZTW95WHNOU3dCdHhwclVpRGRXeUNhSXNtOEFBOHVKVHNDNUZ1ZDdWcDhhClVaT1ZGWDAwbHltazBEY2hzSSt3cFN4NUdxOG1HbU8vV1BjaHRFYnhRSGFFZDdOS01wR3VnQ2xocm1mRGJvTWUKZXVxUHRtV0VNNjVzbmpFa2hXeHpMcG1VUzE2QjRwUEFDU25xN1o3QUEveXRVVUc2T0hKcDVhb1RFMmdTbXl3RgpGZVpzVktObWFNVHlGRHBnd1BlNjl0blVtdzNSVkhNZjNmRDZkTTFMSzZzRDZ3WHFwMGRjQ3ZEdU5TQm8rZHhiCnQxWlFzdjhkQWdNQkFBRUNnZ0VBQU9mcDFiT2xLOVFKeXVlUHhjeDIvTkNVcUMxTEJDL01FdkEyUzVKWGFWbkcKbGhLR0pFb1U0Q0RoVk83dUlLYVZLTFZvMjk4RHFHUnBLM1d0RVE1TE40bytXYTcveTA5c1drMlVzbWxrVWFOZwpSaGtVZEJSK2dLNXVsQ3FKRml2dUJoTEQvRWlnQ1VWUGZKS2JtNG96TnpYcjVSMU5ENlV1aWFtODdtenlFcTBLCmZsVXlhR0RZNGdIdFNBOVBENVBFYlUveFpKeitKaHk5T2l3aVRXV0MrSHoyb2c3UWRDRDE2RlhGcit2VHpQN0MKb2tFb0VDZFNPRWlMalVENjBhS2ZxRmFCVm5MTkVudC9QSytmY1RBM05mNGtSMnFDNk9ZWjVFb09zYm1ka29ZTgpyU3NJZW9XblMxOEhvekZud2w3Z05wTUtjNmRzQzRBTldOVDFsTkhCb1FLQmdRRHlaUDFJSlppZUh6NlExaUVTCm5zd2tnblZCQUQ0SlVLR1ZDMHA3dk4yclNDZXh4c05ZZXFPTEEyZGZCUGpOVjd3blFKcUgxT05XellOMUJVSUUKeThLTCtFZVl6Q3RZa21LL21wSGJIMzNjd2tJODBuMHJROU1BalZMTlJ2YVVEOWp1NFBsRzFqaEFZUVVyTkViZQpKRlVpSk83aDVQa1llZG50SitqSHFpQnRoUUtCZ1FEZGtPbndmL0szYk4xenR0bXZQd0VicjhkVWJjRVh5NDFOCkl5VWwrZW1WSlgzYktKM0duNDZnQ2RsTTdkYmpwS3JVZ3oxL2JsZTgvMkVFckJvSEFRNkMrU2pEaGhvL01CbnIKekZheTBoK3YxbjBnZnNNVzRoOEF4cEFwc25OYnh6K2g1Wm5uSnRTd0srUjB3U0VJVVEzRjAxL2hMWWhLQ2l5OApwbW5HQi9hU3VRS0JnRzdxd1cvVExGd214ZlYyMXBsenFzeUdHZXVObGRXalhOMGIxcEI2b3lDdW11TmhwYUFHCk5uSDFNOGNxT2tPVWd4ZWZHMWRPbGx6eEc5ZGZlWTlDUWhyVW1NYVZucndmK0NuZkxDRU43d1VtcXpLenl1MFMKVXlwc2dOaElRYXNNK1dLTjllTnhRVHBNYXhZVERONjMxM0VSWDNKazJZdFdydDh6cFBSQXFDZ1ZBb0dCQU54egpUa0NMbmJ6aFphbTNlZm9DenlCMEVma3dSdHBkSGxkc3E0NlFqTmRuK1VSd3NpTXBLR2lWeEE3bDZsU1B4NlV3CmU2VHA3Z1JQZUlHRWwxVDJ1VENacGZSODNtcVdlb1FCeVJXZE9nZmplcFkxYWZpL3ZhY3c2Y21ERTRKeXloNVUKYTMveFE5ZVJwSHFDbWxKREMxZ1V5eVlwL3B2a2F
jUytNeW5sVEhHSkFvR0FQekdTSzdXOHBUYldSVEFoaTVrSQpwZk5kWk1tcnRodUxNT3F6TGhyRjZublpldk9OdTBoYXVhZktlVElFd2w0clhYZHFKQlJBaWZKMFFsLzZKWFFkCmd1VzFrZWk1Ui8rUFZ5eUhab042c3NXSTNWYklwUUloUmt6UENnTDZhbHEwSzFpT1dlV1lIOHdORGRRdlB1T2UKRkZPOEovSzNxV0NtWjU0ODBBbTNhT0U9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K" +SHARP_SERVER_CRT="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURhekNDQWxPZ0F3SUJBZ0lVRUR0Rjd0YVNTUnVQQTJ6Uk1aNWNzY2JCRm5jd0RRWUpLb1pJaHZjTkFRRUwKQlFBd1JURUxNQWtHQTFVRUJoTUNTVTR4RXpBUkJnTlZCQWdNQ2xOdmJXVXRVM1JoZEdVeElUQWZCZ05WQkFvTQpHRWx1ZEdWeWJtVjBJRmRwWkdkcGRITWdVSFI1SUV4MFpEQWVGdzB5TkRBNE1UTXhNekEzTVROYUZ3MHlOVEE0Ck1UTXhNekEzTVROYU1FVXhDekFKQmdOVkJBWVRBa2xPTVJNd0VRWURWUVFJREFwVGIyMWxMVk4wWVhSbE1TRXcKSHdZRFZRUUtEQmhKYm5SbGNtNWxkQ0JYYVdSbmFYUnpJRkIwZVNCTWRHUXdnZ0VpTUEwR0NTcUdTSWIzRFFFQgpBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFRHcEEwNEZ1QlNFaE5PNVYvMGxTaDkvSEgxeVRZT2dRVFdoOG43eDlRCnZGMHpvZFZueVFIdjE5elU5eVdia2xvOEkvOXFBVm9lRzdXTnpUVFg2Q295ZlNjb1YvazN0Q2UwVnVWMlFJTVQKdW82SzJSU05CVHB1TlNqNTlzUiszVTQ2OFRBQnY0YVpsYjU4TU5CRXM3MVRieVpLRHBGRVRkMkg3T0ZKajg4QQpNRi9MaXJkeDZPOFdZL0tDeisxd1ZXL1JRdytYYjRJSWx4bXJFOC9UZ3FNSEo4dFUxYkZiOWJNcTEvOTN5YWtJClU1V2J2NVhXKzFwZFVyTUFNcTFFaC9vZThMN2pFaFdvZXZrNzgyU0kwUk0xeG5MaEtrUUVBYXd6Zkg2ODZiR2YKUHQ3RkFIQ1pGaWJ4KzZzSkg0R1M3S25iK0x5bk9ud3phMWZPUXZEZmcvRm5BZ01CQUFHalV6QlJNQjBHQTFVZApEZ1FXQkJUYlFUdmlUTW1xNXlNK2ZJRVI4VjdTZk1pK3B6QWZCZ05WSFNNRUdEQVdnQlRiUVR2aVRNbXE1eU0rCmZJRVI4VjdTZk1pK3B6QVBCZ05WSFJNQkFmOEVCVEFEQVFIL01BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRREYKTllyRnpBa2RIVkhjUkd5SUNsTi9IVGswaldOcTVSdTB1RUpDQ21Dbm9ZY1pRSTlDQlcwTkl3dGpZUkpTKzR1UwordWh4VWpSYTA5YXdOWDhvYmU0dDZjK25HRnhZMGZqamk0cGZnbU1kMWNJeGdsM3E3Nlp0ZkllRGR6alRLRXN1CjRFUTVadnEwMnJvTEZ0ZjEvL3dRVG0xNkNKdFpGWnhNZ1phYnNxc2JRc3M2dWdMUGtTTmdBWjI1L2VhcWhnQ20KTjFUV2FxL0xJMVBLSkxPK085NFlMa2FsNVpyOTJCOXk4Q0VKVUVuSTA1R1N1MmJUOFM2a0ZBMEpadEszTW9SbwpqRWZWV1lQVHR5TFR4amNvRndCcDlHaXZYSDdSdHBxMDlmSmFhU1pNekxmNGlyNHpBdXprbExBNWZvampPNXlKCllnYlVaQUU2aS81N1NFWjR3VmxTCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K" +SHARP_PROOF_LAYOUT="small" 
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index e7aaac05..0351156c 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -1,5 +1,4 @@ ---- -name: Task - Rust Tests & Coverage +name: Rust Test & Coverage on: pull_request_target: @@ -14,7 +13,6 @@ on: jobs: coverage: - # sadly, for now we have to "rebuild" for the coverage runs-on: ubuntu-latest services: @@ -39,8 +37,15 @@ jobs: # before the plugin, as the cache uses the current rustc version as its cache key - run: rustup show - - uses: taiki-e/install-action@cargo-llvm-cov - - uses: taiki-e/install-action@nextest + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Rust Cache + uses: Swatinem/rust-cache@v2 - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 @@ -56,21 +61,23 @@ jobs: exit 1 fi - - name: Clean workspace + - name: Install cargo-llvm-cov + uses: taiki-e/install-action@cargo-llvm-cov + + - name: Install nextest + uses: taiki-e/install-action@nextest + + - name: Getting necessary files for testing run: | - cargo llvm-cov clean --workspace + wget -P ./crates/prover-services/sharp-service/tests/artifacts https://madara-orchestrator-sharp-pie.s3.amazonaws.com/238996-SN.zip - - name: Run llvm-cov + - name: Run llvm-cov tests env: ETHEREUM_BLAST_RPC_URL: ${{ secrets.ETHEREUM_BLAST_RPC_URL }} - run: | - cargo llvm-cov nextest --release --lcov --output-path lcov.info --test-threads=1 + run: cargo llvm-cov nextest --release --lcov --output-path lcov.info --test-threads=1 - name: Coveralls uses: coverallsapp/github-action@v2 with: files: lcov.info debug: true - - - uses: colpal/actions-clean@v1 - if: ${{ always() }} # To ensure this step runs even when earlier steps fail diff --git a/.gitignore b/.gitignore index cc424bf3..12b6524c 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,6 @@ *.code-workspace .vscode -lcov.info \ No newline at end
of file +lcov.info + +**/*-SN.zip \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 6fd4e120..cd50e625 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## Added - Tests for Settlement client. +- Tests for prover client. +- Added Rust Cache for Coverage Test CI. +- support for fetching PIE file from storage client in proving job. - added coveralls support - moved mongodb serde behind feature flag - implemented DA worker. @@ -33,8 +36,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). ## Removed +- `init_config` from all the tests. - `fetch_from_test` argument ## Fixed +- Get Fact Info logic. - Fixed state update worker logic as per the new implementation. diff --git a/Cargo.lock b/Cargo.lock index 973fa262..8414b14e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8338,11 +8338,16 @@ version = "0.1.0" dependencies = [ "alloy 0.2.1", "async-trait", + "base64 0.22.1", "cairo-vm 1.0.0-rc3", + "dotenvy", "gps-fact-checker", "hex", + "httpmock", + "lazy_static", "prover-client-interface", "reqwest 0.11.27", + "rstest 0.18.2", "serde", "serde_json", "snos", diff --git a/Cargo.toml b/Cargo.toml index 62d01399..14d3212a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,7 +34,7 @@ dotenvy = "0.15.7" futures = "0.3.30" mongodb = { version = "2.8.1" } omniqueue = { version = "0.2.0" } -reqwest = { version = "0.11.24" } +reqwest = { version = "0.11.24", features = ["rustls-tls", "native-tls"] } rstest = "0.18.2" serde = { version = "1.0.197" } serde_json = "1.0.114" @@ -48,7 +48,7 @@ tracing = "0.1.40" tracing-subscriber = { version = "0.3.18" } url = { version = "2.5.0", features = ["serde"] } uuid = { version = "1.7.0", features = ["v4", "serde"] } -httpmock = { version = "0.7.0" } +httpmock = { version = "0.7.0", features = ["remote"] } num-bigint = { version = "0.4.4" } arc-swap = { version = "1.7.1" } num-traits = "0.2" diff 
--git a/README.md b/README.md index 39d9c777..8ed59c5f 100644 --- a/README.md +++ b/README.md @@ -11,3 +11,17 @@ The tentative flow of the orchestrator looks like this but this is subject to change as we learn more about external systems and the constraints involved. ![orchestrator_da_sequencer_diagram](./docs/orchestrator_da_sequencer_diagram.png) + +## Testing + +- Files needed for tests can be fetched through S3: + + ```shell + wget -P ./crates/prover-services/sharp-service/tests/artifacts https://madara-orchestrator-sharp-pie.s3.amazonaws.com/238996-SN.zip + ``` + +- To run all the tests: + + ```shell + cargo llvm-cov nextest --release --lcov --output-path lcov.info --test-threads=1 + ``` diff --git a/crates/orchestrator/src/jobs/constants.rs b/crates/orchestrator/src/jobs/constants.rs index b295e1ea..5beff7a5 100644 --- a/crates/orchestrator/src/jobs/constants.rs +++ b/crates/orchestrator/src/jobs/constants.rs @@ -1,9 +1,5 @@ pub const JOB_PROCESS_ATTEMPT_METADATA_KEY: &str = "process_attempt_no"; - pub const JOB_VERIFICATION_ATTEMPT_METADATA_KEY: &str = "verification_attempt_no"; - -pub const JOB_METADATA_CAIRO_PIE_PATH_KEY: &str = "cairo_pie_path"; - pub const JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY: &str = "blocks_number_to_settle"; pub const JOB_METADATA_STATE_UPDATE_FETCH_FROM_TESTS: &str = "fetch_from_test_data"; pub const JOB_METADATA_STATE_UPDATE_ATTEMPT_PREFIX: &str = "attempt_tx_hashes_"; diff --git a/crates/orchestrator/src/jobs/da_job/mod.rs b/crates/orchestrator/src/jobs/da_job/mod.rs index 592e6ad1..8148b113 100644 --- a/crates/orchestrator/src/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/jobs/da_job/mod.rs @@ -374,8 +374,11 @@ pub mod test { use std::fs; use std::fs::File; use std::io::Read; + use std::sync::Arc; + use crate::config::config; use crate::data_storage::MockDataStorage; + use crate::tests::config::TestConfigBuilder; use ::serde::{Deserialize, Serialize}; use color_eyre::Result; use da_client_interface::MockDaClient; @@ 
-385,9 +388,10 @@ pub mod test { use majin_blob_types::state_diffs::UnorderedEq; use rstest::rstest; use serde_json::json; + use starknet::providers::jsonrpc::HttpTransport; + use starknet::providers::JsonRpcClient; use starknet_core::types::{FieldElement, StateUpdate}; - - use crate::tests::common::init_config; + use url::Url; /// Tests `da_word` function with various inputs for class flag, new nonce, and number of changes. /// Verifies that `da_word` produces the correct FieldElement based on the provided parameters. @@ -453,16 +457,19 @@ pub mod test { // Mocking storage client storage_client.expect_put_data().returning(|_, _| Result::Ok(())).times(1); - let config = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - Some(da_client), - None, - None, - Some(storage_client), - ) - .await; + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); + + // mock block number (madara) : 5 + TestConfigBuilder::new() + .mock_starknet_client(Arc::new(provider)) + .mock_da_client(Box::new(da_client)) + .mock_storage_client(Box::new(storage_client)) + .build() + .await; + + let config = config().await; get_nonce_attached(&server, nonce_file_path); diff --git a/crates/orchestrator/src/jobs/proving_job/mod.rs b/crates/orchestrator/src/jobs/proving_job/mod.rs index c9c38d2b..ab6cf1c1 100644 --- a/crates/orchestrator/src/jobs/proving_job/mod.rs +++ b/crates/orchestrator/src/jobs/proving_job/mod.rs @@ -1,6 +1,4 @@ use std::collections::HashMap; -use std::path::PathBuf; -use std::str::FromStr; use async_trait::async_trait; use cairo_vm::vm::runners::cairo_pie::CairoPie; @@ -11,7 +9,6 @@ use tracing::log::log; use tracing::log::Level::Error; use uuid::Uuid; -use super::constants::JOB_METADATA_CAIRO_PIE_PATH_KEY; use super::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; use super::{Job, JobError, OtherError}; use 
crate::config::Config; @@ -22,7 +19,10 @@ pub enum ProvingError { CairoPIEWrongPath { internal_id: String }, #[error("Not able to read the cairo PIE file from the zip file provided.")] - CairoPIENotReadable, + CairoPIENotReadable(String), + + #[error("Not able to get the PIE file from AWS S3 bucket.")] + CairoPIEFileFetchFailed(String), #[error("Other error: {0}")] Other(#[from] OtherError), @@ -38,10 +38,6 @@ impl Job for ProvingJob { internal_id: String, metadata: HashMap, ) -> Result { - if !metadata.contains_key(JOB_METADATA_CAIRO_PIE_PATH_KEY) { - // TODO: validate the usage of `.clone()` here, ensure lightweight borrowing of variables - Err(ProvingError::CairoPIEWrongPath { internal_id: internal_id.clone() })? - } Ok(JobItem { id: Uuid::new_v4(), internal_id, @@ -54,15 +50,15 @@ impl Job for ProvingJob { } async fn process_job(&self, config: &Config, job: &mut JobItem) -> Result { - // TODO: allow to download PIE from storage - let cairo_pie_path = job - .metadata - .get(JOB_METADATA_CAIRO_PIE_PATH_KEY) - .map(|s| PathBuf::from_str(s)) - .ok_or_else(|| ProvingError::CairoPIEWrongPath { internal_id: job.internal_id.clone() })? 
- .map_err(|_| ProvingError::CairoPIENotReadable)?; - - let cairo_pie = CairoPie::read_zip_file(&cairo_pie_path).map_err(|_| ProvingError::CairoPIENotReadable)?; + // Cairo Pie path in s3 storage client + let cairo_pie_path = job.internal_id.to_string() + "/pie.zip"; + let cairo_pie_file = config + .storage() + .get_data(&cairo_pie_path) + .await + .map_err(|e| ProvingError::CairoPIEFileFetchFailed(e.to_string()))?; + let cairo_pie = CairoPie::from_bytes(cairo_pie_file.to_vec().as_slice()) + .map_err(|e| ProvingError::CairoPIENotReadable(e.to_string()))?; let external_id = config .prover_client() diff --git a/crates/orchestrator/src/tests/common/mod.rs b/crates/orchestrator/src/tests/common/mod.rs index 619591d5..9d56b7e3 100644 --- a/crates/orchestrator/src/tests/common/mod.rs +++ b/crates/orchestrator/src/tests/common/mod.rs @@ -1,66 +1,23 @@ pub mod constants; use std::collections::HashMap; -use std::sync::Arc; use ::uuid::Uuid; use aws_config::Region; -use constants::*; -use da_client_interface::MockDaClient; use mongodb::Client; -use prover_client_interface::MockProverClient; use rstest::*; use serde::Deserialize; -use settlement_client_interface::MockSettlementClient; -use starknet::providers::jsonrpc::HttpTransport; -use starknet::providers::JsonRpcClient; -use url::Url; -use crate::config::Config; use crate::data_storage::aws_s3::config::{AWSS3ConfigType, S3LocalStackConfig}; use crate::data_storage::aws_s3::AWSS3; -use crate::data_storage::{DataStorage, DataStorageConfig, MockDataStorage}; +use crate::data_storage::{DataStorage, DataStorageConfig}; use crate::database::mongodb::config::MongoDbConfig; use crate::database::mongodb::MongoDb; -use crate::database::{DatabaseConfig, MockDatabase}; +use crate::database::DatabaseConfig; use crate::jobs::types::JobStatus::Created; use crate::jobs::types::JobType::DataSubmission; use crate::jobs::types::{ExternalId, JobItem}; use crate::queue::job_queue::{JOB_PROCESSING_QUEUE, JOB_VERIFICATION_QUEUE}; -use 
crate::queue::MockQueueProvider; - -pub async fn init_config( - rpc_url: Option, - database: Option, - queue: Option, - da_client: Option, - prover_client: Option, - settlement_client: Option, - storage_client: Option, -) -> Config { - let _ = tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).with_target(false).try_init(); - - let rpc_url = rpc_url.unwrap_or(MADARA_RPC_URL.to_string()); - let database = database.unwrap_or_default(); - let queue = queue.unwrap_or_default(); - let da_client = da_client.unwrap_or_default(); - let prover_client = prover_client.unwrap_or_default(); - let settlement_client = settlement_client.unwrap_or_default(); - let storage_client = storage_client.unwrap_or_default(); - - // init starknet client - let provider = JsonRpcClient::new(HttpTransport::new(Url::parse(rpc_url.as_str()).expect("Failed to parse URL"))); - - Config::new( - Arc::new(provider), - Box::new(da_client), - Box::new(prover_client), - Box::new(settlement_client), - Box::new(database), - Box::new(queue), - Box::new(storage_client), - ) -} #[fixture] pub fn default_job_item() -> JobItem { diff --git a/crates/orchestrator/src/tests/config.rs b/crates/orchestrator/src/tests/config.rs index ebe60256..21510375 100644 --- a/crates/orchestrator/src/tests/config.rs +++ b/crates/orchestrator/src/tests/config.rs @@ -76,6 +76,26 @@ impl TestConfigBuilder { self } + pub fn mock_starknet_client(mut self, starknet_client: Arc>) -> TestConfigBuilder { + self.starknet_client = Some(starknet_client); + self + } + + pub fn mock_prover_client(mut self, prover_client: Box) -> TestConfigBuilder { + self.prover_client = Some(prover_client); + self + } + + pub fn mock_storage_client(mut self, storage_client: Box) -> TestConfigBuilder { + self.storage = Some(storage_client); + self + } + + pub fn mock_queue(mut self, queue: Box) -> TestConfigBuilder { + self.queue = Some(queue); + self + } + pub async fn build(mut self) -> MockServer { 
dotenvy::from_filename("../.env.test").expect("Failed to load the .env file"); diff --git a/crates/orchestrator/src/tests/jobs/proving_job/mod.rs b/crates/orchestrator/src/tests/jobs/proving_job/mod.rs index 6eeaaef7..d35b2670 100644 --- a/crates/orchestrator/src/tests/jobs/proving_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/proving_job/mod.rs @@ -1,28 +1,34 @@ +use bytes::Bytes; use std::collections::HashMap; +use std::fs::File; +use std::io::Read; +use std::path::Path; +use std::sync::Arc; -use crate::config::{config, config_force_init}; +use crate::config::config; +use crate::data_storage::MockDataStorage; use httpmock::prelude::*; +use mockall::predicate::eq; use prover_client_interface::{MockProverClient, TaskStatus}; use rstest::*; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use url::Url; use uuid::Uuid; -use super::super::common::{default_job_item, init_config}; -use crate::jobs::constants::JOB_METADATA_CAIRO_PIE_PATH_KEY; +use super::super::common::default_job_item; use crate::jobs::proving_job::ProvingJob; use crate::jobs::types::{JobItem, JobStatus, JobType}; use crate::jobs::Job; +use crate::tests::config::TestConfigBuilder; #[rstest] #[tokio::test] async fn test_create_job() { - let config = init_config(None, None, None, None, None, None, None).await; - let job = ProvingJob - .create_job( - &config, - String::from("0"), - HashMap::from([(JOB_METADATA_CAIRO_PIE_PATH_KEY.into(), "pie.zip".into())]), - ) - .await; + TestConfigBuilder::new().build().await; + let config = config().await; + + let job = ProvingJob.create_job(&config, String::from("0"), HashMap::new()).await; assert!(job.is_ok()); let job = job.unwrap(); @@ -41,7 +47,9 @@ async fn test_verify_job(#[from(default_job_item)] mut job_item: JobItem) { let mut prover_client = MockProverClient::new(); prover_client.expect_get_task_status().times(1).returning(|_| Ok(TaskStatus::Succeeded)); - let config = init_config(None, None, None, None, 
Some(prover_client), None, None).await; + TestConfigBuilder::new().mock_prover_client(Box::new(prover_client)).build().await; + + let config = config().await; assert!(ProvingJob.verify_job(&config, &mut job_item).await.is_ok()); } @@ -49,24 +57,28 @@ async fn test_verify_job(#[from(default_job_item)] mut job_item: JobItem) { #[tokio::test] async fn test_process_job() { let server = MockServer::start(); - let mut prover_client = MockProverClient::new(); + prover_client.expect_submit_task().times(1).returning(|_| Ok("task_id".to_string())); + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); - let config_init = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - None, - Some(prover_client), - None, - None, - ) - .await; + let mut file = + File::open(Path::new(&format!("{}/src/tests/artifacts/fibonacci.zip", env!("CARGO_MANIFEST_DIR")))).unwrap(); + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer).unwrap(); - config_force_init(config_init).await; + let mut storage = MockDataStorage::new(); + let buffer_bytes = Bytes::from(buffer); + storage.expect_get_data().with(eq("0/pie.zip")).return_once(move |_| Ok(buffer_bytes)); - let cairo_pie_path = format!("{}/src/tests/artifacts/fibonacci.zip", env!("CARGO_MANIFEST_DIR")); + TestConfigBuilder::new() + .mock_starknet_client(Arc::new(provider)) + .mock_prover_client(Box::new(prover_client)) + .mock_storage_client(Box::new(storage)) + .build() + .await; assert_eq!( ProvingJob @@ -78,7 +90,7 @@ async fn test_process_job() { job_type: JobType::ProofCreation, status: JobStatus::Created, external_id: String::new().into(), - metadata: HashMap::from([(JOB_METADATA_CAIRO_PIE_PATH_KEY.into(), cairo_pie_path)]), + metadata: HashMap::new(), version: 0, } ) diff --git a/crates/orchestrator/src/tests/jobs/state_update_job/mod.rs 
b/crates/orchestrator/src/tests/jobs/state_update_job/mod.rs index 00508d1f..dfd40a24 100644 --- a/crates/orchestrator/src/tests/jobs/state_update_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/state_update_job/mod.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use std::fs; use std::path::PathBuf; +use std::sync::Arc; use assert_matches::assert_matches; use bytes::Bytes; @@ -12,8 +13,7 @@ use settlement_client_interface::MockSettlementClient; use color_eyre::eyre::eyre; use utils::env_utils::get_env_var_or_panic; -use super::super::common::init_config; -use crate::config::{config, config_force_init}; +use crate::config::config; use crate::constants::{BLOB_DATA_FILE_NAME, SNOS_OUTPUT_FILE_NAME}; use crate::data_storage::MockDataStorage; use crate::jobs::constants::JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO; @@ -28,6 +28,9 @@ use crate::jobs::{Job, JobError}; use crate::tests::common::{default_job_item, get_storage_client}; use crate::tests::config::TestConfigBuilder; use lazy_static::lazy_static; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use url::Url; lazy_static! 
{ pub static ref CURRENT_PATH: PathBuf = std::env::current_dir().unwrap(); @@ -125,7 +128,9 @@ async fn test_process_job_works( #[rstest] #[tokio::test] async fn create_job_works() { - let config = init_config(None, None, None, None, None, None, None).await; + TestConfigBuilder::new().build().await; + + let config = config().await; let job = StateUpdateJob.create_job(&config, String::from("0"), HashMap::default()).await; assert!(job.is_ok()); @@ -143,7 +148,6 @@ async fn create_job_works() { #[rstest] #[tokio::test] async fn process_job_works() { - let server = MockServer::start(); let mut settlement_client = MockSettlementClient::new(); let mut storage_client = MockDataStorage::new(); @@ -197,17 +201,11 @@ async fn process_job_works() { .returning(|_, _, _| Ok(String::from("0x5d17fac98d9454030426606019364f6e68d915b91f6210ef1e2628cd6987442"))); } - let config_init = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - None, - None, - Some(settlement_client), - Some(storage_client), - ) - .await; - config_force_init(config_init).await; + TestConfigBuilder::new() + .mock_settlement_client(Box::new(settlement_client)) + .mock_storage_client(Box::new(storage_client)) + .build() + .await; let mut metadata: HashMap = HashMap::new(); metadata.insert(String::from(JOB_METADATA_STATE_UPDATE_FETCH_FROM_TESTS), String::from("TRUE")); @@ -229,16 +227,17 @@ async fn process_job_works() { async fn process_job_invalid_inputs_errors(#[case] block_numbers_to_settle: String, #[case] expected_error: &str) { let server = MockServer::start(); let settlement_client = MockSettlementClient::new(); - let config = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - None, - None, - Some(settlement_client), - None, - ) - .await; + + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); + + TestConfigBuilder::new() + 
.mock_starknet_client(Arc::new(provider)) + .mock_settlement_client(Box::new(settlement_client)) + .build() + .await; + let config = config().await; let mut metadata: HashMap = HashMap::new(); metadata.insert(String::from(JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY), block_numbers_to_settle); @@ -266,18 +265,15 @@ async fn process_job_invalid_input_gap_panics() { settlement_client.expect_get_last_settled_block().returning(|| Ok(4_u64)); - let config_init = init_config( - Some(format!("http://localhost:{}", server.port())), - None, - None, - None, - None, - Some(settlement_client), - None, - ) - .await; - - config_force_init(config_init).await; + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); + + TestConfigBuilder::new() + .mock_starknet_client(Arc::new(provider)) + .mock_settlement_client(Box::new(settlement_client)) + .build() + .await; let mut metadata: HashMap = HashMap::new(); metadata.insert(String::from(JOB_METADATA_STATE_UPDATE_BLOCKS_TO_SETTLE_KEY), String::from("6, 7, 8")); diff --git a/crates/orchestrator/src/tests/server/mod.rs b/crates/orchestrator/src/tests/server/mod.rs index 36905360..33b7f657 100644 --- a/crates/orchestrator/src/tests/server/mod.rs +++ b/crates/orchestrator/src/tests/server/mod.rs @@ -1,19 +1,27 @@ use std::io::Read; use std::net::SocketAddr; +use std::sync::Arc; use axum::http::StatusCode; use hyper::body::Buf; use hyper::{Body, Request}; use rstest::*; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use url::Url; use utils::env_utils::get_env_var_or_default; -use super::common::init_config; use crate::queue::init_consumers; use crate::routes::app_router; +use crate::tests::config::TestConfigBuilder; #[fixture] pub async fn setup_server() -> SocketAddr { - let _config = init_config(Some("http://localhost:9944".to_string()), None, None, None, None, None, None).await; + let 
provider = JsonRpcClient::new(HttpTransport::new( + Url::parse("http://localhost:9944".to_string().as_str()).expect("Failed to parse URL"), + )); + + TestConfigBuilder::new().mock_starknet_client(Arc::new(provider)).build().await; let host = get_env_var_or_default("HOST", "127.0.0.1"); let port = get_env_var_or_default("PORT", "3000").parse::().expect("PORT must be a u16"); diff --git a/crates/orchestrator/src/tests/workers/proving/mod.rs b/crates/orchestrator/src/tests/workers/proving/mod.rs index 4bf0ef75..22f4a36c 100644 --- a/crates/orchestrator/src/tests/workers/proving/mod.rs +++ b/crates/orchestrator/src/tests/workers/proving/mod.rs @@ -7,14 +7,16 @@ use mockall::predicate::eq; use prover_client_interface::MockProverClient; use rstest::rstest; use settlement_client_interface::MockSettlementClient; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use url::Url; -use crate::config::config_force_init; use crate::database::MockDatabase; use crate::jobs::job_handler_factory::mock_factory; use crate::jobs::types::{JobItem, JobStatus, JobType}; use crate::jobs::{Job, MockJob}; use crate::queue::MockQueueProvider; -use crate::tests::common::init_config; +use crate::tests::config::TestConfigBuilder; use crate::tests::workers::utils::{db_checks_proving_worker, get_job_by_mock_id_vector}; use crate::workers::proving::ProvingWorker; @@ -85,17 +87,19 @@ async fn test_proving_worker(#[case] incomplete_runs: bool) -> Result<(), Box> = Arc::new(Box::new(job_handler)); let ctx = mock_factory::get_job_handler_context(); diff --git a/crates/orchestrator/src/tests/workers/snos/mod.rs b/crates/orchestrator/src/tests/workers/snos/mod.rs index 45917b30..ca3524a3 100644 --- a/crates/orchestrator/src/tests/workers/snos/mod.rs +++ b/crates/orchestrator/src/tests/workers/snos/mod.rs @@ -6,16 +6,18 @@ use httpmock::MockServer; use mockall::predicate::eq; use rstest::rstest; use serde_json::json; +use starknet::providers::jsonrpc::HttpTransport; 
+use starknet::providers::JsonRpcClient; +use url::Url; use uuid::Uuid; -use crate::config::config_force_init; use crate::database::MockDatabase; use crate::jobs::job_handler_factory::mock_factory; use crate::jobs::types::{JobStatus, JobType}; use crate::jobs::{Job, MockJob}; use crate::queue::job_queue::JOB_PROCESSING_QUEUE; use crate::queue::MockQueueProvider; -use crate::tests::common::init_config; +use crate::tests::config::TestConfigBuilder; use crate::tests::workers::utils::get_job_item_mock_by_id; use crate::workers::snos::SnosWorker; use crate::workers::Worker; @@ -88,17 +90,18 @@ async fn test_snos_worker(#[case] db_val: bool) -> Result<(), Box> { // mock block number (madara) : 5 let rpc_response_block_number = block; let response = json!({ "id": 1,"jsonrpc":"2.0","result": rpc_response_block_number }); - let config = init_config( - Some(format!("http://localhost:{}", server.port())), - Some(db), - Some(queue), - Some(da_client), - None, - None, - None, - ) - .await; - config_force_init(config).await; + + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); + + TestConfigBuilder::new() + .mock_starknet_client(Arc::new(provider)) + .mock_db_client(Box::new(db)) + .mock_queue(Box::new(queue)) + .mock_da_client(Box::new(da_client)) + .build() + .await; // mocking block call let rpc_block_call_mock = server.mock(|when, then| { diff --git a/crates/orchestrator/src/tests/workers/update_state/mod.rs b/crates/orchestrator/src/tests/workers/update_state/mod.rs index 15a3f3ba..a15cc62a 100644 --- a/crates/orchestrator/src/tests/workers/update_state/mod.rs +++ b/crates/orchestrator/src/tests/workers/update_state/mod.rs @@ -5,15 +5,17 @@ use da_client_interface::MockDaClient; use httpmock::MockServer; use mockall::predicate::eq; use rstest::rstest; +use starknet::providers::jsonrpc::HttpTransport; +use starknet::providers::JsonRpcClient; +use url::Url; use 
uuid::Uuid; -use crate::config::config_force_init; use crate::database::MockDatabase; use crate::jobs::job_handler_factory::mock_factory; use crate::jobs::types::{JobStatus, JobType}; use crate::jobs::{Job, MockJob}; use crate::queue::MockQueueProvider; -use crate::tests::common::init_config; +use crate::tests::config::TestConfigBuilder; use crate::tests::workers::utils::{get_job_by_mock_id_vector, get_job_item_mock_by_id}; use crate::workers::update_state::UpdateStateWorker; use crate::workers::Worker; @@ -100,18 +102,18 @@ async fn test_update_state_worker( .returning(|_, _, _| Ok(())) .withf(|queue, _payload, _delay| queue == JOB_PROCESSING_QUEUE); + let provider = JsonRpcClient::new(HttpTransport::new( + Url::parse(format!("http://localhost:{}", server.port()).as_str()).expect("Failed to parse URL"), + )); + // mock block number (madara) : 5 - let config = init_config( - Some(format!("http://localhost:{}", server.port())), - Some(db), - Some(queue), - Some(da_client), - None, - None, - None, - ) - .await; - config_force_init(config).await; + TestConfigBuilder::new() + .mock_starknet_client(Arc::new(provider)) + .mock_db_client(Box::new(db)) + .mock_queue(Box::new(queue)) + .mock_da_client(Box::new(da_client)) + .build() + .await; let update_state_worker = UpdateStateWorker {}; update_state_worker.run_worker().await?; diff --git a/crates/orchestrator/src/tests/workers/utils/mod.rs b/crates/orchestrator/src/tests/workers/utils/mod.rs index edb9307a..9ae812f4 100644 --- a/crates/orchestrator/src/tests/workers/utils/mod.rs +++ b/crates/orchestrator/src/tests/workers/utils/mod.rs @@ -1,5 +1,4 @@ use crate::database::MockDatabase; -use crate::jobs::constants::JOB_METADATA_CAIRO_PIE_PATH_KEY; use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; use crate::jobs::MockJob; use mockall::predicate::eq; @@ -45,7 +44,7 @@ pub fn get_job_by_mock_id_vector( job_type: job_type.clone(), status: job_status.clone(), external_id: ExternalId::Number(0), - metadata: 
get_hashmap(), + metadata: HashMap::new(), version: 0, }) } @@ -62,7 +61,7 @@ pub fn db_checks_proving_worker(id: i32, db: &mut MockDatabase, mock_job: &mut M job_type: JobType::ProofCreation, status: JobStatus::Created, external_id: ExternalId::Number(0), - metadata: get_hashmap(), + metadata: HashMap::new(), version: 0, } } @@ -82,8 +81,3 @@ pub fn db_checks_proving_worker(id: i32, db: &mut MockDatabase, mock_job: &mut M .withf(move |item| item.internal_id == id.clone().to_string()) .returning(move |_| Ok(job_item_cloned.clone())); } - -pub fn get_hashmap() -> HashMap { - let cairo_pie_path = format!("{}/src/tests/artifacts/fibonacci.zip", env!("CARGO_MANIFEST_DIR")); - HashMap::from([(JOB_METADATA_CAIRO_PIE_PATH_KEY.into(), cairo_pie_path)]) -} diff --git a/crates/prover-services/gps-fact-checker/src/error.rs b/crates/prover-services/gps-fact-checker/src/error.rs index 166011be..6dcfb9b8 100644 --- a/crates/prover-services/gps-fact-checker/src/error.rs +++ b/crates/prover-services/gps-fact-checker/src/error.rs @@ -50,4 +50,6 @@ pub enum FactCheckerError { TreeStructureEndOffsetInvalid(usize, usize), #[error("Tree structure: root offset {0} does not match the output length {1}")] TreeStructureRootOffsetInvalid(usize, usize), + #[error("Program output doesn't match the segment size.")] + InvalidSegment, } diff --git a/crates/prover-services/gps-fact-checker/src/fact_info.rs b/crates/prover-services/gps-fact-checker/src/fact_info.rs index e7156ef9..d203cf2c 100644 --- a/crates/prover-services/gps-fact-checker/src/fact_info.rs +++ b/crates/prover-services/gps-fact-checker/src/fact_info.rs @@ -9,7 +9,6 @@ use cairo_vm::types::relocatable::MaybeRelocatable; use cairo_vm::vm::runners::cairo_pie::CairoPie; use cairo_vm::Felt252; use starknet::core::types::FieldElement; -use utils::ensure; use super::error::FactCheckerError; use super::fact_node::generate_merkle_root; @@ -45,33 +44,26 @@ pub fn get_program_output(cairo_pie: &CairoPie) -> Result, FactChec 
.get(&BuiltinName::output) .ok_or(FactCheckerError::OutputBuiltinNoSegmentInfo)?; - let segment_start = cairo_pie - .memory - .0 - .iter() - .enumerate() - .find_map(|(ptr, ((index, _), _))| if *index == segment_info.index as usize { Some(ptr) } else { None }) - .ok_or(FactCheckerError::OutputSegmentNotFound)?; - - let mut output = Vec::with_capacity(segment_info.size); - let mut expected_offset = 0; - - #[allow(clippy::explicit_counter_loop)] - for i in segment_start..segment_start + segment_info.size { - let ((_, offset), value) = cairo_pie.memory.0.get(i).ok_or(FactCheckerError::OutputSegmentInvalidRange)?; - - ensure!( - *offset == expected_offset, - FactCheckerError::OutputSegmentInconsistentOffset(*offset, expected_offset) - ); - match value { - MaybeRelocatable::Int(felt) => output.push(*felt), - MaybeRelocatable::RelocatableValue(_) => { - return Err(FactCheckerError::OutputSegmentUnexpectedRelocatable(*offset)); + let mut output = vec![Felt252::from(0); segment_info.size]; + let mut insertion_count = 0; + let cairo_program_memory = &cairo_pie.memory.0; + + for ((index, offset), value) in cairo_program_memory.iter() { + if *index == segment_info.index as usize { + match value { + MaybeRelocatable::Int(felt) => { + output[*offset] = *felt; + insertion_count += 1; + } + MaybeRelocatable::RelocatableValue(_) => { + return Err(FactCheckerError::OutputSegmentUnexpectedRelocatable(*offset)); + } } } + } - expected_offset += 1; + if insertion_count != segment_info.size { + return Err(FactCheckerError::InvalidSegment); } Ok(output) diff --git a/crates/prover-services/sharp-service/Cargo.toml b/crates/prover-services/sharp-service/Cargo.toml index 87e44a49..dd46b05a 100644 --- a/crates/prover-services/sharp-service/Cargo.toml +++ b/crates/prover-services/sharp-service/Cargo.toml @@ -6,15 +6,21 @@ edition.workspace = true [dependencies] alloy.workspace = true async-trait.workspace = true +base64 = "0.22.1" cairo-vm.workspace = true +dotenvy.workspace = true 
gps-fact-checker.workspace = true hex.workspace = true +httpmock.workspace = true +lazy_static.workspace = true prover-client-interface.workspace = true reqwest.workspace = true +rstest.workspace = true serde.workspace = true serde_json.workspace = true snos.workspace = true thiserror.workspace = true +tokio.workspace = true tracing.workspace = true url.workspace = true utils.workspace = true @@ -22,3 +28,4 @@ uuid.workspace = true [dev-dependencies] tokio.workspace = true +httpmock.workspace = true diff --git a/crates/prover-services/sharp-service/src/client.rs b/crates/prover-services/sharp-service/src/client.rs index e1aa8c14..71550924 100644 --- a/crates/prover-services/sharp-service/src/client.rs +++ b/crates/prover-services/sharp-service/src/client.rs @@ -1,12 +1,16 @@ -use serde_json::json; -use snos::sharp::{CairoJobResponse, CairoStatusResponse}; +use base64::engine::general_purpose; +use base64::Engine; +use reqwest::{Certificate, ClientBuilder, Identity}; +use std::clone::Clone; use url::Url; +use utils::env_utils::get_env_var_or_panic; use uuid::Uuid; use crate::error::SharpError; +use crate::types::{SharpAddJobResponse, SharpGetStatusResponse}; /// SHARP endpoint for Sepolia testnet -pub const DEFAULT_SHARP_URL: &str = "https://testnet.provingservice.io"; +pub const DEFAULT_SHARP_URL: &str = "https://sepolia-recursive.public-testnet.provingservice.io/v1/gateway"; /// SHARP API async wrapper pub struct SharpClient { @@ -15,25 +19,85 @@ pub struct SharpClient { } impl SharpClient { + /// We need to set up the client with the provided certificates. 
+ /// We need to have three secrets : + /// - base64(SHARP_USER_CRT) + /// - base64(SHARP_USER_KEY) + /// - base64(SHARP_SERVER_CRT) + /// + /// You can run this command in terminal to convert a file output into base64 + /// and then copy it and paste it into .env file : + /// + /// `cat | base64` pub fn new(url: Url) -> Self { - Self { base_url: url, client: reqwest::Client::new() } + // Getting the cert files from the .env and then decoding it from base64 + let cert = general_purpose::STANDARD.decode(get_env_var_or_panic("SHARP_USER_CRT")).unwrap(); + let key = general_purpose::STANDARD.decode(get_env_var_or_panic("SHARP_USER_KEY")).unwrap(); + let server_cert = general_purpose::STANDARD.decode(get_env_var_or_panic("SHARP_SERVER_CRT")).unwrap(); + + // Adding Customer ID to the url + let mut url_mut = url.clone(); + let customer_id = get_env_var_or_panic("SHARP_CUSTOMER_ID"); + url_mut.query_pairs_mut().append_pair("customer_id", customer_id.as_str()); + + Self { + base_url: url_mut, + client: ClientBuilder::new() + .identity(Identity::from_pkcs8_pem(&cert, &key).unwrap()) + .add_root_certificate(Certificate::from_pem(server_cert.as_slice()).unwrap()) + .build() + .unwrap(), + } } - pub async fn add_job(&self, encoded_pie: &str) -> Result { - let data = json!({ "action": "add_job", "request": { "cairo_pie": encoded_pie } }); - let url = self.base_url.join("add_job").unwrap(); - let res = self.client.post(url).json(&data).send().await.map_err(SharpError::AddJobFailure)?; + pub async fn add_job(&self, encoded_pie: &str) -> Result<(SharpAddJobResponse, Uuid), SharpError> { + let mut base_url = self.base_url.clone(); + + base_url.path_segments_mut().map_err(|_| SharpError::PathSegmentMutFailOnUrl)?.push("add_job"); + + let cairo_key = Uuid::new_v4(); + let cairo_key_string = cairo_key.to_string(); + let proof_layout = get_env_var_or_panic("SHARP_PROOF_LAYOUT"); + + // Params for sending the PIE file to the prover + // for temporary reference you can check this doc : 
+ // https://docs.google.com/document/d/1-9ggQoYmjqAtLBGNNR2Z5eLreBmlckGYjbVl0khtpU0 + let params = vec![ + ("cairo_job_key", cairo_key_string.as_str()), + ("offchain_proof", "true"), + ("proof_layout", proof_layout.as_str()), + ]; + + // Adding params to the URL + add_params_to_url(&mut base_url, params); + + let res = + self.client.post(base_url).body(encoded_pie.to_string()).send().await.map_err(SharpError::AddJobFailure)?; match res.status() { - reqwest::StatusCode::OK => res.json().await.map_err(SharpError::AddJobFailure), + reqwest::StatusCode::OK => { + let result: SharpAddJobResponse = res.json().await.map_err(SharpError::AddJobFailure)?; + Ok((result, cairo_key)) + } code => Err(SharpError::SharpService(code)), } } - pub async fn get_job_status(&self, job_key: &Uuid) -> Result { - let data = json!({ "action": "get_status", "request": { "cairo_job_key": job_key } }); - let url = self.base_url.join("get_status").unwrap(); - let res = self.client.post(url).json(&data).send().await.map_err(SharpError::GetJobStatusFailure)?; + pub async fn get_job_status(&self, job_key: &Uuid) -> Result { + let mut base_url = self.base_url.clone(); + + base_url.path_segments_mut().map_err(|_| SharpError::PathSegmentMutFailOnUrl)?.push("get_status"); + let cairo_key_string = job_key.to_string(); + + // Params for getting the prover job status + // for temporary reference you can check this doc : + // https://docs.google.com/document/d/1-9ggQoYmjqAtLBGNNR2Z5eLreBmlckGYjbVl0khtpU0 + let params = vec![("cairo_job_key", cairo_key_string.as_str())]; + + // Adding params to the url + add_params_to_url(&mut base_url, params); + + let res = self.client.post(base_url).send().await.map_err(SharpError::GetJobStatusFailure)?; match res.status() { reqwest::StatusCode::OK => res.json().await.map_err(SharpError::GetJobStatusFailure), @@ -42,6 +106,13 @@ impl SharpClient { } } +fn add_params_to_url(url: &mut Url, params: Vec<(&str, &str)>) { + let mut pairs = url.query_pairs_mut(); + for (key, 
value) in params { + pairs.append_pair(key, value); + } +} + impl Default for SharpClient { fn default() -> Self { Self::new(DEFAULT_SHARP_URL.parse().unwrap()) diff --git a/crates/prover-services/sharp-service/src/config.rs b/crates/prover-services/sharp-service/src/config.rs index 6567448c..eddd4768 100644 --- a/crates/prover-services/sharp-service/src/config.rs +++ b/crates/prover-services/sharp-service/src/config.rs @@ -20,7 +20,7 @@ impl Default for SharpConfig { fn default() -> Self { Self { service_url: DEFAULT_SHARP_URL.parse().unwrap(), - rpc_node_url: "https://sepolia.drpc.org".parse().unwrap(), + rpc_node_url: "https://ethereum-sepolia-rpc.publicnode.com".parse().unwrap(), verifier_address: "0x07ec0D28e50322Eb0C159B9090ecF3aeA8346DFe".parse().unwrap(), } } diff --git a/crates/prover-services/sharp-service/src/error.rs b/crates/prover-services/sharp-service/src/error.rs index 7ea08582..c17e9feb 100644 --- a/crates/prover-services/sharp-service/src/error.rs +++ b/crates/prover-services/sharp-service/src/error.rs @@ -21,6 +21,8 @@ pub enum SharpError { TaskIdSplit, #[error("Failed to encode PIE")] PieEncode(#[source] snos::error::SnOsError), + #[error("Failed to get url as path segment mut. 
URL is cannot-be-a-base.")] + PathSegmentMutFailOnUrl, } impl From for ProverClientError { diff --git a/crates/prover-services/sharp-service/src/lib.rs b/crates/prover-services/sharp-service/src/lib.rs index 800fc835..1ef26004 100644 --- a/crates/prover-services/sharp-service/src/lib.rs +++ b/crates/prover-services/sharp-service/src/lib.rs @@ -1,6 +1,7 @@ pub mod client; pub mod config; pub mod error; +mod types; use std::str::FromStr; @@ -33,12 +34,8 @@ impl ProverClient for SharpProverService { let fact_info = get_fact_info(&cairo_pie, None)?; let encoded_pie = snos::sharp::pie::encode_pie_mem(cairo_pie).map_err(ProverClientError::PieEncoding)?; - let res = self.sharp_client.add_job(&encoded_pie).await?; - if let Some(job_key) = res.cairo_job_key { - Ok(combine_task_id(&job_key, &fact_info.fact)) - } else { - Err(ProverClientError::TaskInvalid(res.error_message.unwrap_or_default())) - } + let (_, job_key) = self.sharp_client.add_job(&encoded_pie).await?; + Ok(combine_task_id(&job_key, &fact_info.fact)) } } } @@ -47,6 +44,9 @@ impl ProverClient for SharpProverService { let (job_key, fact) = split_task_id(task_id)?; let res = self.sharp_client.get_job_status(&job_key).await?; match res.status { + // TODO : We would need to remove the FAILED, UNKNOWN, NOT_CREATED status as it is not in the sharp client response specs : + // https://docs.google.com/document/d/1-9ggQoYmjqAtLBGNNR2Z5eLreBmlckGYjbVl0khtpU0 + // We are waiting for the official public API spec before making changes CairoJobStatus::FAILED => Ok(TaskStatus::Failed(res.error_log.unwrap_or_default())), CairoJobStatus::INVALID => { Ok(TaskStatus::Failed(format!("Task is invalid: {:?}", res.invalid_reason.unwrap_or_default()))) @@ -59,7 +59,7 @@ impl ProverClient for SharpProverService { if self.fact_checker.is_valid(&fact).await? 
{ Ok(TaskStatus::Succeeded) } else { - Ok(TaskStatus::Failed(format!("Fact {} is not valid or not registed", hex::encode(fact)))) + Ok(TaskStatus::Failed(format!("Fact {} is not valid or not registered", hex::encode(fact)))) } } } @@ -77,6 +77,13 @@ impl SharpProverService { let fact_checker = FactChecker::new(sharp_cfg.rpc_node_url, sharp_cfg.verifier_address); Self::new(sharp_client, fact_checker) } + + pub fn with_test_settings(settings: &impl SettingsProvider, port: u16) -> Self { + let sharp_cfg: SharpConfig = settings.get_settings(SHARP_SETTINGS_NAME).unwrap(); + let sharp_client = SharpClient::new(format!("http://127.0.0.1:{}", port).parse().unwrap()); + let fact_checker = FactChecker::new(sharp_cfg.rpc_node_url, sharp_cfg.verifier_address); + Self::new(sharp_client, fact_checker) + } } /// Construct SHARP specific task ID from job key and proof fact diff --git a/crates/prover-services/sharp-service/src/types.rs b/crates/prover-services/sharp-service/src/types.rs new file mode 100644 index 00000000..89c18352 --- /dev/null +++ b/crates/prover-services/sharp-service/src/types.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; +use snos::sharp::{CairoJobStatus, InvalidReason}; + +#[derive(Default, Debug, Clone, Serialize, Deserialize)] +pub struct SharpAddJobResponse { + pub code: Option, + pub message: Option, +} + +#[derive(Default, Debug, Clone, Serialize, Deserialize)] +pub struct SharpGetProofResponse { + pub code: Option, +} + +#[derive(Default, Debug, Clone, Deserialize)] +pub struct SharpGetStatusResponse { + pub status: CairoJobStatus, + pub invalid_reason: Option, + pub error_log: Option, + pub validation_done: Option, +} diff --git a/crates/prover-services/sharp-service/tests/constants.rs b/crates/prover-services/sharp-service/tests/constants.rs new file mode 100644 index 00000000..ae2db6cb --- /dev/null +++ b/crates/prover-services/sharp-service/tests/constants.rs @@ -0,0 +1,2 @@ +pub const CAIRO_PIE_PATH: &str = 
"/tests/artifacts/238996-SN.zip"; +pub const TEST_FACT: &str = "924cf8d0b955a889fd254b355bb7b29aa9582a370f26943acbe85b2c1a0b201b"; diff --git a/crates/prover-services/sharp-service/tests/lib.rs b/crates/prover-services/sharp-service/tests/lib.rs new file mode 100644 index 00000000..814b98f6 --- /dev/null +++ b/crates/prover-services/sharp-service/tests/lib.rs @@ -0,0 +1,132 @@ +use crate::constants::{CAIRO_PIE_PATH, TEST_FACT}; +use alloy::primitives::B256; +use cairo_vm::vm::runners::cairo_pie::CairoPie; +use httpmock::MockServer; +use prover_client_interface::{ProverClient, Task, TaskId, TaskStatus}; +use rstest::rstest; +use serde_json::json; +use sharp_service::{split_task_id, SharpProverService}; +use snos::sharp::CairoJobStatus; +use std::str::FromStr; +use utils::env_utils::get_env_var_or_panic; +use utils::settings::default::DefaultSettingsProvider; + +mod constants; + +#[rstest] +#[tokio::test] +async fn prover_client_submit_task_works() { + dotenvy::from_filename("../.env.test").expect("Failed to load the .env file"); + + let server = MockServer::start(); + let sharp_service = SharpProverService::with_test_settings(&DefaultSettingsProvider {}, server.port()); + let cairo_pie_path = env!("CARGO_MANIFEST_DIR").to_string() + CAIRO_PIE_PATH; + let cairo_pie = CairoPie::read_zip_file(cairo_pie_path.as_ref()).unwrap(); + + let sharp_response = json!( + { + "code" : "JOB_RECEIVED_SUCCESSFULLY" + } + ); + let customer_id = get_env_var_or_panic("SHARP_CUSTOMER_ID"); + let sharp_add_job_call = server.mock(|when, then| { + when.path_contains("/add_job").query_param("customer_id", customer_id.as_str()); + then.status(200).body(serde_json::to_vec(&sharp_response).unwrap()); + }); + + let task_id = sharp_service.submit_task(Task::CairoPie(cairo_pie)).await.unwrap(); + let (_, fact) = split_task_id(&task_id).unwrap(); + + // Comparing the calculated fact with on chain verified fact. 
+ // You can check on etherscan by calling `isValid` function on GpsStatementVerifier.sol + // Contract Link : https://etherscan.io/address/0x9fb7F48dCB26b7bFA4e580b2dEFf637B13751942#readContract#F9 + assert_eq!(fact, B256::from_str("0xec8fa9cdfe069ed59b8f17aeecfd95c6abd616379269d2fa16a80955b6e0f068").unwrap()); + + sharp_add_job_call.assert(); +} + +#[rstest] +#[case(CairoJobStatus::FAILED)] +#[case(CairoJobStatus::INVALID)] +#[case(CairoJobStatus::UNKNOWN)] +#[case(CairoJobStatus::IN_PROGRESS)] +#[case(CairoJobStatus::NOT_CREATED)] +#[case(CairoJobStatus::PROCESSED)] +#[case(CairoJobStatus::ONCHAIN)] +#[tokio::test] +async fn prover_client_get_task_status_works(#[case] cairo_job_status: CairoJobStatus) { + dotenvy::from_filename("../.env.test").expect("Failed to load the .env file"); + + let server = MockServer::start(); + let sharp_service = SharpProverService::with_test_settings(&DefaultSettingsProvider {}, server.port()); + let customer_id = get_env_var_or_panic("SHARP_CUSTOMER_ID"); + + let sharp_add_job_call = server.mock(|when, then| { + when.path_contains("/get_status").query_param("customer_id", customer_id.as_str()); + then.status(200).body(serde_json::to_vec(&get_task_status_sharp_response(&cairo_job_status)).unwrap()); + }); + + let task_status = sharp_service + .get_task_status(&TaskId::from(format!("c31381bf-4739-4667-b5b8-b08af1c6b1c7:0x{}", TEST_FACT))) + .await + .unwrap(); + assert_eq!(task_status, get_task_status_expectation(&cairo_job_status), "Cairo Job Status assertion failed"); + + sharp_add_job_call.assert(); +} + +fn get_task_status_expectation(cairo_job_status: &CairoJobStatus) -> TaskStatus { + match cairo_job_status { + CairoJobStatus::FAILED => TaskStatus::Failed("Sharp task failed".to_string()), + CairoJobStatus::INVALID => TaskStatus::Failed("Task is invalid: INVALID_CAIRO_PIE_FILE_FORMAT".to_string()), + CairoJobStatus::UNKNOWN => TaskStatus::Failed("".to_string()), + CairoJobStatus::IN_PROGRESS | CairoJobStatus::NOT_CREATED | 
CairoJobStatus::PROCESSED => TaskStatus::Processing, + CairoJobStatus::ONCHAIN => TaskStatus::Failed(format!("Fact {} is not valid or not registered", TEST_FACT)), + } +} + +fn get_task_status_sharp_response(cairo_job_status: &CairoJobStatus) -> serde_json::Value { + match cairo_job_status { + CairoJobStatus::FAILED => json!( + { + "status" : "FAILED", + "error_log" : "Sharp task failed" + } + ), + CairoJobStatus::INVALID => json!( + { + "status": "INVALID", + "invalid_reason": "INVALID_CAIRO_PIE_FILE_FORMAT", + "error_log": "The Cairo PIE file has a wrong format. Deserialization ended with exception: Invalid prefix for zip file.."} + ), + CairoJobStatus::UNKNOWN => json!( + { + "status" : "FAILED" + } + ), + CairoJobStatus::IN_PROGRESS => json!( + { + "status": "IN_PROGRESS", + "validation_done": false + } + ), + CairoJobStatus::NOT_CREATED => json!( + { + "status": "NOT_CREATED", + "validation_done": false + } + ), + CairoJobStatus::PROCESSED => json!( + { + "status": "PROCESSED", + "validation_done": false + } + ), + CairoJobStatus::ONCHAIN => json!( + { + "status": "ONCHAIN", + "validation_done": true + } + ), + } +}