Skip to content

Commit

Permalink
snos release patches (#162)
Browse files Browse the repository at this point in the history
* feat : added snos failing job handling

* chore : refactor and state update worker updated

* merge

* feat : added snos latest block env

* feat : updated env vars

* feat : updated workers

* feat : updated and fixed workers and jobs

* fix : tests

* feat : removed hacky code

* refactor

* update snos version in ci

* add block time to anvil testing

* changes in settlement client

* fix blob test

* increase visibility timeout

* fixes

* fix prio fees and verif q bug

* coverage fix

* Add no fail fast

* fixes

* fix comment

* fix clippy

---------

Co-authored-by: Arun Jangra <[email protected]>
  • Loading branch information
apoorvsadana and ocdbytes authored Oct 18, 2024
1 parent 1b21d4d commit 82024c7
Show file tree
Hide file tree
Showing 28 changed files with 475 additions and 385 deletions.
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

HOST=
PORT=
MAX_BLOCK_TO_PROCESS=

##### AWS CONFIG #####

Expand Down
6 changes: 4 additions & 2 deletions .env.test
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

HOST=127.0.0.1
PORT=3000
MAX_BLOCK_TO_PROCESS=
MIN_BLOCK_TO_PROCESS=

##### AWS CONFIG #####

Expand Down Expand Up @@ -31,7 +33,7 @@ SQS_PROVING_JOB_VERIFICATION_QUEUE_URL="http://sqs.us-east-1.localhost.localstac
SQS_DATA_SUBMISSION_JOB_PROCESSING_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_data_submission_job_processing_queue"
SQS_DATA_SUBMISSION_JOB_VERIFICATION_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_data_submission_job_verification_queue"

SQS_UPDATE_STATE_JOB_PROCESSING_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_update_state_job_verification_queue"
SQS_UPDATE_STATE_JOB_PROCESSING_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_update_state_job_processing_queue"
SQS_UPDATE_STATE_JOB_VERIFICATION_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_update_state_job_verification_queue"

SQS_JOB_HANDLE_FAILURE_QUEUE_URL="http://sqs.us-east-1.localhost.localstack.cloud:4566/000000000000/madara_orchestrator_job_handle_failure_queue"
Expand All @@ -52,7 +54,7 @@ DATABASE_NAME="orchestrator"

PROVER_SERVICE="sharp"
SHARP_CUSTOMER_ID="sharp_consumer_id"
SHARP_URL="http://127.0.0.1:5000"
SHARP_URL="http://127.0.0.1:6000"
# [IMP!!!] These are test certificates (they don't work)
SHARP_USER_CRT="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUR4ekNDQXErZ0F3SUJBZ0lVTjBSK0xpb1MzL2ZadUZsK291RjZNNFk2RnRZd0RRWUpLb1pJaHZjTkFRRUwKQlFBd2N6RUxNQWtHQTFVRUJoTUNTVTR4RXpBUkJnTlZCQWdNQ2xOdmJXVXRVM1JoZEdVeElUQWZCZ05WQkFvTQpHRWx1ZEdWeWJtVjBJRmRwWkdkcGRITWdVSFI1SUV4MFpERU5NQXNHQTFVRUF3d0VVMVJTU3pFZE1Cc0dDU3FHClNJYjNEUUVKQVJZT1lXSmpRR3RoY201dmRDNTRlWG93SGhjTk1qUXdPREV6TVRNd05UTTBXaGNOTWpVd09ERXoKTVRNd05UTTBXakJ6TVFzd0NRWURWUVFHRXdKSlRqRVRNQkVHQTFVRUNBd0tVMjl0WlMxVGRHRjBaVEVoTUI4RwpBMVVFQ2d3WVNXNTBaWEp1WlhRZ1YybGtaMmwwY3lCUWRIa2dUSFJrTVEwd0N3WURWUVFEREFSVFZGSkxNUjB3Ckd3WUpLb1pJaHZjTkFRa0JGZzVoWW1OQWEyRnlibTkwTG5oNWVqQ0NBU0l3RFFZSktvWklodmNOQVFFQkJRQUQKZ2dFUEFEQ0NBUW9DZ2dFQkFOSEtaUGRqWSs4QWo4ZFV2V0xReEl5NTNrK1BHY001T2FlYnpTV3FER0xGSlBOdgpkVzJvWjFCSnNEb2hobWZFSCt5ZEFoQXEvbzc4NDljblg2VDJTOVhta25wdnNud2dRckU5Z3lqSmV3MUxBRzNHCm10U0lOMWJJSm9peWJ3QUR5NGxPd0xrVzUzdFdueHBSazVVVmZUU1hLYVRRTnlHd2o3Q2xMSGthcnlZYVk3OVkKOXlHMFJ2RkFkb1IzczBveWthNkFLV0d1WjhOdWd4NTY2bysyWllRenJteWVNU1NGYkhNdW1aUkxYb0hpazhBSgpLZXJ0bnNBRC9LMVJRYm80Y21ubHFoTVRhQktiTEFVVjVteFVvMlpveFBJVU9tREE5N3IyMmRTYkRkRlVjeC9kCjhQcDB6VXNycXdQckJlcW5SMXdLOE80MUlHajUzRnUzVmxDeS94MENBd0VBQWFOVE1GRXdIUVlEVlIwT0JCWUUKRkc0T0lvKzcvckJyZlR4S2FFMGx2L1dwRDJ3UE1COEdBMVVkSXdRWU1CYUFGRzRPSW8rNy9yQnJmVHhLYUUwbAp2L1dwRDJ3UE1BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFEMURDZkR3CnpoSXRGMWd5YVdhWURZRHErZjJSUHBFRWVaWk1BSDdJV0ZTajRrTzhmVHN1RnN6bFoyNXNlR3ZHYW4xQ3F4alQKYnJ3MXliVlJQeGZMUWgxRlZMMGhFeDZWYXhGditxMmtqUmlCQmZURFBxWGxYcmpaaUYrZTNPS3lKSVhnNkpIUAppbVpBV0dyRFBHNkorQi90bHRaQ3VLZVhLK1FUcnRSOVVCL29hOWVaQWc5RXNkOVJsZDRNeVo5b0NtdUNPU1hmCnk1THFkVlgrNENpTnJXQ3BwM1B2M2MyL28rZ0RMQjUzZ252R056RjR6Q1FIZ0RtN0RNZnpmZlY1TUMwV1MvWXkKVnpyUG11Sys0Y0tSK3dMOFZITVNEeC9ybTFhYnh0dEN2VW92MUw5dVZ1QUNGc29yNmdsR0N1RDNNQ0dIa0pNNgpxaS8rM1haeHhxeGw1Rzg9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K"
SHARP_USER_KEY="LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRRFJ5bVQzWTJQdkFJL0gKVkwxaTBNU011ZDVQanhuRE9UbW5tODBscWd4aXhTVHpiM1Z0cUdkUVNiQTZJWVpueEIvc25RSVFLdjZPL09QWApKMStrOWt2VjVwSjZiN0o4SUVLeFBZTW95WHNOU3dCdHhwclVpRGRXeUNhSXNtOEFBOHVKVHNDNUZ1ZDdWcDhhClVaT1ZGWDAwbHltazBEY2hzSSt3cFN4NUdxOG1HbU8vV1BjaHRFYnhRSGFFZDdOS01wR3VnQ2xocm1mRGJvTWUKZXVxUHRtV0VNNjVzbmpFa2hXeHpMcG1VUzE2QjRwUEFDU25xN1o3QUEveXRVVUc2T0hKcDVhb1RFMmdTbXl3RgpGZVpzVktObWFNVHlGRHBnd1BlNjl0blVtdzNSVkhNZjNmRDZkTTFMSzZzRDZ3WHFwMGRjQ3ZEdU5TQm8rZHhiCnQxWlFzdjhkQWdNQkFBRUNnZ0VBQU9mcDFiT2xLOVFKeXVlUHhjeDIvTkNVcUMxTEJDL01FdkEyUzVKWGFWbkcKbGhLR0pFb1U0Q0RoVk83dUlLYVZLTFZvMjk4RHFHUnBLM1d0RVE1TE40bytXYTcveTA5c1drMlVzbWxrVWFOZwpSaGtVZEJSK2dLNXVsQ3FKRml2dUJoTEQvRWlnQ1VWUGZKS2JtNG96TnpYcjVSMU5ENlV1aWFtODdtenlFcTBLCmZsVXlhR0RZNGdIdFNBOVBENVBFYlUveFpKeitKaHk5T2l3aVRXV0MrSHoyb2c3UWRDRDE2RlhGcit2VHpQN0MKb2tFb0VDZFNPRWlMalVENjBhS2ZxRmFCVm5MTkVudC9QSytmY1RBM05mNGtSMnFDNk9ZWjVFb09zYm1ka29ZTgpyU3NJZW9XblMxOEhvekZud2w3Z05wTUtjNmRzQzRBTldOVDFsTkhCb1FLQmdRRHlaUDFJSlppZUh6NlExaUVTCm5zd2tnblZCQUQ0SlVLR1ZDMHA3dk4yclNDZXh4c05ZZXFPTEEyZGZCUGpOVjd3blFKcUgxT05XellOMUJVSUUKeThLTCtFZVl6Q3RZa21LL21wSGJIMzNjd2tJODBuMHJROU1BalZMTlJ2YVVEOWp1NFBsRzFqaEFZUVVyTkViZQpKRlVpSk83aDVQa1llZG50SitqSHFpQnRoUUtCZ1FEZGtPbndmL0szYk4xenR0bXZQd0VicjhkVWJjRVh5NDFOCkl5VWwrZW1WSlgzYktKM0duNDZnQ2RsTTdkYmpwS3JVZ3oxL2JsZTgvMkVFckJvSEFRNkMrU2pEaGhvL01CbnIKekZheTBoK3YxbjBnZnNNVzRoOEF4cEFwc25OYnh6K2g1Wm5uSnRTd0srUjB3U0VJVVEzRjAxL2hMWWhLQ2l5OApwbW5HQi9hU3VRS0JnRzdxd1cvVExGd214ZlYyMXBsenFzeUdHZXVObGRXalhOMGIxcEI2b3lDdW11TmhwYUFHCk5uSDFNOGNxT2tPVWd4ZWZHMWRPbGx6eEc5ZGZlWTlDUWhyVW1NYVZucndmK0NuZkxDRU43d1VtcXpLenl1MFMKVXlwc2dOaElRYXNNK1dLTjllTnhRVHBNYXhZVERONjMxM0VSWDNKazJZdFdydDh6cFBSQXFDZ1ZBb0dCQU54egpUa0NMbmJ6aFphbTNlZm9DenlCMEVma3dSdHBkSGxkc3E0NlFqTmRuK1VSd3NpTXBLR2lWeEE3bDZsU1B4NlV3CmU2VHA3Z1JQZUlHRWwxVDJ1VENacGZSODNtcVdlb1FCeVJXZE9nZmplcFkxYWZpL3ZhY3c2Y21ERTRKeXloNVUKYTMveFE5ZVJwSHFDbWxKREMxZ1V5eVlwL3B2a2Fj
UytNeW5sVEhHSkFvR0FQekdTSzdXOHBUYldSVEFoaTVrSQpwZk5kWk1tcnRodUxNT3F6TGhyRjZublpldk9OdTBoYXVhZktlVElFd2w0clhYZHFKQlJBaWZKMFFsLzZKWFFkCmd1VzFrZWk1Ui8rUFZ5eUhab042c3NXSTNWYklwUUloUmt6UENnTDZhbHEwSzFpT1dlV1lIOHdORGRRdlB1T2UKRkZPOEovSzNxV0NtWjU0ODBBbTNhT0U9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K"
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/coverage.yml
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ jobs:
# Navigating to snos
cd snos-59fe8329bb16fe65
# Navigating to the build
cd 3bd95bf
cd af74c75
# Activating the venv
source ~/cairo_venv/bin/activate
# Building the cairo lang repo requirements
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ jobs:
then
echo "Anvil is installed. Version information:"
anvil --version
anvil &
anvil --block-time 6 &
else
echo "Anvil is not installed or not in PATH"
exit 1
Expand All @@ -86,7 +86,7 @@ jobs:
# Navigating to snos
cd snos-59fe8329bb16fe65
# Navigating to the build
cd 3bd95bf
cd af74c75
# Activating the venv
source ~/cairo_venv/bin/activate
# Building the cairo lang repo requirements
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/linters-cargo.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
# Navigating to snos
cd snos-59fe8329bb16fe65
# Navigating to the build
cd 3bd95bf
cd af74c75
# Activating the venv
source ~/cairo_venv/bin/activate
# Building the cairo lang repo requirements
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/rust-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ jobs:
# Navigating to snos
cd snos-59fe8329bb16fe65
# Navigating to the build
cd 3bd95bf
cd af74c75
# Activating the venv
source ~/cairo_venv/bin/activate
# Building the cairo lang repo requirements
Expand Down
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).

## Fixed

- fixes after sepolia testing
- all failed jobs should move to failed state
- Fixes all unwraps() in code to improve error logging
- Simplified Update_Job for Database.
Expand Down
12 changes: 6 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -109,8 +109,8 @@ cairo-vm = { git = "https://github.com/Moonsong-Labs/cairo-vm", branch = "notles

# Snos & Sharp (Starkware)
# TODO: Update branch to main once the PR is merged (PR #368 in the snos repository)
starknet-os = { git = "https://github.com/keep-starknet-strange/snos", rev = "3bd95bfb315a596519b5b1d3c81114fbfa06f8a6" }
prove_block = { git = "https://github.com/keep-starknet-strange/snos", rev = "3bd95bfb315a596519b5b1d3c81114fbfa06f8a6" }
starknet-os = { git = "https://github.com/keep-starknet-strange/snos", rev = "af74c7599231c25ad9c342f6888668680a9782d9" }
prove_block = { git = "https://github.com/keep-starknet-strange/snos", rev = "af74c7599231c25ad9c342f6888668680a9782d9" }

# Madara prover API
madara-prover-common = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" }
Expand Down
5 changes: 5 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,11 @@ COPY --from=builder /usr/src/madara-orchestrator/package.json .
COPY --from=builder /usr/src/madara-orchestrator/migrate-mongo-config.js .
COPY --from=builder /usr/src/madara-orchestrator/migrations ./migrations

# To be fixed by this https://github.com/keep-starknet-strange/snos/issues/404
RUN mkdir -p /usr/local/cargo/git/checkouts/snos-59fe8329bb16fe65/af74c75/crates/starknet-os/kzg
COPY ./crates/da-clients/ethereum/trusted_setup.txt /usr/local/cargo/git/checkouts/snos-59fe8329bb16fe65/af74c75/crates/starknet-os/kzg/trusted_setup.txt
COPY ./crates/da-clients/ethereum/trusted_setup.txt /usr/src/madara-orchestrator/crates/settlement-clients/ethereum/src/trusted_setup.txt

# Create a startup script
RUN echo '#!/bin/bash\n\
npm run migrate up\n\
Expand Down
6 changes: 5 additions & 1 deletion crates/orchestrator/src/database/mongodb/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,11 @@ impl MongoDb {
// Get a handle to the cluster
let client = Client::with_options(client_options).expect("Failed to create MongoDB client");
// Ping the server to see if you can connect to the cluster
client.database("admin").run_command(doc! {"ping": 1}, None).await.expect("Failed to ping MongoDB deployment");
client
.database("orchestrator")
.run_command(doc! {"ping": 1}, None)
.await
.expect("Failed to ping MongoDB deployment");
tracing::debug!("Pinged your deployment. You successfully connected to MongoDB!");

Self { client, database_name: mongo_db_settings.database_name }
Expand Down
2 changes: 1 addition & 1 deletion crates/orchestrator/src/jobs/da_job/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ impl Job for DaJob {
}
}

#[tracing::instrument(skip(elements), ret)]
#[tracing::instrument(skip(elements))]
pub fn fft_transformation(elements: Vec<BigUint>) -> Vec<BigUint> {
let xs: Vec<BigUint> = (0..*BLOB_LEN)
.map(|i| {
Expand Down
22 changes: 13 additions & 9 deletions crates/orchestrator/src/jobs/state_update_job/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -130,16 +130,19 @@ impl Job for StateUpdateJob {
let mut sent_tx_hashes: Vec<String> = Vec::with_capacity(block_numbers.len());
for block_no in block_numbers.iter() {
tracing::debug!(job_id = %job.internal_id, block_no = %block_no, "Processing block");

let snos = self.fetch_snos_for_block(*block_no, config.clone()).await;
let tx_hash = self.update_state_for_block(config.clone(), *block_no, snos, nonce).await.map_err(|e| {
tracing::error!(job_id = %job.internal_id, block_no = %block_no, error = %e, "Error updating state for block");
job.metadata.insert(JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO.into(), block_no.to_string());
self.insert_attempts_into_metadata(job, &attempt_no, &sent_tx_hashes);
StateUpdateError::Other(OtherError(eyre!(
"Block #{block_no} - Error occurred during the state update: {e}"
)))
})?;
sent_tx_hashes.push(tx_hash);
let txn_hash = self
.update_state_for_block(config.clone(), *block_no, snos, nonce)
.await
.map_err(|e| {
tracing::error!(job_id = %job.internal_id, block_no = %block_no, error = %e, "Error updating state for block");
job.metadata.insert(JOB_METADATA_STATE_UPDATE_LAST_FAILED_BLOCK_NO.into(), block_no.to_string());
self.insert_attempts_into_metadata(job, &attempt_no, &sent_tx_hashes);
OtherError(eyre!("Block #{block_no} - Error occurred during the state update: {e}"));
})
.unwrap();
sent_tx_hashes.push(txn_hash);
nonce += 1;
}

Expand Down Expand Up @@ -318,6 +321,7 @@ impl StateUpdateJob {
.map_err(|e| JobError::Other(OtherError(e)))?;

let program_output = self.fetch_program_output_for_block(block_no, config.clone()).await;

// TODO :
// Fetching nonce before the transaction is run
// Sending update_state transaction from the settlement client
Expand Down
5 changes: 4 additions & 1 deletion crates/orchestrator/src/queue/job_queue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,7 +405,10 @@ pub async fn init_consumers(config: Arc<Config>) -> Result<(), JobError> {

/// To spawn the worker by passing the worker struct
async fn spawn_worker(worker: Box<dyn Worker>, config: Arc<Config>) -> color_eyre::Result<()> {
worker.run_worker_if_enabled(config).await.expect("Error in running the worker.");
if let Err(e) = worker.run_worker_if_enabled(config).await {
log::error!("Failed to spawn worker. Error: {}", e);
return Err(e);
}
Ok(())
}
async fn add_job_to_queue(id: Uuid, queue: String, delay: Option<Duration>, config: Arc<Config>) -> EyreResult<()> {
Expand Down
14 changes: 2 additions & 12 deletions crates/orchestrator/src/tests/jobs/state_update_job/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::fs;
use std::fs::read_to_string;
use std::path::PathBuf;
use std::{env, fs};

use assert_matches::assert_matches;
use bytes::Bytes;
Expand All @@ -27,7 +27,7 @@ use crate::jobs::state_update_job::{StateUpdateError, StateUpdateJob};
use crate::jobs::types::{JobStatus, JobType};
use crate::jobs::{Job, JobError};
use crate::tests::common::default_job_item;
use crate::tests::config::TestConfigBuilder;
use crate::tests::config::{ConfigType, TestConfigBuilder};

lazy_static! {
pub static ref CURRENT_PATH: PathBuf = std::env::current_dir().expect("Failed to get Current Path");
Expand Down Expand Up @@ -60,18 +60,8 @@ async fn test_process_job_works(
#[case] blocks_to_process: String,
#[case] processing_start_index: u8,
) {
// Will be used by storage client which we call while storing the data.

use crate::tests::config::ConfigType;

let aws_region = env::var("AWS_REGION").unwrap();
println!("AWS_REGION: {}", aws_region);

dotenvy::from_filename("../.env.test").expect("Failed to load the .env file");

let aws_region = env::var("AWS_REGION").unwrap();
println!("AWS_REGION: {}", aws_region);

// Mocking the settlement client.
let mut settlement_client = MockSettlementClient::new();

Expand Down
34 changes: 21 additions & 13 deletions crates/orchestrator/src/tests/workers/snos/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use uuid::Uuid;

use crate::database::MockDatabase;
use crate::jobs::job_handler_factory::mock_factory;
use crate::jobs::types::{JobStatus, JobType};
use crate::jobs::types::JobType;
use crate::jobs::{Job, MockJob};
use crate::queue::job_queue::SNOS_JOB_PROCESSING_QUEUE;
use crate::queue::MockQueueProvider;
Expand All @@ -39,20 +39,30 @@ async fn test_snos_worker(#[case] db_val: bool) -> Result<(), Box<dyn Error>> {

// Mocking db function expectations
if !db_val {
db.expect_get_latest_job_by_type_and_status()
.times(1)
.with(eq(JobType::SnosRun), eq(JobStatus::Completed))
db.expect_get_latest_job_by_type().with(eq(JobType::SnosRun)).returning(|_| Ok(None));
db.expect_get_job_by_internal_id_and_type()
.with(eq(0.to_string()), eq(JobType::SnosRun))
.returning(|_, _| Ok(None));
db.expect_get_job_by_internal_id_and_type()
.with(eq(1.to_string()), eq(JobType::SnosRun))
.returning(|_, _| Ok(None));
start_job_index = 1;

start_job_index = 0;
block = 5;
} else {
let uuid_temp = Uuid::new_v4();

db.expect_get_latest_job_by_type_and_status()
.with(eq(JobType::SnosRun), eq(JobStatus::Completed))
db.expect_get_latest_job_by_type()
.with(eq(JobType::SnosRun))
.returning(move |_| Ok(Some(get_job_item_mock_by_id("1".to_string(), uuid_temp))));
db.expect_get_job_by_internal_id_and_type()
.with(eq(0.to_string()), eq(JobType::SnosRun))
.returning(move |_, _| Ok(Some(get_job_item_mock_by_id("0".to_string(), uuid_temp))));
db.expect_get_job_by_internal_id_and_type()
.with(eq(1.to_string()), eq(JobType::SnosRun))
.returning(move |_, _| Ok(Some(get_job_item_mock_by_id("1".to_string(), uuid_temp))));
block = 6;

start_job_index = 2;
block = 6;
}

for i in start_job_index..block + 1 {
Expand All @@ -65,20 +75,18 @@ async fn test_snos_worker(#[case] db_val: bool) -> Result<(), Box<dyn Error>> {

let job_item = get_job_item_mock_by_id(i.clone().to_string(), uuid);
let job_item_cloned = job_item.clone();

job_handler.expect_create_job().times(1).returning(move |_, _, _| Ok(job_item_cloned.clone()));
job_handler.expect_create_job().returning(move |_, _, _| Ok(job_item_cloned.clone()));

// creating jobs call expectations
db.expect_create_job()
.times(1)
.withf(move |item| item.internal_id == i.clone().to_string())
.returning(move |_| Ok(job_item.clone()));
}

let job_handler: Arc<Box<dyn Job>> = Arc::new(Box::new(job_handler));
let ctx = mock_factory::get_job_handler_context();
// Mocking the `get_job_handler` call in create_job function.
ctx.expect().times(5).with(eq(JobType::SnosRun)).returning(move |_| Arc::clone(&job_handler));
ctx.expect().with(eq(JobType::SnosRun)).returning(move |_| Arc::clone(&job_handler));

// Queue function call simulations
queue
Expand Down
Loading

0 comments on commit 82024c7

Please sign in to comment.