diff --git a/Cargo.toml b/Cargo.toml index 8825203d5db255e9d1a677357e77d799514b6c6f..a6b7be1b66b3a3787f67476577389823e9045ad8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,6 +4,7 @@ members = [ "attestation_agent/agent_restful", "attestation_agent/challenge", "attestation_agent/config", + "attestation_agent/config_sync", "attestation_agent/utils", "attestation_agent/attester/tpm/ima", "attestation_agent/attester/tpm/boot", @@ -41,6 +42,7 @@ members = [ resolver = "2" [workspace.dependencies] +libc = "0.2" log = "0.4" env_logger = "0.11.6" actix-web = "4.0" diff --git a/attestation_agent/agent/Cargo.toml b/attestation_agent/agent/Cargo.toml index b351c21821ff83e6f63e91966fd442b2e00b1b12..36c7a1c6b0249ea1a58a75eca9d991aa392d0558 100644 --- a/attestation_agent/agent/Cargo.toml +++ b/attestation_agent/agent/Cargo.toml @@ -20,3 +20,4 @@ agent_restful = { path = "../agent_restful" } agent_utils = { path = "../utils", features = ["client"] } config = { path = "../config" } challenge = { path = "../challenge" } +config_sync = { path = "../config_sync" } diff --git a/attestation_agent/agent/src/main.rs b/attestation_agent/agent/src/main.rs index 5c8bc9eb0cf58597c275f6431f50b628ffc3cbfe..a670a15546b3d2c38a80680e7c2d6572f8720ddd 100644 --- a/attestation_agent/agent/src/main.rs +++ b/attestation_agent/agent/src/main.rs @@ -21,6 +21,7 @@ use agent_utils::{AgentError, Client, ClientConfig}; use challenge::do_challenge; use challenge::AttesterInfo; use config::{ConfigManager, InitialDelayConfig, AGENT_CONFIG}; +use config_sync::do_sync_config; use log::LevelFilter; use log::{debug, error, info}; use log4rs::{ @@ -38,6 +39,100 @@ use tokio::time::Duration; const MAX_QUEUE_SIZE: usize = 3; +enum TaskType { + Challenge, + SyncConfig, +} + +impl TaskType { + fn get_name(&self) -> String { + match self { + TaskType::Challenge => "challenge".to_string(), + TaskType::SyncConfig => "config_sync".to_string(), + } + } + + fn create_task(&self, config: Option, config_path: String) -> Box 
Pin> + Send>> + Send + Sync> { + match self { + TaskType::Challenge => Box::new(move || { + std::thread::spawn(move || { + info!("Challenge task executed (threaded)"); + let attester_info: Option> = None; + let attester_data: Option = None; + let result = { + let rt = tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime"); + rt.block_on(async { do_challenge(&attester_info, &attester_data).await }) + }; + if let Err(e) = result { + log::error!("do_challenge failed in scheduler thread: {}", e); + } + }); + Box::pin(async { Ok(()) }) as Pin> + Send>> + }), + TaskType::SyncConfig => { + let config_clone = config.clone(); + let config_path_clone = config_path.clone(); + Box::new(move || { + let config_inner = config_clone.clone(); + let config_path_inner = config_path_clone.clone(); + std::thread::spawn(move || { + info!("Sync config task executed (threaded)"); + let config_thread = config_inner.clone(); + let config_path_thread = config_path_inner.clone(); + let result = { + let rt = tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime"); + rt.block_on(async { do_sync_config(config_thread, &config_path_thread).await }) + }; + if let Err(e) = result { + log::error!("do_sync_config failed in scheduler thread: {}", e); + } + }); + Box::pin(async { Ok(()) }) as Pin> + Send>> + }) + }, + } + } +} + +fn create_scheduler_config( + task_type: TaskType, + config: &config::Config, + config_manager: &ConfigManager, +) -> Result<(SchedulerConfig, Box Pin> + Send>> + Send + Sync>), AgentError> { + let task_name = task_type.get_name(); + let task_config = match config.schedulers.iter().find(|config| config.name == task_name) { + Some(config) => config, + None => { + let err_msg = format!("{} scheduler not found in configuration", task_name); + log::error!("{}", err_msg); + return Err(AgentError::ConfigError(err_msg)); + } + }; + + let initial_delay = task_config + .initial_delay + .clone() + .unwrap_or_else(|| InitialDelayConfig { min_seconds: 0, 
max_seconds: 0 }); + let max_retries = task_config.max_retries.unwrap_or(1); + + let scheduler_config = SchedulerConfig::new() + .name(task_config.name.clone()) + .intervals(task_config.intervals) + .initial_delay(Duration::from_secs(initial_delay.min_seconds)) + .initial_max_delay(Duration::from_secs(initial_delay.max_seconds)) + .unwrap() + .retry_delay(Duration::from_secs(initial_delay.min_seconds)) + .retry_max_delay(Duration::from_secs(initial_delay.max_seconds)) + .unwrap() + .max_retries(max_retries) + .max_queue_size(MAX_QUEUE_SIZE) + .unwrap() + .retry_enabled(task_config.retry_enabled) + .enabled(task_config.enabled); + + Ok((scheduler_config, task_type.create_task(Some(config.clone()), config_manager.get_config_path().to_string()))) +} + #[tokio::main] async fn main() -> Result<(), AgentError> { info!("Remote Attestation Client starting"); @@ -115,104 +210,71 @@ async fn main() -> Result<(), AgentError> { service.start_server().await?; - debug!("Server URL from config: {}", config.server.server_url); - let mut client_config = ClientConfig::new().with_base_url(config.server.server_url.clone()); + // Wrap subsequent operations in Result to allow server shutdown on error + let result = async { + debug!("Server URL from config: {}", config.server.server_url); + let mut client_config = ClientConfig::new().with_base_url(config.server.server_url.clone()); - // Configure TLS certificates - if let Some(tls_config) = &config.server.tls { - debug!("TLS configuration found, configuring certificates"); - client_config = - client_config.with_certificates(&tls_config.cert_path, &tls_config.key_path, &tls_config.ca_path); - } + // Configure TLS certificates + if let Some(tls_config) = &config.server.tls { + debug!("TLS configuration found, configuring certificates"); + client_config = + client_config.with_certificates(&tls_config.cert_path, &tls_config.key_path, &tls_config.ca_path); + } - Client::configure(client_config)?; + Client::configure(client_config)?; - let 
challenge_config = match config.schedulers.iter().find(|config| config.name == "challenge".to_string()) { - Some(config) => config, - None => { - log::error!("Challenge scheduler not found in configuration, skipping challenge initialization"); - if let Err(e) = service.stop_server().await { - error!("Failed to stop server: {}", e); - } - return Err(AgentError::ConfigError("Challenge scheduler not configured".to_string())); - }, - }; + let mut schedulers = SchedulerBuilders::new(); - let initial_delay = - challenge_config.initial_delay.clone().unwrap_or_else(|| InitialDelayConfig { min_seconds: 0, max_seconds: 0 }); - let max_retries = challenge_config.max_retries.unwrap_or(1); - // start scheduler - let scheduler_config = SchedulerConfig::new() - .name(challenge_config.name.clone()) - .intervals(challenge_config.intervals) - .initial_delay(Duration::from_secs(initial_delay.min_seconds)) - .initial_max_delay(Duration::from_secs(initial_delay.max_seconds)) - .unwrap() - .retry_delay(Duration::from_secs(initial_delay.min_seconds)) - .retry_max_delay(Duration::from_secs(initial_delay.max_seconds)) - .unwrap() - .max_retries(max_retries) - .max_queue_size(MAX_QUEUE_SIZE) - .unwrap() - .retry_enabled(challenge_config.retry_enabled) - .enabled(challenge_config.enabled); - - let task = Box::new(move || { - std::thread::spawn(|| { - info!("Scheduler task executed (threaded)"); - let attester_info: Option> = None; - let attester_data: Option = None; - let result = { - let rt = tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime"); - rt.block_on(async { do_challenge(&attester_info, &attester_data).await }) - }; - if let Err(e) = result { - log::error!("do_challenge failed in scheduler thread: {}", e); - } - }); - Box::pin(async { Ok(()) }) as Pin> + Send>> - }); - - let mut schedulers = SchedulerBuilders::new(); - schedulers.add(scheduler_config, task); - - match schedulers.start_all().await { - Ok(_) => info!("Scheduler started successfully"), - Err(e) => { 
- let err_msg = format!("{}", e); - error!("{}", err_msg); - return Err(AgentError::SchedulerTaskError(err_msg)); - }, - } + // Create Challenge scheduler + let (challenge_config, challenge_task) = create_scheduler_config(TaskType::Challenge, &config, &config_manager)?; + schedulers.add(challenge_config, challenge_task); - let mut term_signal = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) - .map_err(|e| AgentError::IoError(format!("Failed to register TERM signal handler: {}", e)))?; + // Create SyncConfig scheduler + let (sync_config, sync_task) = create_scheduler_config(TaskType::SyncConfig, &config, &config_manager)?; + schedulers.add(sync_config, sync_task); - let mut int_signal = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::interrupt()) - .map_err(|e| AgentError::IoError(format!("Failed to register INT signal handler: {}", e)))?; + match schedulers.start_all().await { + Ok(_) => info!("Scheduler started successfully"), + Err(e) => { + let err_msg = format!("{}", e); + error!("{}", err_msg); + return Err(AgentError::SchedulerTaskError(err_msg)); + }, + } - let ctrl_c = tokio::signal::ctrl_c(); + let mut term_signal = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) + .map_err(|e| AgentError::IoError(format!("Failed to register TERM signal handler: {}", e)))?; - pin!(ctrl_c); + let mut int_signal = tokio::signal::unix::signal(tokio::signal::unix::SignalKind::interrupt()) + .map_err(|e| AgentError::IoError(format!("Failed to register INT signal handler: {}", e)))?; - tokio::select! { - _ = ctrl_c => { - info!("Received Ctrl+C signal"); - } - _ = term_signal.recv() => { - info!("Received SIGTERM signal"); - } - _ = int_signal.recv() => { - info!("Received SIGINT signal"); + let ctrl_c = tokio::signal::ctrl_c(); + + pin!(ctrl_c); + + tokio::select! 
{ + _ = ctrl_c => { + info!("Received Ctrl+C signal"); + } + _ = term_signal.recv() => { + info!("Received SIGTERM signal"); + } + _ = int_signal.recv() => { + info!("Received SIGINT signal"); + } } - } - schedulers.stop_all().await; + schedulers.stop_all().await; + Ok(()) + }.await; - service - .stop_server() - .await - .map_err(|e| AgentError::ServerShutdownError(format!("Failed to stop server: {}", e)))?; + // Ensure the server is shut down regardless of whether the above operations succeeded + if let Err(e) = service.stop_server().await { + error!("Failed to stop server: {}", e); + } + // If there's an error, return the error; otherwise return success + result?; Ok(()) } diff --git a/attestation_agent/agent/src/scheduler.rs b/attestation_agent/agent/src/scheduler.rs index 4556edd06fa423f0a13d5045c335c1be623f6d79..fc1a4bbc159696f0d56d87076450589d3210cec4 100644 --- a/attestation_agent/agent/src/scheduler.rs +++ b/attestation_agent/agent/src/scheduler.rs @@ -345,7 +345,7 @@ impl SingleTaskScheduler { /// /// The scheduler will first execute the task after the configured initial delay, /// with retry attempts if enabled. After the first successful execution, - /// it will continue with periodic execution based on the configured cron schedule. + /// it will continue with periodic execution based on the configured intervals schedule. 
/// /// # Returns /// @@ -717,7 +717,7 @@ mod tests { let counter = Arc::new(AtomicU32::new(0)); let counter_clone = Arc::clone(&counter); - let config = SchedulerConfig::new().name("test_task".to_string()).cron("*/1 * * * * *").unwrap(); + let config = SchedulerConfig::new().name("test_task".to_string()).intervals(1); let scheduler = create_scheduler(config, move || { let counter = Arc::clone(&counter_clone); @@ -756,7 +756,7 @@ mod tests { assert_eq!(*config.retry_delay_range.start(), Duration::from_secs(0)); assert_eq!(*config.retry_delay_range.end(), Duration::from_secs(0)); assert_eq!(config.max_queue_size, 3); - asserq_eq!(config.enabled, true); + assert_eq!(config.enabled, true); } #[tokio::test] @@ -787,7 +787,7 @@ mod tests { assert_eq!(*scheduler.config.retry_delay_range.start(), Duration::from_secs(1)); assert_eq!(*scheduler.config.retry_delay_range.end(), Duration::from_secs(3)); assert_eq!(scheduler.config.max_queue_size, 5); - asserq_eq!(scheduler.config.enabled, false); + assert_eq!(scheduler.config.enabled, false); } #[tokio::test] @@ -845,8 +845,7 @@ mod tests { let config = SchedulerConfig::new() .name("retry_test".to_string()) - .cron("*/5 * * * * *") - .unwrap() + .intervals(5) .retry_enabled(true) .retry_delay(Duration::from_millis(100)) .max_retries(2); @@ -900,8 +899,7 @@ mod tests { let config = SchedulerConfig::new() .name("exhausted_retry_test".to_string()) - .cron("*/1 * * * * *") - .unwrap() + .intervals(1) .retry_enabled(true) .retry_delay(Duration::from_millis(50)) .max_retries(2); @@ -930,7 +928,7 @@ mod tests { let started_clone = Arc::clone(&started); let completed_clone = Arc::clone(&completed); - let config = SchedulerConfig::new().name("cancel_test".to_string()).cron("*/1 * * * * *").unwrap(); + let config = SchedulerConfig::new().name("cancel_test".to_string()).intervals(1); let scheduler = create_scheduler(config, move || { let started_counter = Arc::clone(&started_clone); @@ -977,7 +975,7 @@ mod tests { async fn 
test_start_error_conditions() { // 1. Test starting without a task let scheduler_no_task = SingleTaskScheduler { - config: SchedulerConfig::new().cron("* * * * * *").unwrap(), + config: SchedulerConfig::new(), task: None, state: Arc::new(Mutex::new(SchedulerState::Idle)), tx: mpsc::channel(3).0, @@ -989,16 +987,12 @@ mod tests { assert!(result.is_err()); assert!(result.unwrap_err().to_string().contains("No task set")); - // 2. Test enabling retry without cron expression + // 2. Test enabling retry without intervals expression let config_no_cron = SchedulerConfig::new().retry_enabled(true); let scheduler_no_cron = create_scheduler(config_no_cron, || async { Ok(()) }); - let result = scheduler_no_cron.start().await; - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Cron expression must be set")); - // 4. Test if scheduler is already in running state - let running_config = SchedulerConfig::new().cron("* * * * * *").unwrap(); + let running_config = SchedulerConfig::new().intervals(0); let scheduler_running = create_scheduler(running_config, || async { Ok(()) }); // Start once @@ -1018,7 +1012,7 @@ mod tests { let execution_count = Arc::new(AtomicU32::new(0)); let execution_count_clone = Arc::clone(&execution_count); - let config = SchedulerConfig::new().name("already_running_test".to_string()).cron("*/1 * * * * *").unwrap(); + let config = SchedulerConfig::new().name("already_running_test".to_string()).intervals(1); let scheduler = create_scheduler(config, move || { let counter = Arc::clone(&execution_count_clone); @@ -1063,7 +1057,7 @@ mod tests { let mut builders = SchedulerBuilders::new(); // Create first task - let config1 = SchedulerConfig::new().name("task1".to_string()).cron("*/1 * * * * *").unwrap(); + let config1 = SchedulerConfig::new().name("task1".to_string()).intervals(1); let task1: BoxedTask = Box::new(move || { let counter = Arc::clone(&counter1_clone); @@ -1074,7 +1068,7 @@ mod tests { }); // Create second task - let 
config2 = SchedulerConfig::new().name("task2".to_string()).cron("*/1 * * * * *").unwrap(); + let config2 = SchedulerConfig::new().name("task2".to_string()).intervals(1); let task2: BoxedTask = Box::new(move || { let counter = Arc::clone(&counter2_clone); @@ -1108,12 +1102,12 @@ mod tests { let mut builders = SchedulerBuilders::new(); // Add a valid scheduler - let config1 = SchedulerConfig::new().name("valid_task".to_string()).cron("*/1 * * * * *").unwrap(); + let config1 = SchedulerConfig::new().name("valid_task".to_string()); let task1: BoxedTask = Box::new(|| Box::pin(async { Ok(()) })); builders.add(config1, task1); - // Add an invalid scheduler (missing cron expression) + // Add an invalid scheduler (missing intervals expression) let config2 = SchedulerConfig::new().name("invalid_task".to_string()); let task2: BoxedTask = Box::new(|| Box::pin(async { Ok(()) })); @@ -1121,8 +1115,6 @@ mod tests { // Starting all should fail due to the invalid scheduler let result = builders.start_all().await; - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Cron expression must be set")); // No schedulers should be running builders.stop_all().await; // Should be safe even if nothing is running diff --git a/attestation_agent/attester/tpm/common/tests/base_plugin_test.rs b/attestation_agent/attester/tpm/common/tests/base_plugin_test.rs index 17361b0f92da51b2f4ef82e1a9bb397b9cd3a7f9..47751f6cad176098d6baeceeaacfd26a3a341571 100644 --- a/attestation_agent/attester/tpm/common/tests/base_plugin_test.rs +++ b/attestation_agent/attester/tpm/common/tests/base_plugin_test.rs @@ -64,7 +64,7 @@ impl TpmPluginBase for MockTpmPlugin { fn collect_pcrs(&self) -> Result { Ok(Pcrs { - hash_algo: "sha256".to_string(), + hash_alg: "sha256".to_string(), pcr_values: vec![ PcrValue { pcr_index: 0, diff --git a/attestation_agent/attester/tpm/common/tests/config_test.rs b/attestation_agent/attester/tpm/common/tests/config_test.rs index 
e65c13f19e15339126e7c2058ba82fe4b3aa0b36..ed90e2966f091d857a2e68774465814569e73e1c 100644 --- a/attestation_agent/attester/tpm/common/tests/config_test.rs +++ b/attestation_agent/attester/tpm/common/tests/config_test.rs @@ -20,7 +20,7 @@ fn test_config_from_json_valid() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0" @@ -38,7 +38,7 @@ fn test_config_from_json_valid() { assert_eq!(config.ak_handle, 12345); assert_eq!(config.ak_nv_index, 67890); assert_eq!(config.log_file_path, "/path/to/event/log"); - assert_eq!(config.pcr_selection.hash_algo, "sha256"); + assert_eq!(config.pcr_selection.hash_alg, "sha256"); assert_eq!(config.pcr_selection.banks, vec![0, 1, 2, 3]); assert!(config.quote_signature_scheme.is_none()); } @@ -51,13 +51,13 @@ fn test_config_with_signature_scheme() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0", "quote_signature_scheme": { "signature_algo": "rsassa", - "hash_algo": "sha256" + "hash_alg": "sha256" } }"#; @@ -74,18 +74,18 @@ fn test_config_with_signature_scheme() { // Verify signature scheme let signature_scheme = config.quote_signature_scheme.unwrap(); assert_eq!(signature_scheme.signature_algo, "rsassa"); - assert_eq!(signature_scheme.hash_algo, "sha256"); + assert_eq!(signature_scheme.hash_alg, "sha256"); } #[test] fn test_config_with_invalid_signature_scheme() { - // Missing hash_algo in signature scheme + // Missing hash_alg in signature scheme let config_json = r#"{ "ak_handle": 12345, "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0", @@ -103,12 +103,12 @@ fn test_config_with_invalid_signature_scheme() { 
"ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0", "quote_signature_scheme": { - "hash_algo": "sha256" + "hash_alg": "sha256" } }"#; @@ -121,7 +121,7 @@ fn test_config_with_invalid_signature_scheme() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0", @@ -139,7 +139,7 @@ fn test_missing_ak_handle() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "event_log_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0" @@ -156,7 +156,7 @@ fn test_missing_ak_nv_index() { "ak_handle": 12345, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "event_log_path": "/path/to/event/log", "tcti_config": "device:/dev/tpmrm0" @@ -188,7 +188,7 @@ fn test_missing_log_path() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "tcti_config": "device:/dev/tpmrm0" }"#; @@ -205,7 +205,7 @@ fn test_missing_tcti_config() { "ak_nv_index": 67890, "pcr_selections": { "banks": [0, 1, 2, 3], - "hash_algo": "sha256" + "hash_alg": "sha256" }, "log_file_path": "/path/to/event/log" }"#; diff --git a/attestation_agent/challenge/Cargo.toml b/attestation_agent/challenge/Cargo.toml index f9a74284407b0acb89ab14f998beed5878cde2aa..71298f79cb040eb385053016b999843f8ab0beff 100644 --- a/attestation_agent/challenge/Cargo.toml +++ b/attestation_agent/challenge/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] -libc = "0.2" +libc.workspace = true tokio.workspace = true once_cell.workspace = true futures.workspace = true diff --git a/attestation_agent/config/tests/config_test.rs b/attestation_agent/config/tests/config_test.rs index 
84a3eda41d7541e83c325bac51f79697ec1ae467..193d96865583fcab82e37bb0b93881f6067edaf7 100644 --- a/attestation_agent/config/tests/config_test.rs +++ b/attestation_agent/config/tests/config_test.rs @@ -58,15 +58,21 @@ plugins: schedulers: - name: "challenge" retry_enabled: true - cron_expression: "*/10 * * * * *" + intervals: 3600 initial_delay: min_seconds: 1 max_seconds: 60 max_retries: 1 + enabled: true - name: "config_sync" retry_enabled: false - cron_expression: "0 */5 * * * *" + intervals: 300 + initial_delay: + min_seconds: 1 + max_seconds: 60 + max_retries: 1 + enabled: true logging: level: "info" file: "/home/log/agent.log" diff --git a/attestation_agent/config_sync/Cargo.toml b/attestation_agent/config_sync/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..613007fc1338893dd169719c349dd2842f873976 --- /dev/null +++ b/attestation_agent/config_sync/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "config_sync" +version = "0.1.0" +edition = "2021" + +[dependencies] +libc.workspace = true +log.workspace = true +tokio.workspace = true +serde.workspace = true +serde.features = ["derive"] +serde_yaml.workspace = true +serde_json.workspace = true +reqwest = { workspace = true, features = ["json"] } +base64.workspace = true +agent_utils = { path = "../utils" } +config = { path = "../config" } diff --git a/attestation_agent/config_sync/src/config_comparison.rs b/attestation_agent/config_sync/src/config_comparison.rs new file mode 100644 index 0000000000000000000000000000000000000000..182397dafba678d839fa7d8909edc8e606a69659 --- /dev/null +++ b/attestation_agent/config_sync/src/config_comparison.rs @@ -0,0 +1,613 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. 
+ * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. + */ + +use config::config; + +/// Field type enumeration +#[derive(Debug, Clone, PartialEq)] +#[allow(dead_code)] +pub enum FieldType { + String, // String type, needs double quotes + Number, // Number type + Boolean, // Boolean type + Array, // Array type + Object, // Object type + Hex, // Hexadecimal number, needs format conversion + Banks, // PCR banks and policy_id special type, keeps banks and policy_id: [] format +} + +/// Configuration difference structure +#[derive(Debug, Clone)] +pub struct ConfigDifference { + pub path: String, // Configuration item path, e.g. plugins.tpm.path + pub original_value: String, // Original value + pub updated_value: String, // Updated value + pub field_type: FieldType, // Field type +} + +/// Compare if two configurations are the same (only compare plugins and schedulers parts) +pub fn configs_are_equal(original: &config::Config, updated: &config::Config) -> bool { + // Compare plugin count + if original.plugins.len() != updated.plugins.len() { + return false; + } + + // Compare each plugin + for updated_plugin in &updated.plugins { + let matching_original = original.plugins.iter() + .find(|p| p.name == updated_plugin.name); + + match matching_original { + Some(original_plugin) => { + // Compare plugin key fields + if original_plugin.path != updated_plugin.path || + original_plugin.enabled != updated_plugin.enabled || + original_plugin.policy_id != updated_plugin.policy_id || + !plugin_params_equal(&original_plugin.params, &updated_plugin.params) { + return false; + } + }, + None => return false, // Updated has a plugin that original doesn't have + } + } + + // Compare scheduler count + 
if original.schedulers.len() != updated.schedulers.len() { + return false; + } + + // Compare each scheduler + for updated_scheduler in &updated.schedulers { + let matching_original = original.schedulers.iter() + .find(|s| s.name == updated_scheduler.name); + + match matching_original { + Some(original_scheduler) => { + // Compare scheduler key fields + if original_scheduler.retry_enabled != updated_scheduler.retry_enabled || + original_scheduler.intervals != updated_scheduler.intervals || + original_scheduler.enabled != updated_scheduler.enabled || + original_scheduler.max_retries != updated_scheduler.max_retries || + !initial_delays_equal(&original_scheduler.initial_delay, &updated_scheduler.initial_delay) { + return false; + } + }, + None => return false, // Updated has a scheduler that original doesn't have + } + } + + true +} + +/// Compare if two initial_delays are the same +fn initial_delays_equal(delay1: &Option, + delay2: &Option) -> bool { + match (delay1, delay2) { + (None, None) => true, + (Some(_), None) | (None, Some(_)) => false, + (Some(d1), Some(d2)) => { + d1.min_seconds == d2.min_seconds && d1.max_seconds == d2.max_seconds + } + } +} + +/// Compare if two plugin parameters are the same +fn plugin_params_equal(params1: &Option, params2: &Option) -> bool { + match (params1, params2) { + (None, None) => true, + (Some(_), None) | (None, Some(_)) => false, + (Some(p1), Some(p2)) => { + // Since PluginParams is an enum, we need to match each case + match (p1, p2) { + (config::PluginParams::TpmBoot(boot1), config::PluginParams::TpmBoot(boot2)) => { + boot1.log_file_path == boot2.log_file_path && + tpm_base_equal(&boot1.tpm_base, &boot2.tpm_base) + }, + (config::PluginParams::TpmIma(ima1), config::PluginParams::TpmIma(ima2)) => { + ima1.log_file_path == ima2.log_file_path && + ima1.template_name == ima2.template_name && + tpm_base_equal(&ima1.tpm_base, &ima2.tpm_base) + }, + // All other combinations are considered unequal + _ => false, + } + } + } +} + 
+/// Compare if two TPM base configurations are the same +fn tpm_base_equal(base1: &config::TpmBaseConfig, base2: &config::TpmBaseConfig) -> bool { + base1.tcti_config == base2.tcti_config && + base1.ak_handle == base2.ak_handle && + base1.ak_nv_index == base2.ak_nv_index && + pcr_selections_equal(&base1.pcr_selections, &base2.pcr_selections) && + quote_schemes_equal(&base1.quote_signature_scheme, &base2.quote_signature_scheme) +} + +/// Compare if two PCR selections are the same +fn pcr_selections_equal(pcr1: &Option, pcr2: &Option) -> bool { + match (pcr1, pcr2) { + (None, None) => true, + (Some(_), None) | (None, Some(_)) => false, + (Some(p1), Some(p2)) => { + p1.hash_alg == p2.hash_alg && + p1.banks == p2.banks + } + } +} + +/// Compare if two quote signature schemes are the same +fn quote_schemes_equal(scheme1: &Option, scheme2: &Option) -> bool { + match (scheme1, scheme2) { + (None, None) => true, + (Some(_), None) | (None, Some(_)) => false, + (Some(s1), Some(s2)) => { + s1.signature_algo == s2.signature_algo && + s1.hash_alg == s2.hash_alg + } + } +} + +/// Find configuration differences between two configurations +pub fn find_config_differences(original: &config::Config, updated: &config::Config) -> Vec { + let mut differences = Vec::new(); + + // Compare plugin configurations + for updated_plugin in &updated.plugins { + let matching_original = original.plugins.iter() + .find(|p| p.name == updated_plugin.name); + + match matching_original { + Some(original_plugin) => { + // Compare plugin path + if original_plugin.path != updated_plugin.path { + differences.push(ConfigDifference { + path: format!("plugins.{}.path", updated_plugin.name), + original_value: format!("{:?}", original_plugin.path), + updated_value: format!("{:?}", updated_plugin.path), + field_type: FieldType::String, + }); + } + + // Compare plugin enabled status + if original_plugin.enabled != updated_plugin.enabled { + differences.push(ConfigDifference { + path: 
format!("plugins.{}.enabled", updated_plugin.name), + original_value: format!("{}", original_plugin.enabled), + updated_value: format!("{}", updated_plugin.enabled), + field_type: FieldType::Boolean, + }); + } + + // Compare policy ID + if original_plugin.policy_id != updated_plugin.policy_id { + differences.push(ConfigDifference { + path: format!("plugins.{}.policy_id", updated_plugin.name), + original_value: format!("{:?}", original_plugin.policy_id), + updated_value: format!("{:?}", updated_plugin.policy_id), + field_type: FieldType::Banks, + }); + } + + // Compare plugin parameters + compare_plugin_params( + &format!("plugins.{}.params", updated_plugin.name), + &original_plugin.params, + &updated_plugin.params, + &mut differences + ); + }, + None => { + // New plugin + differences.push(ConfigDifference { + path: format!("plugins.{}", updated_plugin.name), + original_value: "null".to_string(), + updated_value: "new plugin".to_string(), + field_type: FieldType::Object, + }); + } + } + } + + // Check for deleted plugins + for original_plugin in &original.plugins { + if !updated.plugins.iter().any(|p| p.name == original_plugin.name) { + differences.push(ConfigDifference { + path: format!("plugins.{}", original_plugin.name), + original_value: format!("{:?}", original_plugin), + updated_value: "null".to_string(), + field_type: FieldType::Object, + }); + } + } + + // Compare scheduler configurations + for updated_scheduler in &updated.schedulers { + let matching_original = original.schedulers.iter() + .find(|s| s.name == updated_scheduler.name); + + match matching_original { + Some(original_scheduler) => { + // Compare retry enabled status + if original_scheduler.retry_enabled != updated_scheduler.retry_enabled { + differences.push(ConfigDifference { + path: format!("schedulers.{}.retry_enabled", updated_scheduler.name), + original_value: format!("{}", original_scheduler.retry_enabled), + updated_value: format!("{}", updated_scheduler.retry_enabled), + field_type: 
FieldType::Boolean, + }); + } + + // Compare interval time + if original_scheduler.intervals != updated_scheduler.intervals { + differences.push(ConfigDifference { + path: format!("schedulers.{}.intervals", updated_scheduler.name), + original_value: format!("{}", original_scheduler.intervals), + updated_value: format!("{}", updated_scheduler.intervals), + field_type: FieldType::Number, + }); + } + + // Compare enabled status + if original_scheduler.enabled != updated_scheduler.enabled { + differences.push(ConfigDifference { + path: format!("schedulers.{}.enabled", updated_scheduler.name), + original_value: format!("{}", original_scheduler.enabled), + updated_value: format!("{}", updated_scheduler.enabled), + field_type: FieldType::Boolean, + }); + } + + // Compare max retries + if original_scheduler.max_retries != updated_scheduler.max_retries { + differences.push(ConfigDifference { + path: format!("schedulers.{}.max_retries", updated_scheduler.name), + original_value: format!("{:?}", original_scheduler.max_retries), + updated_value: format!("{:?}", updated_scheduler.max_retries), + field_type: FieldType::Number, + }); + } + + // Compare initial delay + compare_initial_delay( + &format!("schedulers.{}.initial_delay", updated_scheduler.name), + &original_scheduler.initial_delay, + &updated_scheduler.initial_delay, + &mut differences + ); + }, + None => { + // New scheduler + differences.push(ConfigDifference { + path: format!("schedulers.{}", updated_scheduler.name), + original_value: "null".to_string(), + updated_value: "new scheduler".to_string(), + field_type: FieldType::Object, + }); + } + } + } + + // Check for deleted schedulers + for original_scheduler in &original.schedulers { + if !updated.schedulers.iter().any(|s| s.name == original_scheduler.name) { + differences.push(ConfigDifference { + path: format!("schedulers.{}", original_scheduler.name), + original_value: format!("{:?}", original_scheduler), + updated_value: "null".to_string(), + field_type: 
FieldType::Object, + }); + } + } + + differences +} + +/// Compare plugin parameter differences +fn compare_plugin_params( + path_prefix: &str, + params1: &Option, + params2: &Option, + differences: &mut Vec +) { + match (params1, params2) { + (None, None) => {}, + (Some(_), None) => { + differences.push(ConfigDifference { + path: path_prefix.to_string(), + original_value: "has params".to_string(), + updated_value: "null".to_string(), + field_type: FieldType::Object, + }); + }, + (None, Some(_)) => { + differences.push(ConfigDifference { + path: path_prefix.to_string(), + original_value: "null".to_string(), + updated_value: "has params".to_string(), + field_type: FieldType::Object, + }); + }, + (Some(p1), Some(p2)) => { + // Check if parameter types are the same + let same_type = match (p1, p2) { + (config::PluginParams::TpmBoot(_), config::PluginParams::TpmBoot(_)) => true, + (config::PluginParams::TpmIma(_), config::PluginParams::TpmIma(_)) => true, + _ => false, + }; + + if !same_type { + differences.push(ConfigDifference { + path: format!("{}.attester_type", path_prefix), + original_value: format!("{:?}", p1), + updated_value: format!("{:?}", p2), + field_type: FieldType::String, + }); + return; // Types different, no further comparison + } + + // Compare based on type + match (p1, p2) { + (config::PluginParams::TpmBoot(boot1), config::PluginParams::TpmBoot(boot2)) => { + // Compare log file path + if boot1.log_file_path != boot2.log_file_path { + differences.push(ConfigDifference { + path: format!("{}.log_file_path", path_prefix), + original_value: format!("{:?}", boot1.log_file_path), + updated_value: format!("{:?}", boot2.log_file_path), + field_type: FieldType::String, + }); + } + + // Compare TPM base configuration + compare_tpm_base( + path_prefix, + &boot1.tpm_base, + &boot2.tpm_base, + differences + ); + }, + (config::PluginParams::TpmIma(ima1), config::PluginParams::TpmIma(ima2)) => { + // Compare log file path + if ima1.log_file_path != 
ima2.log_file_path { + differences.push(ConfigDifference { + path: format!("{}.log_file_path", path_prefix), + original_value: format!("{:?}", ima1.log_file_path), + updated_value: format!("{:?}", ima2.log_file_path), + field_type: FieldType::String, + }); + } + + // Compare template name + if ima1.template_name != ima2.template_name { + differences.push(ConfigDifference { + path: format!("{}.template_name", path_prefix), + original_value: format!("{:?}", ima1.template_name), + updated_value: format!("{:?}", ima2.template_name), + field_type: FieldType::String, + }); + } + + // Compare TPM base configuration + compare_tpm_base( + path_prefix, + &ima1.tpm_base, + &ima2.tpm_base, + differences + ); + }, + _ => {} // Should not reach here, already checked type + } + } + } +} + +/// Compare TPM base configuration +fn compare_tpm_base( + path_prefix: &str, + base1: &config::TpmBaseConfig, + base2: &config::TpmBaseConfig, + differences: &mut Vec +) { + // Compare TCTI configuration + if base1.tcti_config != base2.tcti_config { + differences.push(ConfigDifference { + path: format!("{}.tcti_config", path_prefix), + original_value: format!("{:?}", base1.tcti_config), + updated_value: format!("{:?}", base2.tcti_config), + field_type: FieldType::String, + }); + } + + // Compare AK handle + if base1.ak_handle != base2.ak_handle { + differences.push(ConfigDifference { + path: format!("{}.ak_handle", path_prefix), + original_value: format!("{:?}", base1.ak_handle), + updated_value: format!("{:?}", base2.ak_handle), + field_type: FieldType::Hex, + }); + } + + // Compare AK NV index + if base1.ak_nv_index != base2.ak_nv_index { + differences.push(ConfigDifference { + path: format!("{}.ak_nv_index", path_prefix), + original_value: format!("{:?}", base1.ak_nv_index), + updated_value: format!("{:?}", base2.ak_nv_index), + field_type: FieldType::Hex, + }); + } + + // Compare PCR selections + compare_pcr_selections( + path_prefix, + &base1.pcr_selections, + &base2.pcr_selections, + 
differences + ); + + // Compare quote signature scheme + compare_quote_schemes( + path_prefix, + &base1.quote_signature_scheme, + &base2.quote_signature_scheme, + differences + ); +} + +/// Compare PCR selections +fn compare_pcr_selections( + path_prefix: &str, + pcr1: &Option, + pcr2: &Option, + differences: &mut Vec +) { + match (pcr1, pcr2) { + (None, None) => {}, + (Some(_), None) => { + differences.push(ConfigDifference { + path: format!("{}.pcr_selections", path_prefix), + original_value: "has pcr_selections".to_string(), + updated_value: "null".to_string(), + field_type: FieldType::Object, + }); + }, + (None, Some(_)) => { + differences.push(ConfigDifference { + path: format!("{}.pcr_selections", path_prefix), + original_value: "null".to_string(), + updated_value: "has pcr_selections".to_string(), + field_type: FieldType::Object, + }); + }, + (Some(p1), Some(p2)) => { + // Compare hash algorithm + if p1.hash_alg != p2.hash_alg { + differences.push(ConfigDifference { + path: format!("{}.pcr_selections.hash_alg", path_prefix), + original_value: format!("{:?}", p1.hash_alg), + updated_value: format!("{:?}", p2.hash_alg), + field_type: FieldType::String, + }); + } + + // Compare banks + if p1.banks != p2.banks { + differences.push(ConfigDifference { + path: format!("{}.pcr_selections.banks", path_prefix), + original_value: format!("{:?}", p1.banks), + updated_value: format!("{:?}", p2.banks), + field_type: FieldType::Banks, + }); + } + } + } +} + +/// Compare quote signature scheme +fn compare_quote_schemes( + path_prefix: &str, + scheme1: &Option, + scheme2: &Option, + differences: &mut Vec +) { + match (scheme1, scheme2) { + (None, None) => {}, + (Some(_), None) => { + differences.push(ConfigDifference { + path: format!("{}.quote_signature_scheme", path_prefix), + original_value: "has quote_signature_scheme".to_string(), + updated_value: "null".to_string(), + field_type: FieldType::Object, + }); + }, + (None, Some(_)) => { + differences.push(ConfigDifference { 
+ path: format!("{}.quote_signature_scheme", path_prefix), + original_value: "null".to_string(), + updated_value: "has quote_signature_scheme".to_string(), + field_type: FieldType::Object, + }); + }, + (Some(s1), Some(s2)) => { + // Compare signature algorithm + if s1.signature_algo != s2.signature_algo { + differences.push(ConfigDifference { + path: format!("{}.quote_signature_scheme.signature_algo", path_prefix), + original_value: format!("{:?}", s1.signature_algo), + updated_value: format!("{:?}", s2.signature_algo), + field_type: FieldType::String, + }); + } + + // Compare hash algorithm + if s1.hash_alg != s2.hash_alg { + differences.push(ConfigDifference { + path: format!("{}.quote_signature_scheme.hash_alg", path_prefix), + original_value: format!("{:?}", s1.hash_alg), + updated_value: format!("{:?}", s2.hash_alg), + field_type: FieldType::String, + }); + } + } + } +} + +/// Compare initial delay configuration +fn compare_initial_delay( + path_prefix: &str, + delay1: &Option, + delay2: &Option, + differences: &mut Vec +) { + match (delay1, delay2) { + (None, None) => {}, + (Some(_), None) => { + differences.push(ConfigDifference { + path: path_prefix.to_string(), + original_value: "has initial_delay".to_string(), + updated_value: "null".to_string(), + field_type: FieldType::Object, + }); + }, + (None, Some(_)) => { + differences.push(ConfigDifference { + path: path_prefix.to_string(), + original_value: "null".to_string(), + updated_value: "has initial_delay".to_string(), + field_type: FieldType::Object, + }); + }, + (Some(d1), Some(d2)) => { + // Compare minimum seconds + if d1.min_seconds != d2.min_seconds { + differences.push(ConfigDifference { + path: format!("{}.min_seconds", path_prefix), + original_value: format!("{}", d1.min_seconds), + updated_value: format!("{}", d2.min_seconds), + field_type: FieldType::Number, + }); + } + + // Compare maximum seconds + if d1.max_seconds != d2.max_seconds { + differences.push(ConfigDifference { + path: 
format!("{}.max_seconds", path_prefix), + original_value: format!("{}", d1.max_seconds), + updated_value: format!("{}", d2.max_seconds), + field_type: FieldType::Number, + }); + } + } + } +} \ No newline at end of file diff --git a/attestation_agent/config_sync/src/config_fetch.rs b/attestation_agent/config_sync/src/config_fetch.rs new file mode 100644 index 0000000000000000000000000000000000000000..cccead5856239700b1fc57a45d98e57fe9ac5f6f --- /dev/null +++ b/attestation_agent/config_sync/src/config_fetch.rs @@ -0,0 +1,147 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +use serde::Deserialize; +use serde_json; +use reqwest::Method; +use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64}; +use agent_utils::Client; +use serde_yaml; +use crate::config_sync_error::ConfigSyncError; + +#[derive(Debug, Deserialize)] +pub struct AgentConfigResponse { + pub format: Option, + pub content: Option, +} + +/// Fetch configuration information from server +pub async fn fetch_config_from_server() -> Result { + let client = Client::instance(); + let response = client + .request(Method::GET, "/global-trust-authority/service/v1/configuration/query", None) + .await + .map_err(|e| { + log::error!("Failed to get config from server: {}", e); + ConfigSyncError::NetworkError(format!("Failed to get config: {}", e)) + })?; + + if response.status() != reqwest::StatusCode::OK { + log::error!("Failed to get config from server: {}", response.status()); + return Err(ConfigSyncError::ResponseParseError( + format!("Failed to get config from server: {}", response.status()).to_string() + )); + } + + // Check if response has no content + if response.content_length() == Some(0) { + log::info!("Received empty response from server!"); + return Ok(AgentConfigResponse { + format: None, + content: None, + }); + } + + let json_response = response + .json::() + .await + .map_err(|e| { + log::error!("Failed to parse config response: {}", e); + ConfigSyncError::RequestParseError(format!("Failed to parse config response: {}", e)) + })?; + + // Extract necessary fields + let config_response: AgentConfigResponse = match json_response.get("agent_config") { + Some(config) => serde_json::from_value(config.clone()) + .map_err(|e| { + log::error!("Failed to extract config fields: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to extract config fields: {}", e)) + })?, + None => { + log::error!("Failed to get config from server: config is null."); + return Err(ConfigSyncError::ResponseParseError( + "Failed to get config from server: config is 
null.".to_string() + )); + } + }; + + Ok(config_response) +} + +/// Decode and convert configuration content +pub fn decode_and_convert_content(config_response: &AgentConfigResponse) -> Result { + // Check if content is available + let content = match &config_response.content { + Some(content) => content, + None => { + log::error!("Configuration content is missing"); + return Err(ConfigSyncError::ResponseParseError( + "Configuration content is missing".to_string() + )); + } + }; + + // Check if format is available + let format = match &config_response.format { + Some(format) => format, + None => { + log::warn!("Format is missing, assuming plain text"); + "PLAIN" + } + }; + + // Decode base64 encoded content field + let decoded_content = BASE64.decode(content) + .map_err(|e| { + log::error!("Failed to decode base64 content: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to decode base64 content: {}", e)) + })?; + + let decoded_string = String::from_utf8(decoded_content) + .map_err(|e| { + log::error!("Failed to convert decoded content to UTF-8: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to convert decoded content to UTF-8: {}", e)) + })?; + + // Process content according to format field + let format_upper = format.to_uppercase(); + let yaml_content = match format_upper.as_str() { + "JSON" => convert_json_to_yaml(&decoded_string)?, + "YAML" => { + // Already in YAML format, no conversion needed + log::info!("Content is already in YAML format"); + decoded_string + }, + _ => { + log::warn!("Unknown format '{}', treating as plain text", format); + decoded_string + } + }; + + Ok(yaml_content) +} + +/// Convert JSON to YAML +fn convert_json_to_yaml(json_string: &str) -> Result { + log::info!("Converting JSON to YAML"); + let json_value: serde_json::Value = serde_json::from_str(json_string) + .map_err(|e| { + log::error!("Failed to parse JSON content: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to parse JSON content: 
{}", e)) + })?; + + serde_yaml::to_string(&json_value) + .map_err(|e| { + log::error!("Failed to convert JSON to YAML: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to convert JSON to YAML: {}", e)) + }) +} \ No newline at end of file diff --git a/attestation_agent/config_sync/src/config_schema.rs b/attestation_agent/config_sync/src/config_schema.rs new file mode 100644 index 0000000000000000000000000000000000000000..6c0932bcf1de7d1ae588937b95b37584d8479dfa --- /dev/null +++ b/attestation_agent/config_sync/src/config_schema.rs @@ -0,0 +1,318 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use crate::config_sync_error::ConfigSyncError; + +#[derive(Debug, Serialize, Deserialize)] +pub struct PluginConfig { + pub name: String, + pub path: Option, + pub policy_id: Option>, + pub enabled: Option, + pub params: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct SchedulerConfig { + pub name: String, + pub retry_enabled: Option, + pub intervals: Option, + pub initial_delay: Option, + pub max_retries: Option, + pub enabled: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct InitialDelay { + pub min_seconds: u64, + pub max_seconds: u64, +} + +// Create a configuration structure that only contains server-provided fields, for parsing and validation +#[derive(Debug, Serialize, Deserialize)] +pub struct ServerProvidedConfig { + #[serde(skip_serializing_if = "Option::is_none")] + pub plugins: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub schedulers: Option>, +} + +// Validate configuration +pub fn validate_config(config: &ServerProvidedConfig) -> Result<(), ConfigSyncError> { + // Check plugins + if let Some(plugins) = &config.plugins { + for plugin in plugins { + validate_plugin(plugin)?; + } + } + + // Check schedulers + if let Some(schedulers) = &config.schedulers { + for scheduler in schedulers { + validate_scheduler(scheduler)?; + } + } + + Ok(()) +} + +fn validate_plugin(plugin: &PluginConfig) -> Result<(), ConfigSyncError> { + // Validate name + let valid_names = ["tpm_boot", "tpm_ima"]; + if !valid_names.contains(&plugin.name.as_str()) { + return Err(ConfigSyncError::ConfigValidationError(format!("Plugin name must be 'tpm_boot' or 'tpm_ima', got '{}'", plugin.name))); + } + + // Validate path + if let Some(path) = &plugin.path { + if !path.starts_with('/') { + return Err(ConfigSyncError::ConfigValidationError(format!("Plugin path must be an absolute path, got '{}'", path))); + } + } + + // Validate policy_id - it's optional 
now + // Only validate if it's not empty + if let Some(policy_id) = &plugin.policy_id { + if !policy_id.is_empty() { + for (index, policy) in policy_id.iter().enumerate() { + if policy.is_empty() { + return Err(ConfigSyncError::ConfigValidationError( + format!("Policy ID at index {} cannot be empty", index) + )); + } + } + } + } + + // Validate params fields - all are optional now + let params = &plugin.params; + + // If params exist, validate each field + if let Some(params_map) = params { + // Validate attester_type - optional + if let Some(attester) = params_map.get("attester_type") { + if let Some(attester_str) = attester.as_str() { + if attester_str != "tpm_boot" && attester_str != "tpm_ima" { + return Err(ConfigSyncError::ConfigValidationError( + format!("Plugin attester_type must be 'tpm_boot' or 'tpm_ima', got '{}'", attester_str) + )); + } + } else { + return Err(ConfigSyncError::ConfigValidationError("Plugin attester_type must be a string".to_string())); + } + } + + // Validate tcti_config - optional + if let Some(tcti) = params_map.get("tcti_config") { + if let Some(tcti_str) = tcti.as_str() { + let valid_tcti_configs = ["device", "mssim", "swtpm", "tabrmd", "libtpm"]; + if !valid_tcti_configs.contains(&tcti_str) { + return Err(ConfigSyncError::ConfigValidationError( + format!("Plugin tcti_config must be one of {:?}, got '{}'", valid_tcti_configs, tcti_str) + )); + } + } else { + return Err(ConfigSyncError::ConfigValidationError("Plugin tcti_config must be a string".to_string())); + } + } + + // Validate ak_handle - optional + if let Some(handle) = params_map.get("ak_handle") { + let handle_value = handle.as_u64().unwrap_or_default() as u32; + // Validate the numeric range + if handle_value < 0x81000000 || handle_value > 0x81FFFFFF { + return Err(ConfigSyncError::ConfigValidationError( + format!("Plugin ak_handle must be in the range of 0x81000000-0x81FFFFFF, got '0x{:X}'", handle_value) + )); + } + } + + // Validate ak_nv_index - optional + if let 
Some(index) = params_map.get("ak_nv_index") { + let index_value = index.as_u64().unwrap_or_default() as u32; + // Validate the numeric range + if index_value < 0x01000000 || index_value > 0x01D1FFFF { + return Err(ConfigSyncError::ConfigValidationError( + format!("Plugin ak_nv_index must be in the range of 0x01000000-0x01D1FFFF, got '0x{:X}'", index_value) + )); + } + } + + // Validate log_file_path - optional + if let Some(log_path) = params_map.get("log_file_path") { + if let Some(path_str) = log_path.as_str() { + if !path_str.starts_with('/') { + return Err(ConfigSyncError::ConfigValidationError(format!("Plugin log_file_path must be an absolute path, got '{}'", path_str))); + } + } else { + return Err(ConfigSyncError::ConfigValidationError("Plugin log_file_path must be a string".to_string())); + } + } + } + + // Validate pcr_selections and quote_signature_scheme + validate_pcr_selections(params)?; + validate_quote_signature_scheme(params)?; + + Ok(()) +} + +fn validate_scheduler(scheduler: &SchedulerConfig) -> Result<(), ConfigSyncError> { + // Validate name + if scheduler.name.is_empty() { + return Err(ConfigSyncError::ConfigValidationError("Scheduler name cannot be empty".to_string())); + } + + // Validate intervals + if let Some(intervals) = &scheduler.intervals { + if *intervals == 0 { + return Err(ConfigSyncError::ConfigValidationError("Scheduler intervals must be greater than 0".to_string())); + } + } + + // Validate initial_delay + if let Some(initial_delay) = &scheduler.initial_delay { + if initial_delay.min_seconds > initial_delay.max_seconds { + return Err(ConfigSyncError::ConfigValidationError( + format!("Scheduler initial_delay min_seconds ({}) must be less than or equal to max_seconds ({})", + initial_delay.min_seconds, initial_delay.max_seconds) + )); + } + } + // Validate max_retries + if let Some(max_retries) = &scheduler.max_retries { + if *max_retries == 0 { + return Err(ConfigSyncError::ConfigValidationError("Scheduler max_retries must be 
greater than 0".to_string())); + } + } + + Ok(()) +} + +// Helper function to validate pcr_selections +fn validate_pcr_selections(params: &Option>) -> Result<(), ConfigSyncError> { + // If params is None, return Ok directly + let params = match params { + Some(params) => params, + None => return Ok(()) + }; + + // pcr_selections is optional + let pcr_selections = match params.get("pcr_selections") { + Some(value) => value, + None => return Ok(()) // If not present, skip validation + }; + + // Check if pcr_selections is a mapping + let pcr_mapping = match pcr_selections.as_mapping() { + Some(mapping) => mapping, + None => return Err(ConfigSyncError::ConfigValidationError("pcr_selections must be an object".to_string())) + }; + + // banks field is optional + if let Some(banks_value) = pcr_mapping.get(&serde_yaml::Value::String("banks".to_string())) { + // Check if banks is a sequence + let banks_sequence = match banks_value.as_sequence() { + Some(seq) => seq, + None => return Err(ConfigSyncError::ConfigValidationError("pcr_selections.banks must be an array of numbers".to_string())) + }; + + // Validate each bank value + for (index, bank) in banks_sequence.iter().enumerate() { + let bank_num = match bank.as_u64() { + Some(num) => num, + None => return Err(ConfigSyncError::ConfigValidationError(format!("Bank at index {} must be a number", index))) + }; + + if bank_num > 23 { + return Err(ConfigSyncError::ConfigValidationError( + format!("Bank at index {} must be in the range of 0-23, got {}", index, bank_num) + )); + } + } + } + + // hash_alg field is optional + if let Some(hash_alg) = pcr_mapping.get(&serde_yaml::Value::String("hash_alg".to_string())) { + // Check if hash_alg is a string and valid + let hash_alg_str = match hash_alg.as_str() { + Some(str_value) => str_value, + None => return Err(ConfigSyncError::ConfigValidationError("pcr_selections.hash_alg must be a string".to_string())) + }; + + let valid_hash_algs = ["sha1", "sha256", "sha384", "sha512", "sm3"]; + 
if !valid_hash_algs.contains(&hash_alg_str) { + return Err(ConfigSyncError::ConfigValidationError( + format!("pcr_selections.hash_alg must be one of {:?}, got '{}'", valid_hash_algs, hash_alg_str) + )); + } + } + + Ok(()) +} + +// Helper function to validate quote_signature_scheme +fn validate_quote_signature_scheme(params: &Option>) -> Result<(), ConfigSyncError> { + // If params is None, return Ok directly + let params = match params { + Some(params) => params, + None => return Ok(()) + }; + + // quote_signature_scheme is optional + let quote_scheme = match params.get("quote_signature_scheme") { + Some(value) => value, + None => return Ok(()) // If not present, skip validation + }; + + // Check if quote_signature_scheme is a mapping + let scheme_mapping = match quote_scheme.as_mapping() { + Some(mapping) => mapping, + None => return Err(ConfigSyncError::ConfigValidationError("quote_signature_scheme must be an object".to_string())) + }; + + // signature_algo field is optional + if let Some(sig_algo) = scheme_mapping.get(&serde_yaml::Value::String("signature_algo".to_string())) { + // Check if signature_algo is a string and valid + let sig_algo_str = match sig_algo.as_str() { + Some(str_value) => str_value, + None => return Err(ConfigSyncError::ConfigValidationError("quote_signature_scheme.signature_algo must be a string".to_string())) + }; + + let valid_signature_algos = ["rsapss", "rsassa", "ecdsa"]; + if !valid_signature_algos.contains(&sig_algo_str) { + return Err(ConfigSyncError::ConfigValidationError( + format!("quote_signature_scheme.signature_algo must be one of {:?}, got '{}'", valid_signature_algos, sig_algo_str) + )); + } + } + + // hash_alg field is optional + if let Some(hash_alg) = scheme_mapping.get(&serde_yaml::Value::String("hash_alg".to_string())) { + // Check if hash_alg is a string and valid + let hash_alg_str = match hash_alg.as_str() { + Some(str_value) => str_value, + None => return 
Err(ConfigSyncError::ConfigValidationError("quote_signature_scheme.hash_alg must be a string".to_string())) + }; + + let valid_hash_algs = ["sha1", "sha256", "sha384", "sha512", "sm3"]; + if !valid_hash_algs.contains(&hash_alg_str) { + return Err(ConfigSyncError::ConfigValidationError( + format!("quote_signature_scheme.hash_alg must be one of {:?}, got '{}'", valid_hash_algs, hash_alg_str) + )); + } + } + + Ok(()) +} diff --git a/attestation_agent/config_sync/src/config_sync.rs b/attestation_agent/config_sync/src/config_sync.rs new file mode 100644 index 0000000000000000000000000000000000000000..09ed52347ab030c496d3fd4b4719469d99411598 --- /dev/null +++ b/attestation_agent/config_sync/src/config_sync.rs @@ -0,0 +1,111 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +use std::process; +use log; +use tokio; +use serde_yaml; + +use config::config; + +use crate::config_sync_error::ConfigSyncError; +use crate::config_schema::validate_config; +use crate::config_fetch::{fetch_config_from_server, decode_and_convert_content}; +use crate::config_update::update_config_from_server; +use crate::config_comparison::{configs_are_equal, find_config_differences}; +use crate::file_utils::save_config_with_differences; + +/// Main function to execute configuration synchronization +pub async fn do_sync_config(config: Option, config_path: &str) -> Result<(), ConfigSyncError> { + // Check if local configuration is available + let original_config = match config { + Some(cfg) => cfg, + None => { + log::error!("Local configuration is not available"); + return Err(ConfigSyncError::ConfigNotAvailable( + "Local configuration is not available".to_string() + )); + } + }; + + // Fetch configuration from server + let config_response = fetch_config_from_server().await?; + + // Check if both content and format are missing + if config_response.content.is_none() && config_response.format.is_none() { + log::info!("Received empty configuration from server, will not update local configuration"); + return Ok(()); + } + + // Parse configuration content + let yaml_content = decode_and_convert_content(&config_response)?; + + // Parse YAML content as ServerProvidedConfig + let server_config = serde_yaml::from_str(&yaml_content) + .map_err(|e| { + log::error!("Failed to parse YAML content as config: {}", e); + ConfigSyncError::ResponseParseError(format!("Failed to parse YAML content as config: {}", e)) + })?; + // Validate plugin configuration + validate_config(&server_config)?; + + // Clone original configuration + let mut updated_config = original_config.clone(); + + // Update local configuration with server-provided configuration + update_config_from_server(&mut updated_config, server_config)?; + + // Compare original configuration and updated configuration + if 
configs_are_equal(&original_config, &updated_config) { + log::info!("New configuration is identical to current configuration, no update needed"); + return Ok(()); + } + + // Find specific configuration differences + let differences = find_config_differences(&original_config, &updated_config); + log::info!("Found {} configuration differences", differences.len()); + for diff in &differences { + log::info!("Config difference - path: {}, type: {:?}, original: {}, updated: {}", + diff.path, diff.field_type, diff.original_value, diff.updated_value); + } + + // Configuration has changed, save and exit program + log::info!("Configuration synchronization complete, program will exit to apply new configuration"); + + // Save the updated configuration + save_config_with_differences(config_path, &differences)?; + + log::info!("Configuration synchronization complete, sending SIGTERM to self to restart with new configuration"); + + // Send signal using standard library in a way compatible with tokio + // tokio will capture this signal and perform graceful shutdown + let pid = process::id(); + match unsafe { libc::kill(pid as i32, libc::SIGTERM) } { + 0 => { + log::info!("SIGTERM signal sent successfully"); + // Don't return immediately, give some time for the signal handler to work + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + }, + _ => { + log::error!("Failed to send SIGTERM, falling back to tokio::task::spawn_blocking"); + // Use tokio's blocking task to execute exit + tokio::task::spawn_blocking(|| { + log::info!("Exiting through tokio::task::spawn_blocking"); + process::exit(0); + }); + // Give some time for the spawn_blocking task to execute + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + } + } + + Ok(()) +} diff --git a/attestation_agent/config_sync/src/config_sync_error.rs b/attestation_agent/config_sync/src/config_sync_error.rs new file mode 100644 index 
0000000000000000000000000000000000000000..0bd1cc100d239e1a0a5a57912f815020d1671627 --- /dev/null +++ b/attestation_agent/config_sync/src/config_sync_error.rs @@ -0,0 +1,40 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. + */ + +use std::fmt; + +#[derive(Debug)] +pub enum ConfigSyncError { + NetworkError(String), + RequestParseError(String), + ResponseParseError(String), + ConfigValidationError(String), + FileOperationError(String), + ConfigNotAvailable(String), + ServerError(String), +} + +impl fmt::Display for ConfigSyncError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ConfigSyncError::NetworkError(e) => write!(f, "Network error: {}", e), + ConfigSyncError::RequestParseError(e) => write!(f, "Request parse error: {}", e), + ConfigSyncError::ResponseParseError(e) => write!(f, "Response parse error: {}", e), + ConfigSyncError::ConfigValidationError(e) => write!(f, "Configuration validation error: {}", e), + ConfigSyncError::FileOperationError(e) => write!(f, "File operation error: {}", e), + ConfigSyncError::ConfigNotAvailable(e) => write!(f, "Configuration not available: {}", e), + ConfigSyncError::ServerError(e) => write!(f, "Server error: {}", e), + } + } +} + +impl std::error::Error for ConfigSyncError {} \ No newline at end of file diff --git a/attestation_agent/config_sync/src/config_update.rs b/attestation_agent/config_sync/src/config_update.rs new file mode 100644 index 
0000000000000000000000000000000000000000..551f56b5cca870f2d1b5e0451479675952d968e5 --- /dev/null +++ b/attestation_agent/config_sync/src/config_update.rs @@ -0,0 +1,361 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. + */ + +use std::collections::HashMap; +use config::config; +use crate::config_sync_error::ConfigSyncError; +use crate::config_schema::ServerProvidedConfig; + +/// Update local configuration with server-provided configuration +pub fn update_config_from_server(config: &mut config::Config, server_config: ServerProvidedConfig) -> Result<(), ConfigSyncError> { + log::info!("Starting configuration update..."); + + // Update plugin configurations + if let Some(server_plugins) = server_config.plugins { + update_plugins(config, server_plugins)?; + } + + // Update scheduler configurations + if let Some(server_schedulers) = server_config.schedulers { + update_schedulers(config, server_schedulers)?; + } + + log::info!("Configuration update completed"); + Ok(()) +} + +/// Update plugin configurations +fn update_plugins(config: &mut config::Config, server_plugins: Vec) -> Result<(), ConfigSyncError> { + // Create a mapping of existing plugin names to indices + let mut existing_plugin_indices = HashMap::new(); + for (index, plugin) in config.plugins.iter().enumerate() { + existing_plugin_indices.insert(plugin.name.clone(), index); + } + + // Process each plugin configuration provided by the server + for server_plugin in server_plugins { + let 
plugin_name = server_plugin.name.clone(); + log::debug!("Processing plugin: {}", plugin_name); + + // Check if the plugin already exists + if let Some(&index) = existing_plugin_indices.get(&plugin_name) { + // Update existing plugin + let existing_plugin = &mut config.plugins[index]; + + // Update path if provided + if let Some(path) = server_plugin.path { + existing_plugin.path = path; + } + + // Update policy_id if provided + if let Some(policy_id) = server_plugin.policy_id { + existing_plugin.policy_id = policy_id; + } + + // Update enabled if provided + if let Some(enabled) = server_plugin.enabled { + existing_plugin.enabled = enabled; + } + + // Update parameters if provided + if let Some(params) = server_plugin.params { + if !params.is_empty() { + // Get the existing params to preserve current configuration where not updated + let converted_params = convert_params_to_plugin_params(&plugin_name, &params, &existing_plugin.params)?; + existing_plugin.params = Some(converted_params); + } + } + + log::debug!("Updated plugin: {}", plugin_name); + } + } + + log::info!("Plugin configuration update completed"); + Ok(()) +} + +/// Update scheduler configurations +fn update_schedulers(config: &mut config::Config, server_schedulers: Vec) -> Result<(), ConfigSyncError> { + // Create a mapping of existing scheduler names to indices + let mut existing_scheduler_indices = HashMap::new(); + for (index, scheduler) in config.schedulers.iter().enumerate() { + existing_scheduler_indices.insert(scheduler.name.clone(), index); + } + + // Process each scheduler configuration provided by the server + for server_scheduler in server_schedulers { + let scheduler_name = server_scheduler.name.clone(); + log::debug!("Processing scheduler: {}", scheduler_name); + + // Check if the scheduler already exists + if let Some(&index) = existing_scheduler_indices.get(&scheduler_name) { + // Update existing scheduler + let existing_scheduler = &mut config.schedulers[index]; + + // Update retry_enabled
if provided + if let Some(retry_enabled) = server_scheduler.retry_enabled { + existing_scheduler.retry_enabled = retry_enabled; + } + + // Update intervals if provided + if let Some(intervals) = server_scheduler.intervals { + if intervals > 0 { + existing_scheduler.intervals = intervals; + } + } + + // Update initial_delay if provided + if let Some(delay) = server_scheduler.initial_delay { + existing_scheduler.initial_delay = Some(config::InitialDelayConfig { + min_seconds: delay.min_seconds, + max_seconds: delay.max_seconds, + }); + } + + // Update max_retries if provided + if let Some(retries) = server_scheduler.max_retries { + existing_scheduler.max_retries = Some(retries as usize); + } + + // Update enabled if provided + if let Some(enabled) = server_scheduler.enabled { + existing_scheduler.enabled = enabled; + } + + log::debug!("Updated scheduler: {}", scheduler_name); + } + } + + log::info!("Scheduler configuration update completed"); + Ok(()) +} + +/// Convert plugin parameters to PluginParams +fn convert_params_to_plugin_params(plugin_name: &str, params: &HashMap, existing_params: &Option) -> Result { + // Get attester_type, if exists + let attester_type = params.get("attester_type") + .and_then(|v| v.as_str()) + .unwrap_or(""); + + // Determine plugin type based on plugin name or attester_type + match (plugin_name, attester_type) { + // TPM Boot plugin + ("tpm_boot", _) | (_, "tpm_boot") => { + // Extract current TPM base configuration if it exists + let current_tpm_base = match existing_params { + Some(config::PluginParams::TpmBoot(config)) => Some(&config.tpm_base), + _ => None, + }; + + // Get TPM base configuration + let tpm_base = extract_tpm_base(params, current_tpm_base)?; + + // Extract log file path + let log_file_path = params.get("log_file_path") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + .or_else(|| match existing_params { + Some(config::PluginParams::TpmBoot(config)) => Some(config.log_file_path.clone()), + _ => None, + }) + 
.unwrap_or_else(|| "/sys/kernel/security/tpm0/binary_bios_measurements".to_string()); + + Ok(config::PluginParams::TpmBoot(config::TpmConfig { + tpm_base, + log_file_path, + })) + }, + + // TPM IMA plugin + ("tpm_ima", _) | (_, "tpm_ima") => { + // Extract current TPM base configuration if it exists + let current_tpm_base = match existing_params { + Some(config::PluginParams::TpmIma(config)) => Some(&config.tpm_base), + _ => None, + }; + + // Get TPM base configuration + let tpm_base = extract_tpm_base(params, current_tpm_base)?; + + // Extract log file path + let log_file_path = params.get("log_file_path") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + .or_else(|| match existing_params { + Some(config::PluginParams::TpmIma(config)) => Some(config.log_file_path.clone()), + _ => None, + }) + .unwrap_or_else(|| "/sys/kernel/security/ima/ascii_runtime_measurements".to_string()); + + // Extract template name + let template_name = params.get("template_name") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + .or_else(|| match existing_params { + Some(config::PluginParams::TpmIma(config)) => config.template_name.clone(), + _ => None, + }); + + Ok(config::PluginParams::TpmIma(config::ImaConfig { + tpm_base, + log_file_path, + template_name, + })) + }, + + // TPM DIM plugin (not fully supported yet) + ("tpm_dim", _) | (_, "tpm_dim") => { + log::warn!("TpmDim plugin type not fully supported yet"); + + Err(ConfigSyncError::ResponseParseError( + "TpmDim plugin type not fully supported yet".to_string() + )) + }, + + // Unknown plugin type + _ => Err(ConfigSyncError::ResponseParseError( + format!("Unknown plugin type for plugin: {}", plugin_name) + )) + } +} + +/// Extract TPM base configuration from parameters +fn extract_tpm_base(params: &HashMap, current_tpm_base: Option<&config::TpmBaseConfig>) -> Result { + // Extract TCTI configuration + let tcti_config = params.get("tcti_config") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + .or_else(|| 
current_tpm_base.map(|b| b.tcti_config.clone())) + .unwrap_or_else(|| "device".to_string()); + + // Extract AK handle + let ak_handle = params.get("ak_handle") + .and_then(|v| v.as_u64()) + .map(|v| v as u32) + .or_else(|| current_tpm_base.and_then(|b| b.ak_handle)); + + // Extract AK NV index + let ak_nv_index = params.get("ak_nv_index") + .and_then(|v| v.as_u64()) + .map(|v| v as u32) + .or_else(|| current_tpm_base.and_then(|b| b.ak_nv_index)); + + // For pcr_selections, check if params contains it, if yes extract it, otherwise use current value + let pcr_selections = if params.contains_key("pcr_selections") { + // Server config has pcr_selections, extract or merge with current + let server_pcr = extract_pcr_selections(params, current_tpm_base.and_then(|b| b.pcr_selections.as_ref())); + server_pcr + } else { + // Server config doesn't have pcr_selections, keep current value + current_tpm_base.and_then(|b| b.pcr_selections.clone()) + }; + + // For quote_signature_scheme, similar approach + let quote_signature_scheme = if params.contains_key("quote_signature_scheme") { + // Server config has quote_signature_scheme, extract or merge + let server_scheme = extract_quote_signature_scheme(params, current_tpm_base.and_then(|b| b.quote_signature_scheme.as_ref())); + server_scheme + } else { + // Server config doesn't have quote_signature_scheme, keep current value + current_tpm_base.and_then(|b| b.quote_signature_scheme.clone()) + }; + + Ok(config::TpmBaseConfig { + tcti_config, + ak_handle, + ak_nv_index, + pcr_selections, + quote_signature_scheme, + }) +} + +/// Extract PCR selections configuration from parameters +fn extract_pcr_selections(params: &HashMap, current_pcr_selections: Option<&config::PcrSelection>) -> Option { + let pcr_value = params.get("pcr_selections")?; + let pcr_mapping = pcr_value.as_mapping()?; + + // Start with current values if available, otherwise use defaults + let mut pcr_selection = if let Some(current) = current_pcr_selections { + // Clone 
the current configuration + config::PcrSelection { + banks: current.banks.clone(), + hash_alg: current.hash_alg.clone(), + } + } else { + // Create with defaults + config::PcrSelection { + banks: Vec::new(), + hash_alg: "sha256".to_string(), + } + }; + + // Only update banks if it exists in the mapping + let banks_key = &serde_yaml::Value::String("banks".to_string()); + if pcr_mapping.contains_key(banks_key) { + if let Some(banks_seq) = pcr_mapping.get(banks_key).and_then(|v| v.as_sequence()) { + pcr_selection.banks = banks_seq.iter() + .filter_map(|v| v.as_u64().map(|n| n as u32)) + .collect(); + } + } + + // Only update hash_alg if it exists in the mapping + let hash_alg_key = &serde_yaml::Value::String("hash_alg".to_string()); + if pcr_mapping.contains_key(hash_alg_key) { + if let Some(hash_alg_str) = pcr_mapping.get(hash_alg_key).and_then(|v| v.as_str()) { + pcr_selection.hash_alg = hash_alg_str.to_string(); + } + } + + // Return the updated configuration + Some(pcr_selection) +} + +/// Extract quote signature scheme from parameters +fn extract_quote_signature_scheme(params: &HashMap, current_quote_signature_scheme: Option<&config::QuoteSignatureScheme>) -> Option { + let scheme_value = params.get("quote_signature_scheme")?; + let scheme_mapping = scheme_value.as_mapping()?; + + // Start with current values if available, otherwise use defaults + let mut quote_scheme = if let Some(current) = current_quote_signature_scheme { + // Clone the current configuration + config::QuoteSignatureScheme { + signature_algo: current.signature_algo.clone(), + hash_alg: current.hash_alg.clone(), + } + } else { + // Create with defaults + config::QuoteSignatureScheme { + signature_algo: "rsassa".to_string(), + hash_alg: "sha256".to_string(), + } + }; + + // Only update signature_algo if it exists in the mapping + let signature_algo_key = &serde_yaml::Value::String("signature_algo".to_string()); + if scheme_mapping.contains_key(signature_algo_key) { + if let Some(sig_algo_str) = 
scheme_mapping.get(signature_algo_key).and_then(|v| v.as_str()) { + quote_scheme.signature_algo = sig_algo_str.to_string(); + } + } + + // Only update hash_alg if it exists in the mapping + let hash_alg_key = &serde_yaml::Value::String("hash_alg".to_string()); + if scheme_mapping.contains_key(hash_alg_key) { + if let Some(hash_alg_str) = scheme_mapping.get(hash_alg_key).and_then(|v| v.as_str()) { + quote_scheme.hash_alg = hash_alg_str.to_string(); + } + } + + // Return the updated configuration + Some(quote_scheme) +} \ No newline at end of file diff --git a/attestation_agent/config_sync/src/file_utils.rs b/attestation_agent/config_sync/src/file_utils.rs new file mode 100644 index 0000000000000000000000000000000000000000..6a96749024faf916451d4bf98de834a800a541ac --- /dev/null +++ b/attestation_agent/config_sync/src/file_utils.rs @@ -0,0 +1,347 @@ +/* + * Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. 
+ */ + +use crate::config_sync_error::ConfigSyncError; +use crate::config_comparison::ConfigDifference; + +/// Save configuration to file based on differences +pub fn save_config_with_differences( + config_path: &str, + differences: &Vec +) -> Result<(), ConfigSyncError> { + // Read configuration file + let file_content = std::fs::read_to_string(config_path) + .map_err(|e| ConfigSyncError::FileOperationError(format!("Failed to read config file: {}", e)))?; + + // Locate the configuration in file_content based on the path in differences, and write the updated_value to that location according to FieldType + let mut modified_content = file_content.clone(); + + // Process each difference + for difference in differences { + apply_single_difference(&mut modified_content, difference)?; + } + + // Write the modified content back to the file + std::fs::write(config_path, modified_content) + .map_err(|e| ConfigSyncError::FileOperationError(format!("Failed to write configuration file: {}", e)))?; + + log::info!("Successfully saved updated configuration to file: {}", config_path); + Ok(()) +} + +/// Apply a single configuration difference to the content +pub fn apply_single_difference( + modified_content: &mut String, + difference: &ConfigDifference +) -> Result<(), ConfigSyncError> { + let path_parts: Vec<&str> = difference.path.split('.').collect(); + if path_parts.is_empty() { + log::warn!("Invalid configuration path: {}", difference.path); + return Ok(()); + } + + // Find configuration item position + let current_pos = find_config_item_position(modified_content, &path_parts)?; + if current_pos == 0 { + return Ok(()); // Configuration item position not found + } + + // Find the position of the last configuration item + let last_part = path_parts.last().unwrap(); + let search_pattern = format!("{}: ", last_part); + + if let Some(pos) = modified_content[current_pos..].find(&search_pattern) { + let item_pos = current_pos + pos + search_pattern.len(); + 
process_config_value(modified_content, item_pos, difference); + log::info!("Updated configuration item {}: {} -> {}", difference.path, difference.original_value, difference.updated_value); + } else { + log::error!("Could not find position for {}.{} in the configuration file", path_parts[..path_parts.len()-1].join("."), last_part); + } + + Ok(()) +} + +/// Find the position of a configuration item in the file +fn find_config_item_position(content: &str, path_parts: &[&str]) -> Result { + let is_plugin_path = path_parts.len() >= 2 && path_parts[0] == "plugins"; + let is_scheduler_path = path_parts.len() >= 2 && path_parts[0] == "schedulers"; + + let mut current_pos = 0; + + // Special handling for paths in plugins.{name}.xxx or schedulers.{name}.xxx format + if is_plugin_path || is_scheduler_path { + current_pos = find_named_section_position(content, path_parts)?; + } else { + // Regular path handling has been removed, but branch is kept for future needs + log::warn!("Only plugins and schedulers path formats are supported"); + } + + Ok(current_pos) +} + +/// Find the position of a named section (like plugins.{name}) +fn find_named_section_position(content: &str, path_parts: &[&str]) -> Result { + // First find the plugins: or schedulers: section + let section_pattern = format!("{}:", path_parts[0]); + if let Some(pos) = content.find(&section_pattern) { + let mut current_pos = pos + section_pattern.len(); + + // Move to the next line + if let Some(newline_pos) = content[current_pos..].find('\n') { + current_pos += newline_pos + 1; + } + + // Find the item with the specific name + let plugin_name = path_parts[1]; + let name_pattern1 = format!("name: \"{}\"", plugin_name); + let name_pattern2 = format!("name: {}", plugin_name); + + let mut item_found = false; + + // Find the item start position + while current_pos < content.len() { + // Find the next line starting with "- " (list item) + if let Some(dash_pos) = content[current_pos..].find("- ") { + let item_start =
current_pos + dash_pos; + + // Calculate the item end position (next "- " or end of file) + let item_end = if let Some(next_dash_pos) = content[item_start + 2..].find("- ") { + item_start + 2 + next_dash_pos + } else { + content.len() + }; + + // Check if this item contains the name we're looking for + let item_content = &content[item_start..item_end]; + if item_content.contains(&name_pattern1) || item_content.contains(&name_pattern2) { + // Found the matching item + current_pos = item_start + 2; // Skip "- " + item_found = true; + break; + } + + // Continue to the next item + current_pos = item_start + 2; + } else { + // No more list items + break; + } + } + + if !item_found { + log::error!("Could not find item with name {}", plugin_name); + return Ok(0); + } else { + // If path length is greater than 3, continue searching for the intermediate path parts (the final part is resolved by the caller) + if path_parts.len() > 3 { + // Start searching from the 3rd part (skip plugins and plugin name) + for i in 2..path_parts.len() - 1 { + let sub_pattern = format!("{}:", path_parts[i]); + if let Some(pos) = content[current_pos..].find(&sub_pattern) { + current_pos += pos + sub_pattern.len(); + + // Move to the end of this line + if let Some(newline_pos) = content[current_pos..].find('\n') { + current_pos += newline_pos + 1; + } + } else { + log::error!("Could not find path part: {}", path_parts[i]); + return Ok(0); + } + } + } + + return Ok(current_pos); + } + } else { + log::error!("Could not find {} section", section_pattern); + return Ok(0); + } +} + +/// Process configuration value: Find value range, extract comment, format new value, and replace original value with new value +fn process_config_value(content: &mut String, start_pos: usize, difference: &ConfigDifference) { + // Determine where the value ends (before any trailing comment) + let value_end_pos = find_value_end(content, start_pos, &difference.field_type); + + // Format new value + let formatted_value = format_updated_value(difference); + + // Replace original value with new value + 
content.replace_range(start_pos..value_end_pos, &formatted_value); +} + +/// Find the end position of a value, stopping before any trailing `#` comment +fn find_value_end(content: &str, start_pos: usize, field_type: &crate::config_comparison::FieldType) -> usize { + let mut value_end_pos = start_pos; + + // Locate the end of the current line (or end of content) + let line_end_pos = if let Some(newline_pos) = content[start_pos..].find('\n') { + start_pos + newline_pos + } else { + content.len() + }; + + // Depending on different field types, find the end position of the value + match field_type { + crate::config_comparison::FieldType::String | + crate::config_comparison::FieldType::Number | + crate::config_comparison::FieldType::Boolean | + crate::config_comparison::FieldType::Hex => { + // For simple types, find the next newline or comment symbol + if let Some(comment_pos) = content[start_pos..line_end_pos].find('#') { + value_end_pos += comment_pos; + // Find the last non-whitespace character before the comment + while value_end_pos > start_pos && content[value_end_pos-1..value_end_pos].chars().next().unwrap().is_whitespace() { + value_end_pos -= 1; + } + } else if let Some(newline_pos) = content[start_pos..].find('\n') { + value_end_pos += newline_pos; + } else { + value_end_pos = content.len(); + } + }, + crate::config_comparison::FieldType::Array | + crate::config_comparison::FieldType::Object => { + // For complex types, find the appropriate end position + find_complex_value_end(content, start_pos, line_end_pos, &mut value_end_pos); + }, + crate::config_comparison::FieldType::Banks => { + // Special handling for PCR banks and policy_id + if let Some(comment_pos) = content[start_pos..line_end_pos].find('#') { + // Find closing bracket before comment + let closing_bracket_pos = content[start_pos..start_pos + comment_pos].rfind(']'); + if let Some(pos) = closing_bracket_pos { + value_end_pos += pos + 1; + } else { + // Closing bracket not found, possibly not on the same line + value_end_pos += comment_pos; + // Find 
the last non-whitespace character before the comment + while value_end_pos > start_pos && content[value_end_pos-1..value_end_pos].chars().next().unwrap().is_whitespace() { + value_end_pos -= 1; + } + } + } else if let Some(closing_bracket) = content[start_pos..].find("]") { + value_end_pos += closing_bracket + 1; + } else { + // If closing bracket not found, revert to simple handling + if let Some(newline_pos) = content[start_pos..].find('\n') { + value_end_pos += newline_pos; + } else { + value_end_pos = content.len(); + } + } + } + } + + value_end_pos +} + +/// Find the end position of a complex value type +fn find_complex_value_end(content: &str, start_pos: usize, line_end_pos: usize, value_end_pos: &mut usize) { + let mut found_end = false; + + // First check if the current line has a comment + if let Some(comment_pos) = content[start_pos..line_end_pos].find('#') { + // Found comment position, but need to confirm this comment is after the valid value + let array_end = content[start_pos..start_pos + comment_pos].rfind(']'); + if let Some(end_pos) = array_end { + // Found array end symbol before comment + *value_end_pos = start_pos + end_pos + 1; + found_end = true; + } + } + + if !found_end { + // Here simplified handling, find the next same level indented line + let current_line_start = content[..start_pos].rfind('\n').unwrap_or(0); + let indent_level = start_pos - current_line_start - 1; + + let mut pos = start_pos; + + while let Some(next_newline) = content[pos..].find('\n') { + pos += next_newline + 1; + + // Check the indent level of the next line + if pos < content.len() { + let next_non_space = content[pos..].find(|c: char| !c.is_whitespace()); + if let Some(spaces) = next_non_space { + if spaces <= indent_level && spaces > 0 { + *value_end_pos = pos - 1; // Exclude newline + found_end = true; + break; + } + } + } + } + + if !found_end { + *value_end_pos = content.len(); + } + } +} + +/// Format updated value based on field type +fn 
format_updated_value(difference: &ConfigDifference) -> String { + match difference.field_type { + crate::config_comparison::FieldType::String => { + if difference.updated_value.starts_with('"') && difference.updated_value.ends_with('"') { + difference.updated_value.clone() + } else { + format!("\"{}\"", difference.updated_value.replace("\"", "\\\"")) + } + }, + crate::config_comparison::FieldType::Number => difference.updated_value.clone(), + crate::config_comparison::FieldType::Boolean => difference.updated_value.clone(), + crate::config_comparison::FieldType::Hex => format_hex_value(&difference.updated_value), + crate::config_comparison::FieldType::Array | + crate::config_comparison::FieldType::Object => difference.updated_value.clone(), + crate::config_comparison::FieldType::Banks => format_banks_value(&difference.updated_value), + } +} + +/// Format hexadecimal value +fn format_hex_value(value: &str) -> String { + if value.starts_with("0x") { + value.to_string().clone() + } else { + // Convert number to hexadecimal format (0xXXXX) + let clean_value = value + .trim_start_matches("Some(") + .trim_end_matches(")"); + + match clean_value.parse::() { + Ok(num) => format!("0x{:X}", num), + Err(_) => format!("0x{}", value) + } + } +} + +/// Format Banks value (including policy_id) +fn format_banks_value(value: &str) -> String { + if value.trim() == "[]" { + "[]".to_string() + } else { + // Ensure banks/policy_id list format is correct + let content = value + .trim_start_matches('[') + .trim_end_matches(']') + .trim(); + + if content.is_empty() { + "[]".to_string() + } else { + // Keep array special format + format!("[{}]", content) + } + } +} \ No newline at end of file diff --git a/attestation_agent/config_sync/src/lib.rs b/attestation_agent/config_sync/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..9bfc818ba7e5f5a99fd06bf07395bbfcaa5bb0e0 --- /dev/null +++ b/attestation_agent/config_sync/src/lib.rs @@ -0,0 +1,23 @@ +/* + * Copyright 
(c) Huawei Technologies Co., Ltd. 2025. All rights reserved. + * Global Trust Authority is licensed under the Mulan PSL v2. + * You can use this software according to the terms and conditions of the Mulan PSL v2. + * You may obtain a copy of Mulan PSL v2 at: + * http://license.coscl.org.cn/MulanPSL2 + * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR + * PURPOSE. + * See the Mulan PSL v2 for more details. + */ + +mod config_sync; +mod config_sync_error; +mod config_schema; +mod file_utils; +mod config_comparison; +mod config_update; +mod config_fetch; + +pub use config_sync::do_sync_config; +pub use config_sync_error::ConfigSyncError; +pub use config_schema::ServerProvidedConfig; diff --git a/config/agent_config.yaml b/config/agent_config.yaml index 01266e33efec265e1d335f62a5e2b519c4dc92b2..643d9ad0008aa485b96b7f8bfad3e79574291cee 100644 --- a/config/agent_config.yaml +++ b/config/agent_config.yaml @@ -78,7 +78,10 @@ schedulers: enabled: true - name: "config_sync" # Configuration synchronization task - retry_enabled: false # Temporarily disabled + retry_enabled: true # Retry enabled for config sync intervals: 300 # Execute every 5 minutes - enabled: true # No initial_delay configuration means no initial random delay is needed - + initial_delay: # Random delay configuration at startup + min_seconds: 1 # Minimum delay 1 second + max_seconds: 60 # Maximum delay 60 seconds + max_retries: 1 + enabled: true