From cb262a8dbb631788ca6ed93e857ad94bf92ea032 Mon Sep 17 00:00:00 2001 From: datron Date: Tue, 18 Nov 2025 15:19:16 +0530 Subject: [PATCH 01/22] fix: make high performance mode a runtime configuration --- crates/context_aware_config/Cargo.toml | 3 +- .../src/api/config/handlers.rs | 67 ++++++++-------- .../src/api/context/handlers.rs | 43 ++++------- .../src/api/default_config/handlers.rs | 31 +------- .../src/api/dimension/handlers.rs | 28 +------ crates/context_aware_config/src/helpers.rs | 40 ++++++---- crates/service_utils/Cargo.toml | 4 +- crates/service_utils/src/service/types.rs | 3 +- crates/superposition/Cargo.toml | 8 +- crates/superposition/src/app_state.rs | 76 ++++++++++--------- .../include/superposition_core.h | 63 +++++++++++++++ 11 files changed, 189 insertions(+), 177 deletions(-) create mode 100644 crates/superposition_core/include/superposition_core.h diff --git a/crates/context_aware_config/Cargo.toml b/crates/context_aware_config/Cargo.toml index 9cc13f4bc..a790b940f 100644 --- a/crates/context_aware_config/Cargo.toml +++ b/crates/context_aware_config/Cargo.toml @@ -17,7 +17,7 @@ blake3 = { workspace = true } cac_client = { path = "../cac_client" } chrono = { workspace = true } diesel = { workspace = true, features = ["numeric"] } -fred = { workspace = true, optional = true, features = ["metrics"] } +fred = { workspace = true, features = ["metrics"] } itertools = { workspace = true } jsonlogic = { workspace = true } jsonschema = { workspace = true } @@ -39,7 +39,6 @@ uuid = { workspace = true } [features] disable_db_data_validation = ["superposition_types/disable_db_data_validation"] -high-performance-mode = ["dep:fred"] [lints] diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 27a6157ba..8072be615 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -1,32 +1,33 @@ use std::collections::HashMap; -#[cfg(feature = "high-performance-mode")] -use actix_http::StatusCode; -#[cfg(feature = "high-performance-mode")] -use actix_web::http::header::ContentType; +use actix_http::{header::HeaderValue, StatusCode}; use actix_web::{ - HttpRequest, HttpResponse, Scope, get, put, routes, + get, + http::header::ContentType, + put, routes, web::{Data, Header, Json, Path, Query}, + HttpRequest, HttpResponse, HttpResponseBuilder, Scope, +}; +use cac_client::{eval_cac, eval_cac_with_reasoning}; +use chrono::{DateTime, Timelike, Utc}; +use diesel::{ + dsl::max, BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, + SelectableHelper, }; -use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; -#[cfg(feature = "high-performance-mode")] use fred::interfaces::KeysInterface; use itertools::Itertools; -use serde_json::{Map, Value, json}; -#[cfg(feature = "high-performance-mode")] -use service_utils::service::types::AppHeader; -use service_utils::{ - helpers::fetch_dimensions_info_map, - service::types::{AppState, DbConnection, WorkspaceContext}, +use serde_json::{json, Map, Value}; +#[cfg(feature = "jsonlogic")] +use service_utils::helpers::extract_dimensions; +use service_utils::service::types::{ + AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext, }; use superposition_core::{ helpers::{calculate_context_weight, hash}, serialize_to_toml, }; use superposition_derives::authorized; -#[cfg(feature = "high-performance-mode")] -use superposition_macros::response_error; -use 
superposition_macros::{bad_argument, unexpected_error}; +use superposition_macros::{bad_argument, db_error, response_error, unexpected_error}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DimensionInfo, OverrideWithKeys, Overrides, PaginatedResponse, User, @@ -69,9 +70,7 @@ pub fn endpoints() -> Scope { .service(reduce_handler) .service(list_version_handler) .service(get_version_handler); - #[cfg(feature = "high-performance-mode")] - let scope = scope.service(get_fast_handler); - scope + .service(get_fast_handler); } fn generate_subsets(map: &Map) -> Vec> { @@ -476,8 +475,6 @@ async fn reduce_handler( Ok(HttpResponse::Ok().json(config)) } -#[cfg(feature = "high-performance-mode")] -#[authorized] #[get("/fast")] async fn get_fast_handler( workspace_context: WorkspaceContext, @@ -485,19 +482,23 @@ async fn get_fast_handler( ) -> superposition::Result { use fred::interfaces::MetricsInterface; + // Only use Redis if it's configured + let redis_pool = match &state.redis { + Some(pool) => pool, + None => { + return Err(response_error!( + StatusCode::SERVICE_UNAVAILABLE, + "Redis not configured, fast config endpoint unavailable" + )); + } + }; + log::debug!("Started redis fetch"); - let config_key = format!("{}::cac_config", *workspace_context.schema_name); - let last_modified_at_key = format!( - "{}::cac_config::last_modified_at", - *workspace_context.schema_name - ); - let audit_id_key = - format!("{}::cac_config::audit_id", *workspace_context.schema_name); - let config_version_key = format!( - "{}::cac_config::config_version", - *workspace_context.schema_name - ); - let client = state.redis.next_connected(); + let config_key = format!("{}::cac_config", *schema_name); + let last_modified_at_key = format!("{}::cac_config::last_modified_at", *schema_name); + let audit_id_key = format!("{}::cac_config::audit_id", *schema_name); + let config_version_key = format!("{}::cac_config::config_version", *schema_name); + let client = redis_pool.next_connected(); let config = client.get::(config_key).await; let metrics = client.take_latency_metrics(); let network_metrics = client.take_network_latency_metrics(); diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 4660569e9..899eaf938 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -56,8 +56,9 @@ use crate::{ helpers::{query_description, validate_ctx}, operations, }, - helpers::validate_change_reason, + dimension::fetch_dimensions_info_map, }; +use crate::helpers::{add_config_version, calculate_context_weight, put_config_in_redis}; pub fn endpoints() -> Scope { Scope::new("") @@ -176,6 +177,9 @@ async fn create_handler( version_id.to_string(), )); + let DbConnection(mut conn) = db_conn; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + Ok(http_resp.json(put_response)) } @@ -261,6 +265,9 @@ async fn update_handler( version_id.to_string(), )); + let DbConnection(mut conn) = db_conn; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + Ok(http_resp.json(override_resp)) } @@ -369,6 +376,9 @@ async fn move_handler( version_id.to_string(), )); + let DbConnection(mut conn) = db_conn; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + Ok(http_resp.json(move_response.context)) } @@ -598,16 +608,7 @@ async fn delete_handler( })?; let DbConnection(mut conn) = db_conn; - - #[cfg(feature = 
"high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; - + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &deleted_ctx, resource: Resource::Context, @@ -836,14 +837,8 @@ async fn bulk_operations_handler( Ok((response, version_id)) })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &webhook_contexts, @@ -954,15 +949,7 @@ async fn weight_recompute_handler( let version_id = add_config_version(&state, tags, config_version_desc, transaction_conn, &workspace_context.schema_name)?; Ok(version_id) })?; - - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - config_version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(config_version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &response, diff --git a/crates/context_aware_config/src/api/default_config/handlers.rs b/crates/context_aware_config/src/api/default_config/handlers.rs index a1074e215..889220de6 100644 --- a/crates/context_aware_config/src/api/default_config/handlers.rs +++ b/crates/context_aware_config/src/api/default_config/handlers.rs @@ -43,8 +43,6 @@ use superposition_types::{ result as superposition, }; -#[cfg(feature = "high-performance-mode")] -use crate::helpers::put_config_in_redis; use crate::{ api::{ context::helpers::validation_function_executor, @@ -53,7 +51,7 @@ use crate::{ types::FunctionInfo, }, }, - helpers::{add_config_version, validate_change_reason}, + helpers::{add_config_version, put_config_in_redis, validate_change_reason}, }; pub fn endpoints() -> Scope { @@ -166,14 +164,7 @@ async fn create_handler( Ok(version_id) })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &default_config, @@ -322,14 +313,7 @@ async fn update_handler( Ok((val, version_id)) })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &db_row, @@ -557,14 +541,7 @@ async fn delete_handler( } })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &default_config, diff --git a/crates/context_aware_config/src/api/dimension/handlers.rs b/crates/context_aware_config/src/api/dimension/handlers.rs index bb3ad8a17..8745aaa44 100644 --- a/crates/context_aware_config/src/api/dimension/handlers.rs +++ b/crates/context_aware_config/src/api/dimension/handlers.rs @@ -38,7 +38,6 @@ use superposition_types::{ }; use crate::api::dimension::validations::allow_primitive_types; -#[cfg(feature = "high-performance-mode")] use crate::helpers::put_config_in_redis; use crate::{ api::dimension::{ @@ -242,14 +241,7 @@ async fn create_handler( } })?; - #[cfg(feature = "high-performance-mode")] - 
put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &inserted_dimension, @@ -480,14 +472,7 @@ async fn update_handler( Ok((result, is_mandatory, version_id)) })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &result, @@ -668,14 +653,7 @@ async fn delete_handler( } })?; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( - version_id, - &state, - &workspace_context.schema_name, - &mut conn, - ) - .await?; + put_config_in_redis(_version_id, state, &schema_name, &mut conn).await?; let data = WebhookData { payload: &dimension_data, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index 67554bc7e..f5600b95f 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -4,11 +4,9 @@ use actix_web::{ http::header::{HeaderMap, HeaderName, HeaderValue}, web::Data, }; -#[cfg(feature = "high-performance-mode")] -use chrono::DateTime; -use chrono::Utc; +use bigdecimal::{BigDecimal, Num}; +use chrono::{DateTime, Utc}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; -#[cfg(feature = "high-performance-mode")] use fred::interfaces::KeysInterface; use serde_json::{Map, Value, json}; use service_utils::{ @@ -16,7 +14,6 @@ use service_utils::{ service::types::{AppState, EncryptionKey, SchemaName, WorkspaceContext}, }; use superposition_macros::{db_error, unexpected_error, validation_error}; -#[cfg(feature = "high-performance-mode")] use superposition_types::database::schema::event_log::dsl as event_log; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DefaultConfigInfo, @@ -43,7 +40,6 @@ use superposition_types::{ result as superposition, }; -#[cfg(feature = "high-performance-mode")] use uuid::Uuid; use crate::{ @@ -226,13 +222,31 @@ pub fn add_config_version( Ok(version_id) } -#[cfg(feature = "high-performance-mode")] +pub fn get_workspace( + workspace_schema_name: &String, + db_conn: &mut DBConnection, +) -> superposition::Result { + let workspace = workspaces::dsl::workspaces + .filter(workspaces::workspace_schema_name.eq(workspace_schema_name)) + .get_result::(db_conn)?; + Ok(workspace) +} + pub async fn put_config_in_redis( version_id: i64, state: &Data, schema_name: &SchemaName, db_conn: &mut DBConnection, ) -> superposition::Result<()> { + // Only perform Redis operations if Redis is configured + let redis_pool = match &state.redis { + Some(pool) => pool, + None => { + log::debug!("Redis not configured, skipping cache update"); + return Ok(()); + } + }; + let raw_config = generate_cac(db_conn, schema_name)?; let parsed_config = serde_json::to_string(&json!(raw_config)).map_err(|e| { log::error!("failed to convert cac config to string: {}", e); @@ -243,12 +257,10 @@ pub async fn put_config_in_redis( let audit_id_key = format!("{}::cac_config::audit_id", **schema_name); let config_version_key = format!("{}::cac_config::config_version", **schema_name); let last_modified = DateTime::to_rfc2822(&Utc::now()); - let _ = state - .redis + let _ = redis_pool .set::<(), String, String>(config_key, parsed_config, None, None, false) .await; - let _ = state - .redis + let _ = 
redis_pool .set::<(), String, String>(last_modified_at_key, last_modified, None, None, false) .await; if let Ok(uuid) = event_log::event_log @@ -257,13 +269,11 @@ pub async fn put_config_in_redis( .order_by(event_log::timestamp.desc()) .first::(db_conn) { - let _ = state - .redis + let _ = redis_pool .set::<(), String, String>(audit_id_key, uuid.to_string(), None, None, false) .await; } - let _ = state - .redis + let _ = redis_pool .set::<(), String, i64>(config_version_key, version_id, None, None, false) .await; Ok(()) diff --git a/crates/service_utils/Cargo.toml b/crates/service_utils/Cargo.toml index 067d52ff0..3f00a440d 100644 --- a/crates/service_utils/Cargo.toml +++ b/crates/service_utils/Cargo.toml @@ -17,7 +17,7 @@ base64 = { workspace = true } chrono = { workspace = true } derive_more = { workspace = true } diesel = { workspace = true } -fred = { workspace = true, optional = true } +fred = { workspace = true } futures-util = { workspace = true } log = { workspace = true } once_cell = { workspace = true } @@ -42,7 +42,7 @@ urlencoding = "~2.1.2" tracing-actix-web = { workspace = true } [features] -high-performance-mode = ["dep:fred"] + [lints] workspace = true diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index 4ae3804ae..f5ad16a6c 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -53,8 +53,7 @@ pub struct AppState { pub tenant_middleware_exclusion_list: HashSet, pub service_prefix: String, pub superposition_token: String, - #[cfg(feature = "high-performance-mode")] - pub redis: fred::clients::RedisPool, + pub redis: Option, pub http_client: reqwest::Client, pub master_encryption_key: Option, } diff --git a/crates/superposition/Cargo.toml b/crates/superposition/Cargo.toml index 9126862b8..45c2d8aed 100644 --- a/crates/superposition/Cargo.toml +++ b/crates/superposition/Cargo.toml @@ -17,7 +17,7 @@ context_aware_config = { path = "../context_aware_config" } diesel = { workspace = true } dotenv = "0.15.0" experimentation_platform = { path = "../experimentation_platform" } -fred = { workspace = true, optional = true } +fred = { workspace = true } frontend = { path = "../frontend" } idgenerator = "2.0.0" leptos = { workspace = true } @@ -43,11 +43,7 @@ tracing-actix-web = { workspace = true } json-subscriber = { version = "0.2.7", features = ["tracing-log"] } [features] -high-performance-mode = [ - "context_aware_config/high-performance-mode", - "service_utils/high-performance-mode", - "dep:fred", -] + [lints] workspace = true diff --git a/crates/superposition/src/app_state.rs b/crates/superposition/src/app_state.rs index 016a1d3fb..838ae3a9b 100644 --- a/crates/superposition/src/app_state.rs +++ b/crates/superposition/src/app_state.rs @@ -1,12 +1,11 @@ use std::{ collections::HashSet, sync::{Arc, Mutex}, + time::Duration, }; -#[cfg(feature = "high-performance-mode")] -use std::time::Duration; +use context_aware_config::helpers::get_meta_schema; -#[cfg(feature = "high-performance-mode")] use fred::{ clients::RedisPool, interfaces::ClientLike, @@ -39,41 +38,45 @@ pub async fn get( let snowflake_generator = Arc::new(Mutex::new(SnowflakeIdGenerator::new(1, 1))); - #[cfg(feature = "high-performance-mode")] - let redis_pool = { - let redis_url = - get_from_env_or_default("REDIS_URL", String::from("http://localhost:6379")); - let redis_pool_size = get_from_env_or_default("REDIS_POOL_SIZE", 10); - let redis_max_attempts = get_from_env_or_default("REDIS_MAX_ATTEMPTS", 10); - let 
redis_connection_timeout = - get_from_env_or_default("REDIS_CONN_TIMEOUT", 1000); - let config = RedisConfig::from_url(&redis_url).unwrap_or_else(|_| { - panic!("Failed to create RedisConfig from url {}", redis_url) - }); - let reconnect_policy = ReconnectPolicy::new_constant(redis_max_attempts, 100); - let redis_pool = RedisPool::new( - config, - Some(PerformanceConfig { - auto_pipeline: true, - ..Default::default() - }), - Some(ConnectionConfig { - connection_timeout: Duration::from_millis(redis_connection_timeout), - ..Default::default() - }), - Some(reconnect_policy), - redis_pool_size, - ) - .map_err(|e| format!("Could not connect to redis due to {e}")) - .unwrap(); + // Initialize Redis pool only if REDIS_URL is explicitly set (not default) + let redis_pool = match std::env::var("REDIS_URL") { + Ok(redis_url) if !redis_url.is_empty() => { + let redis_pool_size = get_from_env_or_default("REDIS_POOL_SIZE", 10); + let redis_max_attempts = get_from_env_or_default("REDIS_MAX_ATTEMPTS", 10); + let redis_connection_timeout = + get_from_env_or_default("REDIS_CONN_TIMEOUT", 1000); + let config = RedisConfig::from_url(&redis_url).expect( + format!("Failed to create RedisConfig from url {}", redis_url).as_str(), + ); + let reconnect_policy = ReconnectPolicy::new_constant(redis_max_attempts, 100); + let redis_pool = RedisPool::new( + config, + Some(PerformanceConfig { + auto_pipeline: true, + ..Default::default() + }), + Some(ConnectionConfig { + connection_timeout: Duration::from_millis(redis_connection_timeout), + ..Default::default() + }), + Some(reconnect_policy), + redis_pool_size, + ) + .map_err(|e| format!("Could not connect to redis due to {e}")) + .unwrap(); - redis_pool.connect(); - redis_pool - .wait_for_connect() - .await - .expect("Failed to connect to Redis"); + redis_pool.connect(); + redis_pool + .wait_for_connect() + .await + .expect("Failed to connect to Redis"); - redis_pool + Some(redis_pool) + } + _ => { + log::info!("REDIS_URL not set, Redis caching disabled"); + None + } }; AppState { @@ -106,7 +109,6 @@ pub async fn get( .collect::>(), service_prefix, superposition_token: get_superposition_token(kms_client, &app_env).await, - #[cfg(feature = "high-performance-mode")] redis: redis_pool, http_client: reqwest::Client::new(), master_encryption_key, diff --git a/crates/superposition_core/include/superposition_core.h b/crates/superposition_core/include/superposition_core.h new file mode 100644 index 000000000..82c759bd7 --- /dev/null +++ b/crates/superposition_core/include/superposition_core.h @@ -0,0 +1,63 @@ +#include +#include +#include +#include +k +/** + * # Safety + * + * Caller ensures that `ebuf` is a sufficiently long buffer to store the + * error message. + */ +char *core_get_resolved_config(const char *default_config_json, + const char *contexts_json, + const char *overrides_json, + const char *dimensions, + const char *query_data_json, + const char *merge_strategy_str, + const char *filter_prefixes_json, + const char *experimentation_json, + char *ebuf); + +/** + * # Safety + * + * Caller ensures that `ebuf` is a sufficiently long buffer to store the + * error message. 
+ */ +char *core_get_resolved_config_with_reasoning(const char *default_config_json, + const char *contexts_json, + const char *overrides_json, + const char *dimensions, + const char *query_data_json, + const char *merge_strategy_str, + const char *filter_prefixes_json, + const char *experimentation_json, + char *ebuf); + +int32_t core_test_connection(void); + +/** + * # Safety + * + * This function is unsafe because: + * - `s` must be a valid pointer to a C string previously allocated by this library + * - `s` must not be null + * - The caller must ensure `s` is not used after this function is called + * - Double-free will cause undefined behavior + */ +void core_free_string(char *s); + +/** + * # Safety + * + * Caller ensures that `ebuf` is a sufficiently long buffer to store the + * error message. + */ +char *core_get_applicable_variants(const char *experiments_json, + const char *experiment_groups_json, + const char *dimensions, + const char *query_data_json, + const char *identifier, + const char *filter_prefixes_json, + char *ebuf); From 8de6284e6e28dbdf1a8dc84bba997ecc5c15ee0d Mon Sep 17 00:00:00 2001 From: datron Date: Wed, 19 Nov 2025 12:07:11 +0530 Subject: [PATCH 02/22] feat: perform redis reads when getting configs --- .env.example | 6 + .../src/api/config/handlers.rs | 173 ++++++------------ .../src/api/context/handlers.rs | 19 +- crates/context_aware_config/src/helpers.rs | 63 ++++++- docker-compose.yaml | 16 +- 5 files changed, 144 insertions(+), 133 deletions(-) diff --git a/.env.example b/.env.example index 35440abbb..254cb407a 100644 --- a/.env.example +++ b/.env.example @@ -38,6 +38,12 @@ AUTH_PROVIDER=DISABLED AUTH_Z_PROVIDER=DISABLED WORKER_ID=1 # MASTER_ENCRYPTION_KEY - add this for enabling secrets in local +ENCRYPTED_KEYS="" # Used for webhook secrets, API keys, etc. 
+# ENCRYPTED_KEYS="SERVICE1_API_KEY,SERVICE2_API_KEY" +# REDIS_URL="" +# REDIS_POOL_SIZE="10" +# REDIS_MAX_ATTEMPTS="10" +# REDIS_CONN_TIMEOUT="1000" ################################################ ## Following values are to be set in KMS and not directly in ENV diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 8072be615..1b80e8dc2 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -27,7 +27,7 @@ use superposition_core::{ serialize_to_toml, }; use superposition_derives::authorized; -use superposition_macros::{bad_argument, db_error, response_error, unexpected_error}; +use superposition_macros::{bad_argument, db_error, unexpected_error}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DimensionInfo, OverrideWithKeys, Overrides, PaginatedResponse, User, @@ -51,12 +51,11 @@ use superposition_types::{ use crate::api::context::{self, helpers::query_description}; use crate::{ - api::config::helpers::{ - add_audit_id_to_header, add_config_version_to_header, - add_last_modified_to_header, generate_config_from_version, get_config_version, - get_max_created_at, is_not_modified, + api::{ + context::{self, helpers::query_description}, + dimension::fetch_dimensions_info_map, }, - helpers::{generate_cac, generate_detailed_cac}, + helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, }; use super::helpers::{apply_prefix_filter_to_config, resolve, setup_query_data}; @@ -70,7 +69,6 @@ pub fn endpoints() -> Scope { .service(reduce_handler) .service(list_version_handler) .service(get_version_handler); - .service(get_fast_handler); } fn generate_subsets(map: &Map) -> Vec> { @@ -475,106 +473,6 @@ async fn reduce_handler( Ok(HttpResponse::Ok().json(config)) } -#[get("/fast")] -async fn get_fast_handler( - workspace_context: WorkspaceContext, - state: Data, -) -> superposition::Result { - use fred::interfaces::MetricsInterface; - - // Only use Redis if it's configured - let redis_pool = match &state.redis { - Some(pool) => pool, - None => { - return Err(response_error!( - StatusCode::SERVICE_UNAVAILABLE, - "Redis not configured, fast config endpoint unavailable" - )); - } - }; - - log::debug!("Started redis fetch"); - let config_key = format!("{}::cac_config", *schema_name); - let last_modified_at_key = format!("{}::cac_config::last_modified_at", *schema_name); - let audit_id_key = format!("{}::cac_config::audit_id", *schema_name); - let config_version_key = format!("{}::cac_config::config_version", *schema_name); - let client = redis_pool.next_connected(); - let config = client.get::(config_key).await; - let metrics = client.take_latency_metrics(); - let network_metrics = client.take_network_latency_metrics(); - log::trace!( - "Network metrics for config fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", - network_metrics.max, - network_metrics.min, - network_metrics.avg, - metrics.max, - metrics.min, - metrics.avg - ); - match config { - Ok(config) => { - let mut response = HttpResponse::Ok(); - if let Ok(max_created_at) = - client.get::(last_modified_at_key).await - { - let metrics = client.take_latency_metrics(); - let network_metrics = client.take_network_latency_metrics(); - log::trace!( - "Network metrics max-created-by fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", - network_metrics.max, - 
network_metrics.min, - network_metrics.avg, - metrics.max, - metrics.min, - metrics.avg - ); - response - .insert_header((AppHeader::LastModified.to_string(), max_created_at)); - } - if let Ok(audit_id) = client.get::(audit_id_key).await { - let metrics = client.take_latency_metrics(); - let network_metrics = client.take_network_latency_metrics(); - log::trace!( - "Network metrics for audit ID in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", - network_metrics.max, - network_metrics.min, - network_metrics.avg, - metrics.max, - metrics.min, - metrics.avg - ); - response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); - } - if let Ok(config_version) = - client.get::, String>(config_version_key).await - { - let metrics = client.take_latency_metrics(); - let network_metrics = client.take_network_latency_metrics(); - log::trace!( - "Network metrics for version ID in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", - network_metrics.max, - network_metrics.min, - network_metrics.avg, - metrics.max, - metrics.min, - metrics.avg - ); - add_config_version_to_header(&config_version, &mut response); - } - response.insert_header(ContentType::json()); - Ok(response.body(config)) - } - Err(err) => { - log::error!("Could not get config in redis due to {}", err); - Err(response_error!( - StatusCode::INTERNAL_SERVER_ERROR, - "could not fetch config, please try /config API" - )) - } - } -} - -#[authorized] #[routes] #[get("")] #[post("")] @@ -585,9 +483,65 @@ async fn get_handler( dimension_params: DimensionQuery, query_filters: superposition_query::Query, workspace_context: WorkspaceContext, + state: Data, ) -> superposition::Result { let DbConnection(mut conn) = db_conn; + let mut response = HttpResponse::Ok(); + let is_smithy = req.method() == actix_web::http::Method::GET; + + if let Some(ref redis_pool) = state.redis { + let schema_name = workspace_context.schema_name; + let client = redis_pool.next_connected(); + let last_modified_at_key = format!("{}{LAST_MODIFIED_KEY_SUFFIX}", *schema_name); + let audit_id_key = format!("{}{AUDIT_ID_KEY_SUFFIX}", *schema_name); + let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", *schema_name); + let audit_id: String = client.get(&audit_id_key).await.map_err(|e| { + log::error!( + "failed to fetch audit id from redis for schema {}: {}", + *schema_name, + e + ); + unexpected_error!("failed to fetch audit id from redis") + })?; + let last_modified_at = client + .get::(last_modified_at_key) + .await + .map(|time| { + DateTime::parse_from_rfc2822(&time) + .map_err(|err| { + log::error!("Error occurred while parsing last_modified: {}", err) + }) + .ok() + .map(|dt| dt.with_timezone(&Utc)) + }) + .map_err(|e| { + log::error!( + "failed to fetch last modified at from redis for schema {}: {}", + *schema_name, + e + ); + unexpected_error!("failed to fetch last modified at from redis") + })?; + let version = client.get(&config_version_key).await.map_err(|e| { + log::error!( + "failed to fetch config version from redis for schema {}: {}", + *schema_name, + e + ); + unexpected_error!("failed to fetch config version from redis") + })?; + let config = + get_config_from_redis(&schema_name, redis_pool, Some(client)).await?; + + add_last_modified_to_header(last_modified_at, is_smithy, &mut response); + response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); + add_config_version_to_header(&version, &mut response); + return Ok(response.json(config)); + } + + // if 
fast mode isn't enabled, read from DB + let max_created_at = get_max_created_at(&mut conn, &workspace_context.schema_name) .map_err(|e| log::error!("failed to fetch max timestamp from event_log: {e}")) .ok(); @@ -610,20 +564,15 @@ async fn get_handler( )?; config = apply_prefix_filter_to_config(&query_filters.prefix, config)?; - let is_smithy: bool; let context = if req.method() == actix_web::http::Method::GET { - is_smithy = false; dimension_params.into_inner() } else { - // Assuming smithy. - is_smithy = true; body.map_or_else(QueryMap::default, |body| body.into_inner().context.into()) }; if !context.is_empty() { config = config.filter_by_dimensions(&context); } - let mut response = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut response); add_audit_id_to_header(&mut conn, &mut response, &workspace_context.schema_name); add_config_version_to_header(&version, &mut response); diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 899eaf938..2f09f3c9a 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -48,17 +48,20 @@ use superposition_types::{ result::{self as superposition, AppError}, }; -use crate::helpers::add_config_version; -#[cfg(feature = "high-performance-mode")] -use crate::helpers::put_config_in_redis; use crate::{ - api::context::{ - helpers::{query_description, validate_ctx}, - operations, + api::{ + context::{ + hash, + helpers::{query_description, validate_ctx}, + operations, + }, + dimension::fetch_dimensions_info_map, + }, + helpers::{ + add_config_version, calculate_context_weight, put_config_in_redis, + validate_change_reason, }, - dimension::fetch_dimensions_info_map, }; -use crate::helpers::{add_config_version, calculate_context_weight, put_config_in_redis}; pub fn endpoints() -> Scope { Scope::new("") diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index f5600b95f..e8ecd99b0 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -7,7 +7,10 @@ use actix_web::{ use bigdecimal::{BigDecimal, Num}; use chrono::{DateTime, Utc}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; -use fred::interfaces::KeysInterface; +use fred::{ + interfaces::KeysInterface, + prelude::{RedisClient, RedisPool}, +}; use serde_json::{Map, Value, json}; use service_utils::{ helpers::{fetch_dimensions_info_map, generate_snowflake_id}, @@ -53,6 +56,11 @@ use crate::{ validation_functions::execute_fn, }; +pub const LAST_MODIFIED_KEY_SUFFIX: &str = "::cac_config::last_modified_at"; +pub const AUDIT_ID_KEY_SUFFIX: &str = "::cac_config::audit_id"; +pub const CONFIG_VERSION_KEY_SUFFIX: &str = "::cac_config::config_version"; +pub const CONFIG_KEY_SUFFIX: &str = "::cac_config"; + pub fn parse_headermap_safe(headermap: &HeaderMap) -> HashMap { let mut req_headers = HashMap::new(); let record_header = |(header_name, header_val): (&HeaderName, &HeaderValue)| { @@ -232,6 +240,50 @@ pub fn get_workspace( Ok(workspace) } +pub async fn get_config_from_redis( + schema_name: &SchemaName, + redis_pool: &RedisPool, + redis_client: Option<&RedisClient>, +) -> superposition::Result { + use fred::interfaces::MetricsInterface; + + log::debug!("Started redis fetch for config"); + let config_key = format!("{}{CONFIG_KEY_SUFFIX}", **schema_name); + let config = { + // this block is so that the client 
connection is dropped + // before we move on to parsing the config + let client = match redis_client { + Some(client) => client, + None => redis_pool.next_connected(), + }; + let config = client + .get::(config_key) + .await + .map_err(|e| { + log::error!("Failed to fetch config from redis: {}", e); + unexpected_error!("Failed to fetch config from redis due to: {}", e) + })?; + let metrics = client.take_latency_metrics(); + let network_metrics = client.take_network_latency_metrics(); + log::trace!( + "Network metrics for config fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", + network_metrics.max, + network_metrics.min, + network_metrics.avg, + metrics.max, + metrics.min, + metrics.avg + ); + config + }; + + let config = serde_json::from_str(&config).map_err(|e| { + log::error!("Failed to parse config from redis: {}", e); + unexpected_error!("Failed to parse config from redis due to: {}", e) + })?; + Ok(config) +} + pub async fn put_config_in_redis( version_id: i64, state: &Data, @@ -252,10 +304,10 @@ pub async fn put_config_in_redis( log::error!("failed to convert cac config to string: {}", e); unexpected_error!("could not convert cac config to string") })?; - let config_key = format!("{}::cac_config", **schema_name); - let last_modified_at_key = format!("{}::cac_config::last_modified_at", **schema_name); - let audit_id_key = format!("{}::cac_config::audit_id", **schema_name); - let config_version_key = format!("{}::cac_config::config_version", **schema_name); + let config_key = format!("{}{CONFIG_KEY_SUFFIX}", **schema_name); + let last_modified_at_key = format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name); + let audit_id_key = format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name); + let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", **schema_name); let last_modified = DateTime::to_rfc2822(&Utc::now()); let _ = redis_pool .set::<(), String, String>(config_key, parsed_config, None, None, false) @@ -266,6 +318,7 @@ pub async fn put_config_in_redis( if let Ok(uuid) = event_log::event_log .select(event_log::id) .filter(event_log::table_name.eq("contexts")) + .schema_name(schema_name) .order_by(event_log::timestamp.desc()) .first::(db_conn) { diff --git a/docker-compose.yaml b/docker-compose.yaml index 2fa044bf4..60cbc3e60 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -23,14 +23,14 @@ services: # AWS_DEFAULT_REGION: ap-south-1 # EDGE_PORT: 4566 -# redis: -# image: redis:7 -# container_name: superposition_redis -# restart: on-failure -# ports: -# - 6379:6379 -# command: redis-server -# network_mode: bridge + redis: + image: redis:7 + container_name: superposition_redis + restart: on-failure + ports: + - 6379:6379 + command: redis-server + network_mode: bridge # app: # image: ghcr.io/juspay/superposition:latest From cd147caae7f6e2d00d6537d882b1fd47ef8d9659 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 18 Dec 2025 19:28:43 +0530 Subject: [PATCH 03/22] feat: introduce writeback methods for redis - cache get config API responses - cache get experiment list responses - cache get experiment groups response --- .env.example | 2 + Cargo.lock | 1 + .../src/api/config/handlers.rs | 444 ++++++++++++++---- .../src/api/context/handlers.rs | 30 +- .../src/api/default_config/handlers.rs | 16 +- .../src/api/dimension/handlers.rs | 17 +- crates/context_aware_config/src/helpers.rs | 101 ++-- crates/experimentation_platform/Cargo.toml | 1 + .../src/api/experiment_groups/handlers.rs | 107 ++++- 
.../src/api/experiment_groups/helpers.rs | 47 +- .../src/api/experiments/handlers.rs | 165 ++++++- .../src/api/experiments/helpers.rs | 56 ++- crates/service_utils/Cargo.toml | 4 +- crates/service_utils/src/lib.rs | 1 + crates/service_utils/src/redis.rs | 112 +++++ crates/service_utils/src/service.rs | 23 + crates/service_utils/src/service/types.rs | 21 +- crates/superposition/src/app_state.rs | 6 +- .../include/superposition_core.h | 2 +- 19 files changed, 941 insertions(+), 215 deletions(-) create mode 100644 crates/service_utils/src/redis.rs diff --git a/.env.example b/.env.example index 254cb407a..d1149b324 100644 --- a/.env.example +++ b/.env.example @@ -44,6 +44,8 @@ ENCRYPTED_KEYS="" # Used for webhook secrets, API keys, etc. # REDIS_POOL_SIZE="10" # REDIS_MAX_ATTEMPTS="10" # REDIS_CONN_TIMEOUT="1000" +# TTL in seconds +# REDIS_KEY_TTL=604800 ################################################ ## Following values are to be set in KMS and not directly in ENV diff --git a/Cargo.lock b/Cargo.lock index 3e5c8a95a..dfb443d3c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2281,6 +2281,7 @@ dependencies = [ "cac_client", "chrono", "experimentation_client", + "fred", "juspay_diesel", "log", "reqwest", diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 1b80e8dc2..e8552ad19 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -8,26 +8,31 @@ use actix_web::{ web::{Data, Header, Json, Path, Query}, HttpRequest, HttpResponse, HttpResponseBuilder, Scope, }; -use cac_client::{eval_cac, eval_cac_with_reasoning}; use chrono::{DateTime, Timelike, Utc}; use diesel::{ dsl::max, BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper, }; -use fred::interfaces::KeysInterface; use itertools::Itertools; use serde_json::{json, Map, Value}; #[cfg(feature = "jsonlogic")] use service_utils::helpers::extract_dimensions; -use service_utils::service::types::{ - AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext, +use service_utils::{ + redis::{ + fetch_from_redis_else_writeback, AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, + CONFIG_VERSION_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, + }, + service::{ + get_db_connection, + types::{AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext}, + }, }; use superposition_core::{ helpers::{calculate_context_weight, hash}, serialize_to_toml, }; use superposition_derives::authorized; -use superposition_macros::{bad_argument, db_error, unexpected_error}; +use superposition_macros::{bad_argument, db_error, not_found, unexpected_error}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DimensionInfo, OverrideWithKeys, Overrides, PaginatedResponse, User, @@ -44,11 +49,14 @@ use superposition_types::{ ChangeReason, cac::{ConfigVersion, ConfigVersionListItem}, }, - schema::config_versions::dsl as config_versions, + schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, + superposition_schema::superposition::workspaces, }, result as superposition, }; +use uuid::Uuid; +<<<<<<< HEAD use crate::api::context::{self, helpers::query_description}; use crate::{ api::{ @@ -56,12 +64,19 @@ use crate::{ dimension::fetch_dimensions_info_map, }, helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, +======= +use crate::api::{ + context::{self, helpers::query_description}, + dimension::fetch_dimensions_info_map, +>>>>>>> 269cf29d (feat: 
introduce writeback methods for redis) }; +use crate::helpers::{calculate_context_weight, generate_cac}; use super::helpers::{apply_prefix_filter_to_config, resolve, setup_query_data}; #[allow(clippy::let_and_return)] pub fn endpoints() -> Scope { +<<<<<<< HEAD let scope = Scope::new("") .service(get_handler) .service(get_toml_handler) @@ -69,6 +84,192 @@ pub fn endpoints() -> Scope { .service(reduce_handler) .service(list_version_handler) .service(get_version_handler); +======= + Scope::new("") + .service(get_handler) + .service(resolve_handler) + .service(reduce_handler) + .service(list_version_handler) + .service(get_version_handler) +} + +fn get_config_version_from_workspace( + workspace_context: &WorkspaceContext, + conn: &mut DBConnection, +) -> Option { + match workspaces::dsl::workspaces + .select(workspaces::config_version) + .filter( + workspaces::organisation_id + .eq(&workspace_context.organisation_id.0) + .and(workspaces::workspace_name.eq(&workspace_context.workspace_id.0)), + ) + .get_result::>(conn) + { + Ok(version) => version, + Err(e) => { + log::error!( + "Failed to get config_version for org_id: {}, workspace_name: {} — {:?}", + workspace_context.organisation_id.0, + workspace_context.workspace_id.0, + e + ); + None + } + } +} + +fn get_config_version_from_db( + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> Result { + config_versions::config_versions + .select(config_versions::id) + .order_by(config_versions::created_at.desc()) + .schema_name(schema_name) + .first::(conn) +} + +fn get_config_version( + version: &Option, + workspace_context: &WorkspaceContext, + conn: &mut DBConnection, +) -> superposition::Result { + match version.as_ref() { + Some(v) if *v != *"latest" => v.parse::().map_or_else( + |e| { + log::error!("failed to decode version as integer: {v}, error: {e}"); + Err(bad_argument!("version is not of type integer")) + }, + Ok, + ), + _ => match get_config_version_from_workspace(workspace_context, conn) { + Some(v) => Ok(v), + None => get_config_version_from_db(conn, &workspace_context.schema_name) + .map_err(|e| { + log::error!("failed to fetch latest config version from db: {e}"); + db_error!(e) + }), + }, + } +} + +pub fn fetch_audit_id( + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> Option { + event_log::event_log + .select(event_log::id) + .filter(event_log::table_name.eq("contexts")) + .order_by(event_log::timestamp.desc()) + .schema_name(schema_name) + .first::(conn) + .map(|uuid| uuid.to_string()) + .ok() +} + +fn add_last_modified_to_header( + max_created_at: Option>, + is_smithy: bool, + resp_builder: &mut HttpResponseBuilder, +) { + if let Some(date) = max_created_at { + let value = if is_smithy { + // Smithy needs to be in this format otherwise they can't + // deserialize it. 
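+            // (Non-Smithy GET callers get the RFC 2822 form in the branch below.)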
+ HeaderValue::from_str(date.to_rfc3339().as_str()) + } else { + HeaderValue::from_str(date.to_rfc2822().as_str()) + }; + if let Ok(header_value) = value { + resp_builder + .insert_header((AppHeader::LastModified.to_string(), header_value)); + } else { + log::error!("failed parsing datetime_utc {:?}", value); + } + } +} + +fn add_config_version_to_header( + config_version: &Option, + resp_builder: &mut HttpResponseBuilder, +) { + if let Some(val) = config_version { + resp_builder.insert_header(( + AppHeader::XConfigVersion.to_string(), + val.clone().to_string(), + )); + } +} + +fn get_max_created_at( + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> Result, diesel::result::Error> { + config_versions::config_versions + .select(max(config_versions::created_at)) + .schema_name(schema_name) + .first::>>(conn) + .and_then(|res| res.ok_or(diesel::result::Error::NotFound)) +} + +fn is_not_modified(max_created_at: Option>, req: &HttpRequest) -> bool { + let nanosecond_erasure = |t: DateTime| t.with_nanosecond(0); + let last_modified = req + .headers() + .get("If-Modified-Since") + .and_then(|header_val| { + let header_str = header_val.to_str().ok()?; + DateTime::parse_from_rfc2822(header_str) + .map(|datetime| datetime.with_timezone(&Utc)) + .ok() + }) + .and_then(nanosecond_erasure); + log::info!("last modified {last_modified:?}"); + let parsed_max: Option> = max_created_at.and_then(nanosecond_erasure); + max_created_at.is_some() && parsed_max <= last_modified +} + +pub fn generate_config_from_version( + version: &mut Option, + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> superposition::Result { + if let Some(val) = version { + let config = config_versions::config_versions + .select(config_versions::config) + .filter(config_versions::id.eq(*val)) + .schema_name(schema_name) + .get_result::(conn) + .map_err(|err| { + log::error!("failed to fetch config with error: {}", err); + db_error!(err) + })?; + serde_json::from_value::(config).map_err(|err| { + log::error!("failed to decode config: {}", err); + unexpected_error!("failed to decode config") + }) + } else { + match config_versions::config_versions + .select((config_versions::id, config_versions::config)) + .order(config_versions::created_at.desc()) + .schema_name(schema_name) + .first::<(i64, Value)>(conn) + { + Ok((latest_version, config)) => { + *version = Some(latest_version); + serde_json::from_value::(config).or_else(|err| { + log::error!("failed to decode config: {}", err); + generate_cac(conn, schema_name) + }) + } + Err(err) => { + log::error!("failed to find latest config: {err}"); + generate_cac(conn, schema_name) + } + } + } +>>>>>>> 269cf29d (feat: introduce writeback methods for redis) } fn generate_subsets(map: &Map) -> Vec> { @@ -473,78 +674,36 @@ async fn reduce_handler( Ok(HttpResponse::Ok().json(config)) } +#[authorized] #[routes] #[get("")] #[post("")] async fn get_handler( req: HttpRequest, body: Option>, - db_conn: DbConnection, dimension_params: DimensionQuery, query_filters: superposition_query::Query, workspace_context: WorkspaceContext, state: Data, ) -> superposition::Result { - let DbConnection(mut conn) = db_conn; - let mut response = HttpResponse::Ok(); - let is_smithy = req.method() == actix_web::http::Method::GET; - - if let Some(ref redis_pool) = state.redis { - let schema_name = workspace_context.schema_name; - let client = redis_pool.next_connected(); - let last_modified_at_key = format!("{}{LAST_MODIFIED_KEY_SUFFIX}", *schema_name); - let audit_id_key = 
format!("{}{AUDIT_ID_KEY_SUFFIX}", *schema_name); - let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", *schema_name); - let audit_id: String = client.get(&audit_id_key).await.map_err(|e| { - log::error!( - "failed to fetch audit id from redis for schema {}: {}", - *schema_name, - e - ); - unexpected_error!("failed to fetch audit id from redis") - })?; - let last_modified_at = client - .get::(last_modified_at_key) - .await - .map(|time| { - DateTime::parse_from_rfc2822(&time) - .map_err(|err| { - log::error!("Error occurred while parsing last_modified: {}", err) - }) - .ok() - .map(|dt| dt.with_timezone(&Utc)) + let is_smithy = req.method() != actix_web::http::Method::GET; + let schema_name = workspace_context.schema_name.clone(); + let max_created_at = fetch_from_redis_else_writeback::>( + format!("{}{LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_max_created_at(&mut conn, &schema_name).map_err(|e| { + log::error!("failed to fetch max timestamp from event_log: {e}"); + db_error!(e) }) - .map_err(|e| { - log::error!( - "failed to fetch last modified at from redis for schema {}: {}", - *schema_name, - e - ); - unexpected_error!("failed to fetch last modified at from redis") - })?; - let version = client.get(&config_version_key).await.map_err(|e| { - log::error!( - "failed to fetch config version from redis for schema {}: {}", - *schema_name, - e - ); - unexpected_error!("failed to fetch config version from redis") - })?; - let config = - get_config_from_redis(&schema_name, redis_pool, Some(client)).await?; - - add_last_modified_to_header(last_modified_at, is_smithy, &mut response); - response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); - add_config_version_to_header(&version, &mut response); - return Ok(response.json(config)); - } - - // if fast mode isn't enabled, read from DB - - let max_created_at = get_max_created_at(&mut conn, &workspace_context.schema_name) - .map_err(|e| log::error!("failed to fetch max timestamp from event_log: {e}")) - .ok(); + }, + ) + .await + .ok(); log::info!("Max created at: {max_created_at:?}"); @@ -555,14 +714,35 @@ async fn get_handler( } let query_filters = query_filters.into_inner(); - let mut version = get_config_version(&query_filters.version, &workspace_context)?; - - let mut config = generate_config_from_version( - &mut version, - &mut conn, - &workspace_context.schema_name, - )?; - + let version = fetch_from_redis_else_writeback::( + format!("{}{CONFIG_VERSION_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_config_version(&query_filters.version, &workspace_context, &mut conn) + }, + ) + .await + .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + + let mut config = fetch_from_redis_else_writeback::( + format!("{}::{}{CONFIG_KEY_SUFFIX}", schema_name.0, version,), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + generate_config_from_version( + &mut Some(version), + &mut conn, + &workspace_context.schema_name, + ) + }, + ) + .await + .map_err(|e| unexpected_error!("failed to generate config: {}", e))?; config = apply_prefix_filter_to_config(&query_filters.prefix, config)?; let context = if req.method() == actix_web::http::Method::GET { 
dimension_params.into_inner() @@ -572,11 +752,29 @@ async fn get_handler( if !context.is_empty() { config = config.filter_by_dimensions(&context); } - add_last_modified_to_header(max_created_at, is_smithy, &mut response); +<<<<<<< HEAD add_audit_id_to_header(&mut conn, &mut response, &workspace_context.schema_name); add_config_version_to_header(&version, &mut response); +======= + if let Ok(audit_id) = fetch_from_redis_else_writeback::( + format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + fetch_audit_id(&mut conn, &workspace_context.schema_name) + .ok_or(not_found!("Audit ID not found")) + }, + ) + .await + { + response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); + } + add_config_version_to_header(&Some(version), &mut response); +>>>>>>> 269cf29d (feat: introduce writeback methods for redis) Ok(response.json(config)) } @@ -624,46 +822,96 @@ async fn resolve_handler( req: HttpRequest, body: Option>, merge_strategy: Header, - db_conn: DbConnection, dimension_params: DimensionQuery, query_filters: superposition_query::Query, workspace_context: WorkspaceContext, state: Data, ) -> superposition::Result { - let DbConnection(mut conn) = db_conn; let query_filters = query_filters.into_inner(); - - let max_created_at = get_max_created_at(&mut conn, &workspace_context.schema_name) - .map_err(|e| log::error!("failed to fetch max timestamp from event_log : {e}")) - .ok(); + let schema_name = workspace_context.schema_name.clone(); + + let max_created_at = fetch_from_redis_else_writeback::>( + format!("{}{LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_max_created_at(&mut conn, &schema_name).map_err(|e| { + log::error!("failed to fetch max timestamp from event_log: {e}"); + db_error!(e) + }) + }, + ) + .await + .ok(); if is_not_modified(max_created_at, &req) { return Ok(HttpResponse::NotModified().finish()); } - let mut config_version = - get_config_version(&query_filters.version, &workspace_context)?; - let mut config = generate_config_from_version( - &mut config_version, - &mut conn, - &workspace_context.schema_name, - )?; + let config_version = fetch_from_redis_else_writeback::( + format!("{}{CONFIG_VERSION_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_config_version(&query_filters.version, &workspace_context, &mut conn) + }, + ) + .await + .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + + let mut config = fetch_from_redis_else_writeback::( + format!("{}::{}{CONFIG_KEY_SUFFIX}", schema_name.0, config_version,), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + generate_config_from_version( + &mut Some(config_version), + &mut conn, + &workspace_context.schema_name, + ) + }, + ) + .await + .map_err(|e| unexpected_error!("failed to generate config: {}", e))?; + let (is_smithy, query_data) = setup_query_data(&req, &body, &dimension_params)?; - let resolved_config = resolve( - &mut config, - query_data, - merge_strategy, - &mut conn, - &query_filters, - &workspace_context, - &state.master_encryption_key, - )?; + let resolved_config = { + let DbConnection(mut conn) = 
get_db_connection(state.db_pool.clone())?; + resolve( + &mut config, + query_data, + merge_strategy, + &mut conn, + &query_filters, + &workspace_context, + )? + }; let mut resp = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut resp); - add_audit_id_to_header(&mut conn, &mut resp, &workspace_context.schema_name); - add_config_version_to_header(&config_version, &mut resp); + if let Ok(audit_id) = fetch_from_redis_else_writeback::( + format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + fetch_audit_id(&mut conn, &workspace_context.schema_name) + .ok_or(not_found!("Audit ID not found")) + }, + ) + .await + { + resp.insert_header((AppHeader::XAuditId.to_string(), audit_id)); + } + add_config_version_to_header(&Some(config_version), &mut resp); Ok(resp.json(resolved_config)) } diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 2f09f3c9a..2b0ae7459 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -181,7 +181,10 @@ async fn create_handler( )); let DbConnection(mut conn) = db_conn; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } Ok(http_resp.json(put_response)) } @@ -269,7 +272,10 @@ async fn update_handler( )); let DbConnection(mut conn) = db_conn; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } Ok(http_resp.json(override_resp)) } @@ -380,7 +386,10 @@ async fn move_handler( )); let DbConnection(mut conn) = db_conn; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } Ok(http_resp.json(move_response.context)) } @@ -611,7 +620,10 @@ async fn delete_handler( })?; let DbConnection(mut conn) = db_conn; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &deleted_ctx, resource: Resource::Context, @@ -841,7 +853,10 @@ async fn bulk_operations_handler( })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &webhook_contexts, @@ -952,7 +967,10 @@ async fn weight_recompute_handler( let version_id = add_config_version(&state, tags, config_version_desc, transaction_conn, &workspace_context.schema_name)?; Ok(version_id) })?; - put_config_in_redis(config_version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(config_version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let 
data = WebhookData { payload: &response, diff --git a/crates/context_aware_config/src/api/default_config/handlers.rs b/crates/context_aware_config/src/api/default_config/handlers.rs index 889220de6..e13ce889e 100644 --- a/crates/context_aware_config/src/api/default_config/handlers.rs +++ b/crates/context_aware_config/src/api/default_config/handlers.rs @@ -164,7 +164,10 @@ async fn create_handler( Ok(version_id) })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &default_config, @@ -313,7 +316,10 @@ async fn update_handler( Ok((val, version_id)) })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &db_row, @@ -541,7 +547,11 @@ async fn delete_handler( } })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = + put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &default_config, diff --git a/crates/context_aware_config/src/api/dimension/handlers.rs b/crates/context_aware_config/src/api/dimension/handlers.rs index 8745aaa44..089b06e85 100644 --- a/crates/context_aware_config/src/api/dimension/handlers.rs +++ b/crates/context_aware_config/src/api/dimension/handlers.rs @@ -241,7 +241,10 @@ async fn create_handler( } })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &inserted_dimension, @@ -472,7 +475,10 @@ async fn update_handler( Ok((result, is_mandatory, version_id)) })?; - put_config_in_redis(version_id, state, &schema_name, &mut conn).await?; + if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &result, @@ -653,8 +659,11 @@ async fn delete_handler( } })?; - put_config_in_redis(_version_id, state, &schema_name, &mut conn).await?; - + if let Err(e) = + put_config_in_redis(_version_id, state, &schema_name, &mut conn).await + { + log::error!("Failed to update redis cache with new context: {}", e); + } let data = WebhookData { payload: &dimension_data, resource: Resource::Dimension, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index e8ecd99b0..184064d38 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -7,15 +7,21 @@ use actix_web::{ use bigdecimal::{BigDecimal, Num}; use chrono::{DateTime, Utc}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; -use fred::{ - interfaces::KeysInterface, - prelude::{RedisClient, RedisPool}, -}; +use fred::{interfaces::KeysInterface, types::Expiration}; +use jsonschema::{Draft, JSONSchema}; +use num_bigint::BigUint; use serde_json::{Map, Value, json}; use service_utils::{ helpers::{fetch_dimensions_info_map, generate_snowflake_id}, 
service::types::{AppState, EncryptionKey, SchemaName, WorkspaceContext}, }; +use service_utils::{ + helpers::get_from_env_or_default, + redis::{ + AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, + LAST_MODIFIED_KEY_SUFFIX, + }, +}; use superposition_macros::{db_error, unexpected_error, validation_error}; use superposition_types::database::schema::event_log::dsl as event_log; use superposition_types::{ @@ -42,7 +48,6 @@ use superposition_types::{ logic::dimensions_to_start_from, result as superposition, }; - use uuid::Uuid; use crate::{ @@ -56,11 +61,6 @@ use crate::{ validation_functions::execute_fn, }; -pub const LAST_MODIFIED_KEY_SUFFIX: &str = "::cac_config::last_modified_at"; -pub const AUDIT_ID_KEY_SUFFIX: &str = "::cac_config::audit_id"; -pub const CONFIG_VERSION_KEY_SUFFIX: &str = "::cac_config::config_version"; -pub const CONFIG_KEY_SUFFIX: &str = "::cac_config"; - pub fn parse_headermap_safe(headermap: &HeaderMap) -> HashMap { let mut req_headers = HashMap::new(); let record_header = |(header_name, header_val): (&HeaderName, &HeaderValue)| { @@ -240,50 +240,6 @@ pub fn get_workspace( Ok(workspace) } -pub async fn get_config_from_redis( - schema_name: &SchemaName, - redis_pool: &RedisPool, - redis_client: Option<&RedisClient>, -) -> superposition::Result { - use fred::interfaces::MetricsInterface; - - log::debug!("Started redis fetch for config"); - let config_key = format!("{}{CONFIG_KEY_SUFFIX}", **schema_name); - let config = { - // this block is so that the client connection is dropped - // before we move on to parsing the config - let client = match redis_client { - Some(client) => client, - None => redis_pool.next_connected(), - }; - let config = client - .get::(config_key) - .await - .map_err(|e| { - log::error!("Failed to fetch config from redis: {}", e); - unexpected_error!("Failed to fetch config from redis due to: {}", e) - })?; - let metrics = client.take_latency_metrics(); - let network_metrics = client.take_network_latency_metrics(); - log::trace!( - "Network metrics for config fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", - network_metrics.max, - network_metrics.min, - network_metrics.avg, - metrics.max, - metrics.min, - metrics.avg - ); - config - }; - - let config = serde_json::from_str(&config).map_err(|e| { - log::error!("Failed to parse config from redis: {}", e); - unexpected_error!("Failed to parse config from redis due to: {}", e) - })?; - Ok(config) -} - pub async fn put_config_in_redis( version_id: i64, state: &Data, @@ -298,22 +254,35 @@ pub async fn put_config_in_redis( return Ok(()); } }; - + let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); + let expiration = Some(Expiration::EX(key_ttl)); let raw_config = generate_cac(db_conn, schema_name)?; let parsed_config = serde_json::to_string(&json!(raw_config)).map_err(|e| { log::error!("failed to convert cac config to string: {}", e); unexpected_error!("could not convert cac config to string") })?; - let config_key = format!("{}{CONFIG_KEY_SUFFIX}", **schema_name); + let config_key = format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, version_id); let last_modified_at_key = format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name); let audit_id_key = format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name); let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", **schema_name); let last_modified = DateTime::to_rfc2822(&Utc::now()); let _ = redis_pool - .set::<(), String, String>(config_key, parsed_config, None, None, 
false) + .set::<(), String, String>( + config_key, + parsed_config, + expiration.clone(), + None, + false, + ) .await; let _ = redis_pool - .set::<(), String, String>(last_modified_at_key, last_modified, None, None, false) + .set::<(), String, String>( + last_modified_at_key, + last_modified, + expiration.clone(), + None, + false, + ) .await; if let Ok(uuid) = event_log::event_log .select(event_log::id) @@ -323,11 +292,23 @@ pub async fn put_config_in_redis( .first::(db_conn) { let _ = redis_pool - .set::<(), String, String>(audit_id_key, uuid.to_string(), None, None, false) + .set::<(), String, String>( + audit_id_key, + uuid.to_string(), + expiration.clone(), + None, + false, + ) .await; } let _ = redis_pool - .set::<(), String, i64>(config_version_key, version_id, None, None, false) + .set::<(), String, i64>( + config_version_key, + version_id, + expiration.clone(), + None, + false, + ) .await; Ok(()) } diff --git a/crates/experimentation_platform/Cargo.toml b/crates/experimentation_platform/Cargo.toml index 4ec80e083..efad1b1ab 100644 --- a/crates/experimentation_platform/Cargo.toml +++ b/crates/experimentation_platform/Cargo.toml @@ -16,6 +16,7 @@ cac_client = { path = "../cac_client" } chrono = { workspace = true } diesel = { workspace = true } experimentation_client = { path = "../experimentation_client" } +fred = { workspace = true, features = ["metrics"] } log = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index 4ffe7f970..8fcf3d5f9 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -10,7 +10,11 @@ use diesel::{ use serde_json::Value; use service_utils::{ helpers::{generate_snowflake_id, get_from_env_or_default}, - service::types::{AppState, DbConnection, WorkspaceContext}, + redis::{fetch_from_redis_else_writeback, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}, + service::{ + get_db_connection, + types::{AppState, DbConnection, SchemaName, WorkspaceContext}, + }, }; use superposition_derives::authorized; use superposition_macros::{bad_argument, unexpected_error}; @@ -39,7 +43,8 @@ use superposition_types::{ use crate::api::{ experiment_groups::helpers::{ add_members, create_system_generated_experiment_group, - fetch_and_validate_members, fetch_experiment_group, remove_members, + fetch_and_validate_members, fetch_experiment_group, + put_experiment_groups_in_redis, remove_members, validate_experiment_group_constraints, }, experiments::{ @@ -140,6 +145,18 @@ async fn create_handler( .get_result::(transaction_conn)?; Ok(new_experiment_group) })?; + if let Err(err) = put_experiment_groups_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!( + "Failed to update experiment groups in redis after creation: {}", + err + ); + } Ok(Json(new_experiment_group)) } @@ -184,6 +201,15 @@ async fn update_handler( .returning(ExperimentGroup::as_returning()) .schema_name(&workspace_context.schema_name) .get_result(&mut conn)?; + if let Err(err) = put_experiment_groups_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update experiment groups in redis: {}", err); + } Ok(Json(updated_group)) } @@ -233,6 +259,15 @@ async fn add_members_handler( &user, ) })?; + if let Err(err) = 
put_experiment_groups_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update experiment groups in redis: {}", err); + } Ok(experiment_group) } @@ -275,6 +310,15 @@ async fn remove_members_handler( &user, ) })?; + if let Err(err) = put_experiment_groups_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update experiment groups in redis: {}", err); + } Ok(experiment_group) } @@ -284,8 +328,36 @@ async fn list_handler( workspace_context: WorkspaceContext, pagination_params: superposition_query::Query, filters: superposition_query::Query, - db_conn: DbConnection, + schema_name: SchemaName, + state: Data, ) -> superposition::Result>> { + let key = format!("{}{}", schema_name.0, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX); + fetch_from_redis_else_writeback::>( + key, + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let db_conn = get_db_connection(db_pool)?; + list_experiment_groups_db( + pagination_params, + filters, + db_conn, + schema_name.clone(), + ) + }, + ) + .await + .map(Json) + .map_err(|e| unexpected_error!(e)) +} + +fn list_experiment_groups_db( + pagination_params: superposition_query::Query, + filters: superposition_query::Query, + db_conn: DbConnection, + schema_name: SchemaName, +) -> superposition::Result> { let DbConnection(mut conn) = db_conn; let query_builder = |filters: &ExpGroupFilters| { let mut builder = experiment_groups::experiment_groups @@ -325,7 +397,7 @@ async fn list_handler( if let Some(true) = pagination_params.all { let result: ExperimentGroups = base_query.get_results::(&mut conn)?; - return Ok(Json(PaginatedResponse::all(result))); + return Ok(PaginatedResponse::all(result)); } let total_items = count_query.count().get_result(&mut conn)?; let limit = pagination_params.count.unwrap_or(10); @@ -334,11 +406,11 @@ async fn list_handler( let query = base_query.limit(limit).offset(offset); let data = query.load::(&mut conn)?; let total_pages = (total_items as f64 / limit as f64).ceil() as i64; - Ok(Json(PaginatedResponse { + Ok(PaginatedResponse { total_pages, total_items, data, - })) + }) } #[authorized] @@ -364,9 +436,10 @@ async fn delete_handler( exp_group_id: web::Path, mut db_conn: DbConnection, user: User, + state: Data, ) -> superposition::Result> { let id = exp_group_id.into_inner(); - db_conn.transaction::, superposition::AppError, _>(|conn| { + let result = db_conn.transaction::, superposition::AppError, _>(|conn| { let marked_group = diesel::update(experiment_groups::experiment_groups) .filter(experiment_groups::id.eq(&id)) .set(( @@ -387,7 +460,17 @@ async fn delete_handler( .schema_name(&workspace_context.schema_name) .execute(conn)?; Ok(Json(marked_group)) - }) + }); + if let Err(err) = + put_experiment_groups_in_redis(state.redis.clone(), &mut db_conn, &schema_name) + .await + { + log::error!( + "Failed to update experiment groups in redis after creation: {}", + err + ); + } + result } // Remove this after backfilling experiment groups @@ -447,6 +530,14 @@ async fn backfill_handler( } Ok(results) })?; + if let Err(err) = + put_experiment_groups_in_redis(state.redis.clone(), &mut conn, &schema_name).await + { + log::error!( + "Failed to update experiment groups in redis after creation: {}", + err + ); + } Ok(Json(experiment_groups)) } diff --git a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs 
index 66ff15b47..b37264680 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs @@ -4,9 +4,11 @@ use actix_web::web::{Data, Json}; use diesel::{ BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper, }; +use fred::{prelude::{KeysInterface, RedisPool}, types::Expiration}; use serde_json::Value; use service_utils::{ - helpers::generate_snowflake_id, + helpers::{generate_snowflake_id, get_from_env_or_default}, + redis::EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, service::types::{AppState, SchemaName, WorkspaceContext}, }; use superposition_macros::{bad_argument, unexpected_error}; @@ -25,7 +27,7 @@ use superposition_types::{ experiment_groups::dsl as experiment_groups, experiments::dsl as experiments, }, }, - result as superposition, + result as superposition, Condition, DBConnection, PaginatedResponse, User, }; use crate::api::experiments::helpers::{ensure_experiments_exist, hash}; @@ -452,3 +454,44 @@ pub fn fetch_experiment_group( .get_result::(conn)?; Ok(experiment_group) } + +pub async fn put_experiment_groups_in_redis( + redis_pool: Option, + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> superposition::Result<()> { + let pool = match redis_pool { + Some(pool) => pool, + None => { + log::debug!("Redis not configured, skipping experiment groups cache update"); + return Ok(()); + } + }; + + let experiment_group_list: Vec = + experiment_groups::experiment_groups + .order(experiment_groups::last_modified_at.desc()) + .schema_name(schema_name) + .load::(conn)?; + + let paginated_response = PaginatedResponse::all(experiment_group_list); + + let serialized = serde_json::to_string(&paginated_response).map_err(|e| { + log::error!("Failed to serialize experiment groups for redis: {}", e); + unexpected_error!("Failed to serialize experiment groups for redis: {}", e) + })?; + + let key = format!("{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", **schema_name); + let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); + let expiration = Some(Expiration::EX(key_ttl)); + pool.next_connected() + .set::<(), String, String>(key, serialized, expiration, None, false) + .await + .map_err(|e| { + log::error!("Failed to write experiment groups to redis: {}", e); + unexpected_error!("Failed to write experiment groups to redis: {}", e) + })?; + + log::debug!("Successfully updated experiment groups cache in Redis"); + Ok(()) +} diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index e995a539b..bee9881eb 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -27,8 +27,16 @@ use service_utils::{ WebhookData, construct_request_headers, execute_webhook_call, fetch_dimensions_info_map, generate_snowflake_id, request, }, - service::types::{ - AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext, + redis::{ + fetch_from_redis_else_writeback, EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, + EXPERIMENTS_LIST_KEY_SUFFIX, + }, + service::{ + get_db_connection, + types::{ + AppHeader, AppState, CustomHeaders, DbConnection, SchemaName, + WorkspaceContext, + }, }, }; use superposition_derives::authorized; @@ -70,8 +78,9 @@ use superposition_types::{ experiments::dsl as experiments, }, }, - logic::{evaluate_local_cohorts, evaluate_local_cohorts_skip_unresolved}, - result as superposition, + 
logic::evaluate_local_cohorts, + result as superposition, Cac, Condition, Config, Contextual, DBConnection, Exp, + ListResponse, Overrides, PaginatedResponse, SortBy, User, }; use crate::api::{ @@ -81,7 +90,8 @@ use crate::api::{ }, experiments::{ helpers::{ - fetch_and_validate_change_reason_with_function, validate_control_overrides, + fetch_and_validate_change_reason_with_function, fetch_webhook_by_event, + get_workspace, put_experiments_in_redis, validate_control_overrides, validate_delete_experiment_variants, }, types::StartedByChangeSet, @@ -374,6 +384,20 @@ async fn create_handler( Ok(inserted_experiment) })?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!( + "Failed to update redis cache for experiments after creating experiment {}: {}", + inserted_experiment.id, + err + ); + } let response = ExperimentResponse::from(inserted_experiment); let data = WebhookData { @@ -430,6 +454,17 @@ async fn conclude_handler( ) .await?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let experiment_response = ExperimentResponse::from(response); let data = WebhookData { @@ -697,6 +732,17 @@ async fn discard_handler( ) .await?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let experiment_response = ExperimentResponse::from(response); let data = WebhookData { @@ -943,15 +989,28 @@ async fn list_handler( pagination_params: superposition_query::Query, filters: superposition_query::Query, dimension_params: DimensionQuery, - db_conn: DbConnection, + schema_name: SchemaName, + state: Data, ) -> superposition::Result { - let DbConnection(mut conn) = db_conn; - - let max_event_timestamp: Option> = event_log::event_log - .filter(event_log::table_name.eq("experiments")) - .select(diesel::dsl::max(event_log::timestamp)) - .schema_name(&workspace_context.schema_name) - .first(&mut conn)?; + let max_event_timestamp = fetch_from_redis_else_writeback::>>( + format!("{}{EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + event_log::event_log + .filter(event_log::table_name.eq("experiments")) + .select(diesel::dsl::max(event_log::timestamp)) + .schema_name(&schema_name) + .first(&mut conn) + .map_err(|e| { + log::error!("failed to fetch max timestamp from event_log: {e}"); + unexpected_error!("failed to fetch max timestamp from event_log: {e}") + }) + }, + ) + .await?; let last_modified = req .headers() @@ -966,7 +1025,52 @@ async fn list_handler( if max_event_timestamp.is_some() && max_event_timestamp < last_modified { return Ok(HttpResponse::NotModified().finish()); }; + let show_all = pagination_params.all.unwrap_or_default(); + let read_from_redis = show_all + && filters + .status + .clone() + .is_some_and(|v| *v == ExperimentStatusType::active_list()); + if read_from_redis { + let response = + fetch_from_redis_else_writeback::>( + 
format!("{}{EXPERIMENTS_LIST_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(conn) = get_db_connection(db_pool)?; + list_experiments_db( + pagination_params.clone(), + filters.clone(), + dimension_params.clone(), + schema_name.clone(), + conn, + ) + }, + ) + .await?; + Ok(HttpResponse::Ok().json(response)) + } else { + let DbConnection(conn) = get_db_connection(state.db_pool.clone())?; + let paginated_response = list_experiments_db( + pagination_params, + filters, + dimension_params, + schema_name, + conn, + )?; + Ok(HttpResponse::Ok().json(paginated_response)) + } +} +fn list_experiments_db( + pagination_params: superposition_query::Query, + filters: superposition_query::Query, + dimension_params: DimensionQuery, + schema_name: SchemaName, + mut conn: DBConnection, +) -> superposition::Result> { let dimension_params = dimension_params.into_inner(); let query_builder = |filters: &ExperimentListFilters| { @@ -1105,7 +1209,7 @@ async fn list_handler( } }; - Ok(HttpResponse::Ok().json(paginated_response)) + Ok(paginated_response) } #[authorized] @@ -1632,6 +1736,17 @@ async fn update_handler( Ok(updated_experiment) })?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let experiment_response = ExperimentResponse::from(updated_experiment); let data = WebhookData { @@ -1684,6 +1799,17 @@ async fn pause_handler( ) .await?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let experiment_response = ExperimentResponse::from(response); let data = WebhookData { @@ -1772,6 +1898,17 @@ async fn resume_handler( ) .await?; + // Update Redis cache with active experiments and experiment groups + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let experiment_response = ExperimentResponse::from(response); let data = WebhookData { diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index 0f3ff8a24..389315216 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -9,6 +9,10 @@ use diesel::{ pg::PgConnection, r2d2::{ConnectionManager, PooledConnection}, }; +use fred::{ + prelude::{KeysInterface, RedisPool}, + types::Expiration, +}; use serde_json::{Map, Value}; use service_utils::service::types::{ AppState, ExperimentationFlags, SchemaName, WorkspaceContext, @@ -20,6 +24,7 @@ use superposition_types::{ I64Update, config::{ConfigQuery, ResolveConfigQuery}, experiment_groups::ExpGroupMemberRequest, + experiments::ExperimentResponse, functions::{ CHANGE_REASON_VALIDATION_FN_NAME, FunctionExecutionRequest, FunctionExecutionResponse, Stage, @@ -35,7 +40,8 @@ use superposition_types::{ }, schema::experiments::dsl as experiments, }, - result as superposition, + result as superposition, Condition, Config, DBConnection, Exp, Overrides, + 
PaginatedResponse, User, }; use crate::api::experiment_groups::helpers::{ @@ -795,3 +801,51 @@ pub async fn fetch_and_validate_change_reason_with_function( } } } + +pub async fn put_experiments_in_redis( + redis_pool: Option, + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> superposition::Result<()> { + let pool = match redis_pool { + Some(pool) => pool, + None => { + log::debug!("Redis not configured, skipping experiments cache update"); + return Ok(()); + } + }; + + let active_statuses = ExperimentStatusType::active_list(); + + let experiment_list: Vec = experiments::experiments + .filter(experiments::status.eq_any(active_statuses)) + .order(experiments::last_modified.desc()) + .schema_name(schema_name) + .load::(conn)?; + + let experiment_responses: Vec = experiment_list + .into_iter() + .map(ExperimentResponse::from) + .collect(); + + let paginated_response = PaginatedResponse::all(experiment_responses); + + let serialized = serde_json::to_string(&paginated_response).map_err(|e| { + log::error!("Failed to serialize experiments for redis: {}", e); + unexpected_error!("Failed to serialize experiments for redis: {}", e) + })?; + + let key = format!("{}{EXPERIMENTS_LIST_KEY_SUFFIX}", **schema_name); + let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); + let expiration = Some(Expiration::EX(key_ttl)); + pool.next_connected() + .set::<(), String, String>(key, serialized, expiration, None, false) + .await + .map_err(|e| { + log::error!("Failed to write experiments to redis: {}", e); + unexpected_error!("Failed to write experiments to redis: {}", e) + })?; + + log::debug!("Successfully updated experiments cache in Redis"); + Ok(()) +} diff --git a/crates/service_utils/Cargo.toml b/crates/service_utils/Cargo.toml index 3f00a440d..9b51d1c2a 100644 --- a/crates/service_utils/Cargo.toml +++ b/crates/service_utils/Cargo.toml @@ -17,7 +17,7 @@ base64 = { workspace = true } chrono = { workspace = true } derive_more = { workspace = true } diesel = { workspace = true } -fred = { workspace = true } +fred = { workspace = true, features = ["metrics"] } futures-util = { workspace = true } log = { workspace = true } once_cell = { workspace = true } @@ -31,12 +31,12 @@ secrecy = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } strum_macros = { workspace = true } -superposition_macros = { workspace = true } superposition_types = { workspace = true, features = [ "result", "api", "diesel_derives", ] } +superposition_macros = { workspace = true } url = { workspace = true } urlencoding = "~2.1.2" tracing-actix-web = { workspace = true } diff --git a/crates/service_utils/src/lib.rs b/crates/service_utils/src/lib.rs index 772b7a016..5f227dc5e 100644 --- a/crates/service_utils/src/lib.rs +++ b/crates/service_utils/src/lib.rs @@ -5,4 +5,5 @@ pub mod encryption; pub mod extensions; pub mod helpers; pub mod middlewares; +pub mod redis; pub mod service; diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs new file mode 100644 index 000000000..2bfb8b12e --- /dev/null +++ b/crates/service_utils/src/redis.rs @@ -0,0 +1,112 @@ +use fred::{prelude::{KeysInterface, RedisClient, RedisPool}, types::Expiration}; +use serde::{de::DeserializeOwned, Serialize}; +use superposition_macros::unexpected_error; +use superposition_types::result as superposition; + +use crate::{db::PgSchemaConnectionPool, helpers::get_from_env_or_default, service::types::SchemaName}; + +pub const LAST_MODIFIED_KEY_SUFFIX: &str = "::cac_config::last_modified_at"; 
+pub const AUDIT_ID_KEY_SUFFIX: &str = "::cac_config::audit_id";
+pub const CONFIG_VERSION_KEY_SUFFIX: &str = "::cac_config::config_version";
+pub const CONFIG_KEY_SUFFIX: &str = "::cac_config";
+pub const EXPERIMENTS_LIST_KEY_SUFFIX: &str = "::experiments_list";
+pub const EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX: &str = "::experiments::last_modified_at";
+pub const EXPERIMENT_GROUPS_LIST_KEY_SUFFIX: &str = "::experiment_groups_list";
+
+/// Fetch data from Redis if available, else fall back to the database call and write the result back to Redis.
+/// If Redis is not configured, the database is read directly.
+/// The fallback is expected to return a `superposition::Result<T>`.
+/// You can use a move closure to capture variables in `database_call`.
+pub async fn fetch_from_redis_else_writeback<T>(
+    key: String,
+    schema_name: &SchemaName,
+    redis_pool: Option<RedisPool>,
+    db_pool: PgSchemaConnectionPool,
+    database_call: impl FnOnce(PgSchemaConnectionPool) -> superposition::Result<T>,
+) -> superposition::Result<T>
+where
+    T: Serialize + DeserializeOwned,
+{
+    if redis_pool.is_none() {
+        log::trace!("Redis pool not configured, using fallback");
+        return database_call(db_pool);
+    }
+    let pool = redis_pool.ok_or(unexpected_error!(
+        "Could not access redis pool, this message should never be seen",
+    ))?;
+    let client = pool.next_connected();
+    match get_data_from_redis(key.clone(), client).await {
+        Ok(data) => Ok(data),
+        Err(e) => {
+            log::info!(
+                "Falling back to DB for schema {} due to Redis error: {}",
+                **schema_name,
+                e
+            );
+            let data = database_call(db_pool);
+            if let Ok(ref value) = data {
+                let serialized = serde_json::to_string(value).map_err(|e| {
+                    log::error!("Failed to serialize data for redis writeback: {}", e);
+                    unexpected_error!(
+                        "Failed to serialize data for redis writeback due to: {}",
+                        e
+                    )
+                })?;
+                let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800);
+                let expiration = Some(Expiration::EX(key_ttl));
+                client
+                    .set::<(), String, String>(key, serialized, expiration, None, false)
+                    .await
+                    .map_err(|e| {
+                        log::error!("Failed to write back data to redis: {}", e);
+                        unexpected_error!(
+                            "Failed to write back data to redis due to: {}",
+                            e
+                        )
+                    })?;
+            }
+            data
+        }
+    }
+}
+
+pub async fn get_data_from_redis<T>(
+    key_name: String,
+    client: &RedisClient,
+) -> Result<T, String>
+where
+    T: DeserializeOwned,
+{
+    use fred::interfaces::MetricsInterface;
+
+    log::debug!("Started redis fetch for {key_name}");
+    let config = {
+        // this block is so that the client connection is dropped
+        // before we move on to parsing the fetched value
+        let config = client
+            .get::<String, _>(key_name.clone())
+            .await
+            .map_err(|e| {
+                log::error!("Failed to fetch {key_name} from redis: {}", e);
+                format!("Failed to fetch {key_name} from redis due to: {}", e)
+            })?;
+        let metrics = client.take_latency_metrics();
+        let network_metrics = client.take_network_latency_metrics();
+        log::trace!(
+            "Network metrics for redis fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}",
+            network_metrics.max,
+            network_metrics.min,
+            network_metrics.avg,
+            metrics.max,
+            metrics.min,
+            metrics.avg
+        );
+        config
+    };
+
+    let value = serde_json::from_str::<T>(&config).map_err(|e| {
+        log::error!("Failed to parse value from redis: {}", e);
+        format!("Failed to parse value from redis due to: {}", e)
+    })?;
+    Ok(value)
+}
diff --git a/crates/service_utils/src/service.rs b/crates/service_utils/src/service.rs
index cd408564e..d1b54de0c 100644
--- a/crates/service_utils/src/service.rs
+++ b/crates/service_utils/src/service.rs
@@ -1 +1,24
@@ +use crate::db::PgSchemaConnectionPool; +use diesel::Connection; +use superposition_macros::unexpected_error; +use superposition_types::result as superposition; +use types::DbConnection; + pub mod types; + +pub fn get_db_connection( + db_pool: PgSchemaConnectionPool, +) -> superposition::Result { + match db_pool.get() { + Ok(mut conn) => { + conn.set_prepared_statement_cache_size( + diesel::connection::CacheSize::Disabled, + ); + Ok(DbConnection(conn)) + } + Err(e) => { + log::info!("Unable to get db connection from pool, error: {e}"); + Err(unexpected_error!("Could not get a DB connection, contact an admin and check logs for further information")) + } + } +} diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index f5ad16a6c..17c58bb7f 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -9,7 +9,8 @@ use std::{ use actix_web::{Error, FromRequest, HttpMessage, error, web::Data}; use derive_more::{Deref, DerefMut}; use diesel::r2d2::{ConnectionManager, PooledConnection}; -use diesel::{Connection, PgConnection}; +use diesel::PgConnection; +use jsonschema::JSONSchema; use secrecy::SecretString; use snowflake::SnowflakeIdGenerator; use superposition_types::database::models::Workspace; @@ -188,18 +189,12 @@ impl FromRequest for DbConnection { } }; - let result = match app_state.db_pool.get() { - Ok(mut conn) => { - conn.set_prepared_statement_cache_size( - diesel::connection::CacheSize::Disabled, - ); - Ok(DbConnection(conn)) - } - Err(e) => { - log::info!("Unable to get db connection from pool, error: {e}"); - Err(error::ErrorInternalServerError("")) - } - }; + let result = super::get_db_connection(app_state.db_pool.clone()).map_err(|e| { + log::error!("Failed to inject DB connection, error: {}", e); + error::ErrorInternalServerError( + "A database error occurred, please contact an admin or check logs", + ) + }); ready(result) } diff --git a/crates/superposition/src/app_state.rs b/crates/superposition/src/app_state.rs index 838ae3a9b..89ad745a6 100644 --- a/crates/superposition/src/app_state.rs +++ b/crates/superposition/src/app_state.rs @@ -45,9 +45,9 @@ pub async fn get( let redis_max_attempts = get_from_env_or_default("REDIS_MAX_ATTEMPTS", 10); let redis_connection_timeout = get_from_env_or_default("REDIS_CONN_TIMEOUT", 1000); - let config = RedisConfig::from_url(&redis_url).expect( - format!("Failed to create RedisConfig from url {}", redis_url).as_str(), - ); + let config = RedisConfig::from_url(&redis_url).unwrap_or_else(|_| { + panic!("Failed to create RedisConfig from url {}", redis_url) + }); let reconnect_policy = ReconnectPolicy::new_constant(redis_max_attempts, 100); let redis_pool = RedisPool::new( config, diff --git a/crates/superposition_core/include/superposition_core.h b/crates/superposition_core/include/superposition_core.h index 82c759bd7..d3505a995 100644 --- a/crates/superposition_core/include/superposition_core.h +++ b/crates/superposition_core/include/superposition_core.h @@ -2,7 +2,7 @@ #include #include #include -k + /** * # Safety * From d80b4b2abcbcf38d4cad6f1f5505867e3dbb0ed0 Mon Sep 17 00:00:00 2001 From: datron Date: Fri, 9 Jan 2026 19:15:24 +0530 Subject: [PATCH 04/22] feat: use redis caches in combined resolve API --- Cargo.lock | 1 + crates/context_aware_config/src/api/config.rs | 1 + .../src/api/config/handlers.rs | 68 +----- .../src/api/config/helpers.rs | 47 ++-- .../src/api/experiments/handlers.rs | 74 +++--- .../src/api/experiments/helpers.rs | 3 +- 
crates/superposition/Cargo.toml | 1 + crates/superposition/src/resolve/handlers.rs | 229 ++++++++++++++---- crates/superposition/src/resolve/types.rs | 2 + 9 files changed, 262 insertions(+), 164 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dfb443d3c..c36c9c3b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5577,6 +5577,7 @@ dependencies = [ "serde", "serde_json", "service_utils", + "superposition_core", "superposition_derives", "superposition_macros", "superposition_types", diff --git a/crates/context_aware_config/src/api/config.rs b/crates/context_aware_config/src/api/config.rs index 62a998ddb..0f0a44d03 100644 --- a/crates/context_aware_config/src/api/config.rs +++ b/crates/context_aware_config/src/api/config.rs @@ -1,3 +1,4 @@ mod handlers; pub use handlers::endpoints; +pub use handlers::fetch_audit_id; pub mod helpers; diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index e8552ad19..b6539402d 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -9,10 +9,7 @@ use actix_web::{ HttpRequest, HttpResponse, HttpResponseBuilder, Scope, }; use chrono::{DateTime, Timelike, Utc}; -use diesel::{ - dsl::max, BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, - SelectableHelper, -}; +use diesel::{dsl::max, ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; use itertools::Itertools; use serde_json::{json, Map, Value}; #[cfg(feature = "jsonlogic")] @@ -50,7 +47,6 @@ use superposition_types::{ cac::{ConfigVersion, ConfigVersionListItem}, }, schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, - superposition_schema::superposition::workspaces, }, result as superposition, }; @@ -66,6 +62,7 @@ use crate::{ helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, ======= use crate::api::{ + config::helpers::get_config_version, context::{self, helpers::query_description}, dimension::fetch_dimensions_info_map, >>>>>>> 269cf29d (feat: introduce writeback methods for redis) @@ -93,67 +90,6 @@ pub fn endpoints() -> Scope { .service(get_version_handler) } -fn get_config_version_from_workspace( - workspace_context: &WorkspaceContext, - conn: &mut DBConnection, -) -> Option { - match workspaces::dsl::workspaces - .select(workspaces::config_version) - .filter( - workspaces::organisation_id - .eq(&workspace_context.organisation_id.0) - .and(workspaces::workspace_name.eq(&workspace_context.workspace_id.0)), - ) - .get_result::>(conn) - { - Ok(version) => version, - Err(e) => { - log::error!( - "Failed to get config_version for org_id: {}, workspace_name: {} — {:?}", - workspace_context.organisation_id.0, - workspace_context.workspace_id.0, - e - ); - None - } - } -} - -fn get_config_version_from_db( - conn: &mut DBConnection, - schema_name: &SchemaName, -) -> Result { - config_versions::config_versions - .select(config_versions::id) - .order_by(config_versions::created_at.desc()) - .schema_name(schema_name) - .first::(conn) -} - -fn get_config_version( - version: &Option, - workspace_context: &WorkspaceContext, - conn: &mut DBConnection, -) -> superposition::Result { - match version.as_ref() { - Some(v) if *v != *"latest" => v.parse::().map_or_else( - |e| { - log::error!("failed to decode version as integer: {v}, error: {e}"); - Err(bad_argument!("version is not of type integer")) - }, - Ok, - ), - _ => match get_config_version_from_workspace(workspace_context, conn) { - Some(v) => Ok(v), 
- None => get_config_version_from_db(conn, &workspace_context.schema_name) - .map_err(|e| { - log::error!("failed to fetch latest config version from db: {e}"); - db_error!(e) - }), - }, - } -} - pub fn fetch_audit_id( conn: &mut DBConnection, schema_name: &SchemaName, diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index eecc32e5d..6d5ce635f 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -38,25 +38,36 @@ pub fn apply_prefix_filter_to_config( pub fn get_config_version( version: &Option, workspace_context: &WorkspaceContext, -) -> superposition::Result> { - version.as_ref().map_or_else( - || Ok(workspace_context.settings.config_version), - |version| { - if *version == *"latest" { - log::trace!("latest config request"); - return Ok(None); - } - version.parse::().map_or_else( - |e| { - log::error!( - "failed to decode version as integer: {version}, error: {e}" - ); - Err(bad_argument!("version is not of type integer")) - }, - |v| Ok(Some(v)), - ) + conn: &mut DBConnection, +) -> superposition::Result { + match version.as_ref() { + Some(v) if *v != *"latest" => v.parse::().map_or_else( + |e| { + log::error!("failed to decode version as integer: {v}, error: {e}"); + Err(bad_argument!("version is not of type integer")) + }, + Ok, + ), + _ => match get_config_version_from_workspace(workspace_context, conn) { + Some(v) => Ok(v), + None => get_config_version_from_db(conn, &workspace_context.schema_name) + .map_err(|e| { + log::error!("failed to fetch latest config version from db: {e}"); + db_error!(e) + }), }, - ) + } +} + +fn get_config_version_from_db( + conn: &mut DBConnection, + schema_name: &SchemaName, +) -> Result { + config_versions::config_versions + .select(config_versions::id) + .order_by(config_versions::created_at.desc()) + .schema_name(schema_name) + .first::(conn) } pub fn add_audit_id_to_header( diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index bee9881eb..abf2cf0c5 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -13,9 +13,9 @@ use actix_web::{ }; use chrono::{DateTime, Utc}; use diesel::{ - BoolExpressionMethods, Connection, ExpressionMethods, PgConnection, QueryDsl, - RunQueryDsl, SelectableHelper, TextExpressionMethods, r2d2::{ConnectionManager, PooledConnection}, + Connection, ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl, SelectableHelper, + TextExpressionMethods, }; use experimentation_client::{ get_applicable_buckets_from_group, get_applicable_variants_from_group_response, @@ -73,10 +73,7 @@ use superposition_types::{ }, others::WebhookEvent, }, - schema::{ - event_log::dsl as event_log, experiment_groups::dsl as experiment_groups, - experiments::dsl as experiments, - }, + schema::{event_log::dsl as event_log, experiments::dsl as experiments}, }, logic::evaluate_local_cohorts, result as superposition, Cac, Condition, Config, Contextual, DBConnection, Exp, @@ -90,8 +87,9 @@ use crate::api::{ }, experiments::{ helpers::{ - fetch_and_validate_change_reason_with_function, fetch_webhook_by_event, - get_workspace, put_experiments_in_redis, validate_control_overrides, + fetch_and_validate_change_reason_with_function, fetch_experiment_groups, + fetch_experiments, fetch_webhook_by_event, get_workspace, + 
put_experiments_in_redis, validate_control_overrides, validate_delete_experiment_variants, }, types::StartedByChangeSet, @@ -874,41 +872,32 @@ pub async fn discard( } pub async fn get_applicable_variants_helper( - db_conn: &mut PooledConnection>, + experiments: &Vec, + experiment_groups: &Vec, context: Map, dimensions_info: &HashMap, identifier: String, - workspace_context: &WorkspaceContext, ) -> superposition::Result<(Vec, HashMap)> { - use superposition_types::database::schema::experiments::dsl; - - let experiment_groups = experiment_groups::experiment_groups - .schema_name(&workspace_context.schema_name) - .load::(db_conn)?; - - let context = evaluate_local_cohorts(dimensions_info, &context); + let context = Value::Object(evaluate_local_cohorts(&config.dimensions, &context)); let buckets = - get_applicable_buckets_from_group(&experiment_groups, &context, &identifier); + get_applicable_buckets_from_group(experiment_groups, &context, &identifier); let exp_ids = buckets .iter() .filter_map(|(_, bucket)| bucket.experiment_id.parse::().ok()) .collect::>(); - let exps = dsl::experiments - .filter( - dsl::id - .eq_any(exp_ids) - .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), - ) - .schema_name(&workspace_context.schema_name) - .load::(db_conn)? - .into_iter() - .map(|exp| { - let exp_response = ExperimentResponse::from(exp); - let id = exp_response.id.clone(); - (id, exp_response) + let exps = experiments + .iter() + .filter_map(|exp| { + if exp_ids.contains(&exp.id) { + let exp_response = ExperimentResponse::from(exp.clone()); + let id = exp_response.id.clone(); + Some((id, exp_response)) + } else { + None + } }) .collect::>(); @@ -925,12 +914,12 @@ pub async fn get_applicable_variants_helper( async fn get_applicable_variants_handler( workspace_context: WorkspaceContext, req: HttpRequest, - db_conn: DbConnection, + state: Data, req_body: Option>, query_data: Option>, dimension_params: Option>, + workspace_context: WorkspaceContext, ) -> superposition::Result>, Json>>> { - let DbConnection(mut conn) = db_conn; let (context, identifier) = match (req.method().clone(), query_data, dimension_params, req_body) { ( @@ -950,15 +939,16 @@ async fn get_applicable_variants_handler( return Err(bad_argument!("Invalid input for the method")); } }; + let experiments = fetch_experiments(&state, &workspace_context).await?; + let experiment_groups = fetch_experiment_groups(&state, &workspace_context).await?; + let (config, _) = fetch_cac_config(&state, &workspace_context).await?; - let dimensions_info = - fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)?; let (applicable_variants, exps) = get_applicable_variants_helper( - &mut conn, + &experiments, + &experiment_groups, context, &dimensions_info, identifier, - &workspace_context, ) .await?; @@ -1395,6 +1385,16 @@ async fn ramp_handler( let (_, config_version_id) = fetch_cac_config(&state, &workspace_context).await?; let experiment_response = ExperimentResponse::from(updated_experiment); + if let Err(err) = put_experiments_in_redis( + state.redis.clone(), + &mut conn, + &workspace_request.schema_name, + ) + .await + { + log::error!("Failed to update redis cache for experiments: {}", err); + } + let webhook_event = if matches!(experiment.status, ExperimentStatusType::CREATED) { WebhookEvent::ExperimentStarted } else { diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index 389315216..ed417a0aa 100644 --- 
a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -35,7 +35,8 @@ use superposition_types::{ models::{ ChangeReason, experimentation::{ - Experiment, ExperimentStatusType, GroupType, Variant, VariantType, + Experiment, ExperimentGroup, ExperimentStatusType, GroupType, Variant, + VariantType, }, }, schema::experiments::dsl as experiments, diff --git a/crates/superposition/Cargo.toml b/crates/superposition/Cargo.toml index 45c2d8aed..b0e0de521 100644 --- a/crates/superposition/Cargo.toml +++ b/crates/superposition/Cargo.toml @@ -17,6 +17,7 @@ context_aware_config = { path = "../context_aware_config" } diesel = { workspace = true } dotenv = "0.15.0" experimentation_platform = { path = "../experimentation_platform" } +superposition_core = { workspace = true } fred = { workspace = true } frontend = { path = "../frontend" } idgenerator = "2.0.0" diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index dc54bdc28..6f892fc37 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -1,20 +1,44 @@ use actix_web::{ - HttpRequest, HttpResponse, Scope, routes, + routes, web::{Data, Header, Json}, + HttpRequest, HttpResponse, Scope, }; +use chrono::{DateTime, Utc}; +use context_aware_config::api::config::fetch_audit_id; use context_aware_config::api::config::helpers::{ - add_audit_id_to_header, add_config_version_to_header, add_last_modified_to_header, + add_config_version_to_header, add_last_modified_to_header, generate_config_from_version, get_config_version, get_max_created_at, is_not_modified, resolve, setup_query_data, }; -use experimentation_platform::api::experiments::handlers::get_applicable_variants_helper; +use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl}; use serde_json::{Map, Value}; -use service_utils::service::types::{AppState, DbConnection, WorkspaceContext}; +use service_utils::{ + redis::{ + fetch_from_redis_else_writeback, AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, + CONFIG_VERSION_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, + LAST_MODIFIED_KEY_SUFFIX, + }, + service::{ + get_db_connection, + types::{AppHeader, AppState, DbConnection, WorkspaceContext}, + }, +}; +use std::collections::{HashMap, HashSet}; +use superposition_core::experiment::{ + get_applicable_buckets_from_group, get_applicable_variants_from_group_response, + FfiExperiment, FfiExperimentGroup, +}; use superposition_derives::authorized; +use superposition_macros::{db_error, not_found, unexpected_error}; use superposition_types::{ api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{self as superposition_query, CustomQuery, DimensionQuery, QueryMap}, - result as superposition, + database::{ + models::experimentation::{Experiment, ExperimentGroup, ExperimentStatusType}, + schema::{experiment_groups::dsl as experiment_groups, experiments::dsl}, + }, + logic::evaluate_local_cohorts, + result as superposition, Config, PaginatedResponse, }; use super::types::IdentifierQuery; @@ -32,65 +56,186 @@ async fn resolve_with_exp_handler( req: HttpRequest, body: Option>, merge_strategy: Header, - db_conn: DbConnection, dimension_params: DimensionQuery, query_filters: superposition_query::Query, identifier_query: superposition_query::Query, workspace_context: WorkspaceContext, state: Data, ) -> superposition::Result { - let DbConnection(mut conn) = db_conn; let query_filters = 
query_filters.into_inner(); let identifier_query = identifier_query.into_inner(); - let max_created_at = get_max_created_at(&mut conn, &workspace_context.schema_name) - .map_err(|e| log::error!("failed to fetch max timestamp from event_log : {e}")) - .ok(); + let schema_name = workspace_context.schema_name.clone(); + + let max_created_at = fetch_from_redis_else_writeback::>( + format!("{}{LAST_MODIFIED_KEY_SUFFIX}", *schema_name), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_max_created_at(&mut conn, &schema_name).map_err(|e| { + log::error!("failed to fetch max timestamp from event_log: {e}"); + db_error!(e) + }) + }, + ) + .await + .ok(); if identifier_query.identifier.is_none() && is_not_modified(max_created_at, &req) { return Ok(HttpResponse::NotModified().finish()); } let (is_smithy, mut query_data) = setup_query_data(&req, &body, &dimension_params)?; - let mut config_version = - get_config_version(&query_filters.version, &workspace_context)?; - // This is needed as `generate_config_from_version` updates config_version value - // in case nothing was found either from query params or workspace settings - // This value is separately needed, as in the following check the value before the modification is required - let config_ver = config_version.to_owned(); + let config_version = fetch_from_redis_else_writeback::( + format!("{}{CONFIG_VERSION_KEY_SUFFIX}", *schema_name), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + get_config_version(&query_filters.version, &workspace_context, &mut conn) + }, + ) + .await + .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + + let mut config = fetch_from_redis_else_writeback::( + format!("{}::{}{CONFIG_KEY_SUFFIX}", *schema_name, config_version), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + generate_config_from_version( + &mut Some(config_version), + &mut conn, + &workspace_context.schema_name, + ) + }, + ) + .await + .map_err(|e| unexpected_error!("failed to generate config: {}", e))?; - let mut config = generate_config_from_version( - &mut config_version, - &mut conn, - &workspace_context.schema_name, - )?; - - if let (None, Some(identifier)) = (config_ver, identifier_query.identifier) { + if let (None, Some(identifier)) = + (query_filters.version.clone(), identifier_query.identifier) + { let context_map: &Map = &query_data; - let (applicable_variants, _) = get_applicable_variants_helper( - &mut conn, - context_map.clone(), - &config.dimensions, - identifier, - &workspace_context, - ) - .await?; + + // Fetch experiment groups from redis + let experiment_groups = + fetch_from_redis_else_writeback::>( + format!("{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", *schema_name), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + let groups = experiment_groups::experiment_groups + .schema_name(&workspace_context.schema_name) + .load::(&mut conn) + .map_err(|e| { + log::error!("failed to fetch experiment groups: {e}"); + db_error!(e) + })?; + let total_items = groups.len() as i64; + Ok(PaginatedResponse { + total_pages: 1, + total_items, + data: groups, + }) + }, + ) + .await; + + let experiment_groups: Vec = experiment_groups + .map(|paginated| paginated.data) + 
.unwrap_or_default(); + + // Convert to FfiExperimentGroup for superposition_core functions + let ffi_experiment_groups: Vec = experiment_groups + .into_iter() + .map(FfiExperimentGroup::from) + .collect(); + + let context = + Value::Object(evaluate_local_cohorts(&config.dimensions, context_map)); + + let buckets = get_applicable_buckets_from_group( + &ffi_experiment_groups, + &context, + &identifier, + ); + + let exp_ids = buckets + .iter() + .filter_map(|(_, bucket)| bucket.experiment_id.parse::().ok()) + .collect::>(); + + // Fetch experiments from database (these are filtered by specific IDs, so caching all wouldn't help much) + let exps: HashMap = if !exp_ids.is_empty() { + let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; + dsl::experiments + .filter( + dsl::id + .eq_any(exp_ids) + .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), + ) + .schema_name(&workspace_context.schema_name) + .load::(&mut conn) + .map_err(|e| { + log::error!("failed to fetch experiments: {e}"); + db_error!(e) + })? + .into_iter() + .map(|exp| { + let ffi_exp = FfiExperiment::from(exp); + let id = ffi_exp.id.clone(); + (id, ffi_exp) + }) + .collect() + } else { + HashMap::new() + }; + + let applicable_variants = + get_applicable_variants_from_group_response(&exps, &context, &buckets); query_data.insert("variantIds".to_string(), applicable_variants.into()); } - let resolved_config = resolve( - &mut config, - query_data, - merge_strategy, - &mut conn, - &query_filters, - &workspace_context, - &state.master_encryption_key, - )?; + let resolved_config = { + let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; + resolve( + &mut config, + query_data, + merge_strategy, + &mut conn, + &query_filters, + &workspace_context, + )? + }; let mut resp = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut resp); - add_audit_id_to_header(&mut conn, &mut resp, &workspace_context.schema_name); - add_config_version_to_header(&config_version, &mut resp); + + // Fetch audit_id from redis + if let Ok(audit_id) = fetch_from_redis_else_writeback::( + format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), + &schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + fetch_audit_id(&mut conn, &workspace_context.schema_name) + .ok_or(not_found!("Audit ID not found")) + }, + ) + .await + { + resp.insert_header((AppHeader::XAuditId.to_string(), audit_id)); + } + + add_config_version_to_header(&Some(config_version), &mut resp); Ok(resp.json(resolved_config)) } diff --git a/crates/superposition/src/resolve/types.rs b/crates/superposition/src/resolve/types.rs index 045bdf647..edb14886f 100644 --- a/crates/superposition/src/resolve/types.rs +++ b/crates/superposition/src/resolve/types.rs @@ -2,6 +2,8 @@ use serde::Deserialize; use superposition_derives::{IsEmpty, QueryParam}; use superposition_types::{IsEmpty, custom_query::QueryParam}; +/// Query param for targeting key or the identifier to be used in experiments. 
+/// Also known as toss #[derive(Deserialize, IsEmpty, QueryParam, Default)] pub struct IdentifierQuery { #[query_param(skip_if_empty)] From 43169893a932138f4718e6aaf1fb31a71d67b184 Mon Sep 17 00:00:00 2001 From: datron Date: Mon, 9 Feb 2026 16:38:03 +0530 Subject: [PATCH 05/22] chore: rebase with main --- .../client/SuperpositionAsyncClientImpl.java | 2 +- .../client/SuperpositionClientImpl.java | 2 +- .../src/api/config/handlers.rs | 37 +--- .../src/api/config/helpers.rs | 48 ++++- .../src/api/context/handlers.rs | 51 +++-- .../src/api/default_config/handlers.rs | 14 +- .../src/api/dimension/handlers.rs | 25 ++- crates/context_aware_config/src/helpers.rs | 13 +- .../src/api/experiment_groups/handlers.rs | 86 +++++---- .../src/api/experiment_groups/helpers.rs | 9 +- .../src/api/experiments/handlers.rs | 74 ++++---- .../src/api/experiments/helpers.rs | 179 +++++++++++++++++- crates/service_utils/src/redis.rs | 12 +- crates/service_utils/src/service.rs | 4 +- crates/service_utils/src/service/types.rs | 2 +- crates/superposition/src/resolve/handlers.rs | 22 +-- crates/superposition/src/resolve/types.rs | 2 +- 17 files changed, 406 insertions(+), 176 deletions(-) diff --git a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java index e08d63e6d..7649261ca 100644 --- a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java +++ b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java @@ -267,8 +267,8 @@ final class SuperpositionAsyncClientImpl extends Client implements SuperpositionAsyncClient { private static final TypeRegistry TYPE_REGISTRY = TypeRegistry.builder() .putType(NotAuthorizedException.$ID, NotAuthorizedException.class, NotAuthorizedException::builder) - .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) + .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) .putType(InternalFailureException.$ID, InternalFailureException.class, InternalFailureException::builder) .putType(UnknownOperationException.$ID, UnknownOperationException.class, UnknownOperationException::builder) .putType(MalformedRequestException.$ID, MalformedRequestException.class, MalformedRequestException::builder) diff --git a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java index 904ada31e..bfc6f4349 100644 --- a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java +++ b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java @@ -267,8 +267,8 @@ final class SuperpositionClientImpl extends Client implements SuperpositionClient { private static final TypeRegistry TYPE_REGISTRY = TypeRegistry.builder() .putType(NotAuthorizedException.$ID, NotAuthorizedException.class, NotAuthorizedException::builder) - .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) + .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) .putType(InternalFailureException.$ID, 
InternalFailureException.class, InternalFailureException::builder) .putType(UnknownOperationException.$ID, UnknownOperationException.class, UnknownOperationException::builder) .putType(MalformedRequestException.$ID, MalformedRequestException.class, MalformedRequestException::builder) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index b6539402d..a2ddabfb7 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -1,23 +1,19 @@ use std::collections::HashMap; -use actix_http::{header::HeaderValue, StatusCode}; +use actix_http::header::HeaderValue; use actix_web::{ - get, - http::header::ContentType, - put, routes, + HttpRequest, HttpResponse, HttpResponseBuilder, Scope, get, put, routes, web::{Data, Header, Json, Path, Query}, - HttpRequest, HttpResponse, HttpResponseBuilder, Scope, }; use chrono::{DateTime, Timelike, Utc}; -use diesel::{dsl::max, ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; +use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper, dsl::max}; use itertools::Itertools; -use serde_json::{json, Map, Value}; -#[cfg(feature = "jsonlogic")] -use service_utils::helpers::extract_dimensions; +use serde_json::{Map, Value, json}; use service_utils::{ + helpers::fetch_dimensions_info_map, redis::{ - fetch_from_redis_else_writeback, AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, - CONFIG_VERSION_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, + AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, + LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, }, service::{ get_db_connection, @@ -52,7 +48,6 @@ use superposition_types::{ }; use uuid::Uuid; -<<<<<<< HEAD use crate::api::context::{self, helpers::query_description}; use crate::{ api::{ @@ -60,12 +55,9 @@ use crate::{ dimension::fetch_dimensions_info_map, }, helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, -======= use crate::api::{ config::helpers::get_config_version, context::{self, helpers::query_description}, - dimension::fetch_dimensions_info_map, ->>>>>>> 269cf29d (feat: introduce writeback methods for redis) }; use crate::helpers::{calculate_context_weight, generate_cac}; @@ -73,20 +65,12 @@ use super::helpers::{apply_prefix_filter_to_config, resolve, setup_query_data}; #[allow(clippy::let_and_return)] pub fn endpoints() -> Scope { -<<<<<<< HEAD let scope = Scope::new("") .service(get_handler) .service(get_toml_handler) .service(resolve_handler) .service(reduce_handler) .service(list_version_handler) - .service(get_version_handler); -======= - Scope::new("") - .service(get_handler) - .service(resolve_handler) - .service(reduce_handler) - .service(list_version_handler) .service(get_version_handler) } @@ -689,11 +673,6 @@ async fn get_handler( config = config.filter_by_dimensions(&context); } add_last_modified_to_header(max_created_at, is_smithy, &mut response); -<<<<<<< HEAD - add_audit_id_to_header(&mut conn, &mut response, &workspace_context.schema_name); - add_config_version_to_header(&version, &mut response); - -======= if let Ok(audit_id) = fetch_from_redis_else_writeback::( format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), &schema_name, @@ -710,7 +689,6 @@ async fn get_handler( response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); } add_config_version_to_header(&Some(version), &mut response); ->>>>>>> 269cf29d (feat: introduce writeback methods for redis) Ok(response.json(config)) } @@ -827,6 +805,7 @@ 
async fn resolve_handler( &mut conn, &query_filters, &workspace_context, + &state.master_encryption_key, )? }; diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index 6d5ce635f..a08120072 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -5,7 +5,7 @@ use actix_web::{ }; use cac_client::{eval_cac, eval_cac_with_reasoning}; use chrono::{DateTime, Timelike, Utc}; -use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; +use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; use serde_json::{Map, Value}; use service_utils::service::types::{ AppHeader, EncryptionKey, SchemaName, WorkspaceContext, @@ -15,8 +15,9 @@ use superposition_types::{ Config, DBConnection, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{CommaSeparatedStringQParams, DimensionQuery, QueryMap}, - database::schema::{ - config_versions::dsl as config_versions, event_log::dsl as event_log, + database::{ + schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, + superposition_schema::superposition::workspaces, }, result as superposition, }; @@ -50,16 +51,45 @@ pub fn get_config_version( ), _ => match get_config_version_from_workspace(workspace_context, conn) { Some(v) => Ok(v), - None => get_config_version_from_db(conn, &workspace_context.schema_name) - .map_err(|e| { - log::error!("failed to fetch latest config version from db: {e}"); - db_error!(e) - }), + None => get_config_version_from_versions_table( + conn, + &workspace_context.schema_name, + ) + .map_err(|e| { + log::error!("failed to fetch latest config version from db: {e}"); + db_error!(e) + }), }, } } -fn get_config_version_from_db( +fn get_config_version_from_workspace( + workspace_context: &WorkspaceContext, + conn: &mut DBConnection, +) -> Option { + match workspaces::dsl::workspaces + .select(workspaces::config_version) + .filter( + workspaces::organisation_id + .eq(&workspace_context.organisation_id.0) + .and(workspaces::workspace_name.eq(&workspace_context.workspace_id.0)), + ) + .get_result::>(conn) + { + Ok(version) => version, + Err(e) => { + log::error!( + "Failed to get config_version for org_id: {}, workspace_name: {} — {:?}", + workspace_context.organisation_id.0, + workspace_context.workspace_id.0, + e + ); + None + } + } +} + +fn get_config_version_from_versions_table( conn: &mut DBConnection, schema_name: &SchemaName, ) -> Result { diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 2b0ae7459..1f9c89a71 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -49,13 +49,10 @@ use superposition_types::{ }; use crate::{ - api::{ - context::{ - hash, - helpers::{query_description, validate_ctx}, - operations, - }, - dimension::fetch_dimensions_info_map, + api::context::{ + hash, + helpers::{query_description, validate_ctx}, + operations, }, helpers::{ add_config_version, calculate_context_weight, put_config_in_redis, @@ -181,9 +178,11 @@ async fn create_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with 
new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } Ok(http_resp.json(put_response)) @@ -272,9 +271,11 @@ async fn update_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } Ok(http_resp.json(override_resp)) @@ -386,9 +387,11 @@ async fn move_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } Ok(http_resp.json(move_response.context)) @@ -620,9 +623,11 @@ async fn delete_handler( })?; let DbConnection(mut conn) = db_conn; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { payload: &deleted_ctx, @@ -853,9 +858,11 @@ async fn bulk_operations_handler( })?; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { @@ -967,9 +974,15 @@ async fn weight_recompute_handler( let version_id = add_config_version(&state, tags, config_version_desc, transaction_conn, &workspace_context.schema_name)?; Ok(version_id) })?; - if let Err(e) = put_config_in_redis(config_version_id, state, &schema_name, &mut conn).await + if let Err(e) = put_config_in_redis( + config_version_id, + state, + &workspace_context.schema_name, + &mut conn, + ) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { diff --git a/crates/context_aware_config/src/api/default_config/handlers.rs b/crates/context_aware_config/src/api/default_config/handlers.rs index e13ce889e..e0fb03095 100644 --- a/crates/context_aware_config/src/api/default_config/handlers.rs +++ b/crates/context_aware_config/src/api/default_config/handlers.rs @@ -164,9 +164,11 @@ async fn create_handler( Ok(version_id) })?; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { @@ -316,9 +318,11 @@ async fn update_handler( Ok((val, version_id)) })?; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, 
&workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { @@ -548,7 +552,7 @@ async fn delete_handler( })?; if let Err(e) = - put_config_in_redis(version_id, state, &schema_name, &mut conn).await + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn).await { log::error!("Failed to update redis cache with new context: {}", e); } diff --git a/crates/context_aware_config/src/api/dimension/handlers.rs b/crates/context_aware_config/src/api/dimension/handlers.rs index 089b06e85..2572cefcc 100644 --- a/crates/context_aware_config/src/api/dimension/handlers.rs +++ b/crates/context_aware_config/src/api/dimension/handlers.rs @@ -241,9 +241,11 @@ async fn create_handler( } })?; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { @@ -475,9 +477,11 @@ async fn update_handler( Ok((result, is_mandatory, version_id)) })?; - if let Err(e) = put_config_in_redis(version_id, state, &schema_name, &mut conn).await + if let Err(e) = + put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { @@ -590,7 +594,7 @@ async fn delete_handler( )?; if context_ids.is_empty() { - let (version_id, dimension_data) = conn.transaction::<_, superposition::AppError, _>(|transaction_conn| { + let (resp, version_id) = conn.transaction::<_, superposition::AppError, _>(|transaction_conn| { use dimensions::dsl; if !dimension_data.dependency_graph.is_empty() { @@ -659,10 +663,15 @@ async fn delete_handler( } })?; - if let Err(e) = - put_config_in_redis(_version_id, state, &schema_name, &mut conn).await + if let Err(e) = put_config_in_redis( + version_id, + state, + &workspace_context.schema_name, + &mut conn, + ) + .await { - log::error!("Failed to update redis cache with new context: {}", e); + log::warn!("Failed to update redis cache with new context: {}", e); } let data = WebhookData { payload: &dimension_data, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index 184064d38..4c7943a06 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -11,10 +11,6 @@ use fred::{interfaces::KeysInterface, types::Expiration}; use jsonschema::{Draft, JSONSchema}; use num_bigint::BigUint; use serde_json::{Map, Value, json}; -use service_utils::{ - helpers::{fetch_dimensions_info_map, generate_snowflake_id}, - service::types::{AppState, EncryptionKey, SchemaName, WorkspaceContext}, -}; use service_utils::{ helpers::get_from_env_or_default, redis::{ @@ -22,8 +18,15 @@ use service_utils::{ LAST_MODIFIED_KEY_SUFFIX, }, }; +use service_utils::{ + helpers::{fetch_dimensions_info_map, generate_snowflake_id}, + service::types::{AppState, EncryptionKey, SchemaName, WorkspaceContext}, +}; use superposition_macros::{db_error, unexpected_error, validation_error}; -use superposition_types::database::schema::event_log::dsl as event_log; +use 
superposition_types::database::{ + models::Workspace, schema::event_log::dsl as event_log, + superposition_schema::superposition::workspaces, +}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DefaultConfigInfo, DefaultConfigsWithSchema, DetailedConfig, DimensionInfo, OverrideWithKeys, Overrides, diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index 8fcf3d5f9..f42d52198 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -10,10 +10,10 @@ use diesel::{ use serde_json::Value; use service_utils::{ helpers::{generate_snowflake_id, get_from_env_or_default}, - redis::{fetch_from_redis_else_writeback, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}, + redis::{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, fetch_from_redis_else_writeback}, service::{ get_db_connection, - types::{AppState, DbConnection, SchemaName, WorkspaceContext}, + types::{AppState, DbConnection, WorkspaceContext}, }, }; use superposition_derives::authorized; @@ -148,7 +148,7 @@ async fn create_handler( if let Err(err) = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -204,7 +204,7 @@ async fn update_handler( if let Err(err) = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -262,7 +262,7 @@ async fn add_members_handler( if let Err(err) = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -313,7 +313,7 @@ async fn remove_members_handler( if let Err(err) = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -328,13 +328,15 @@ async fn list_handler( workspace_context: WorkspaceContext, pagination_params: superposition_query::Query, filters: superposition_query::Query, - schema_name: SchemaName, state: Data, ) -> superposition::Result>> { - let key = format!("{}{}", schema_name.0, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX); + let key = format!( + "{}{}", + workspace_context.schema_name.0, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX + ); fetch_from_redis_else_writeback::>( key, - &schema_name, + &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -343,7 +345,7 @@ async fn list_handler( pagination_params, filters, db_conn, - schema_name.clone(), + &workspace_context, ) }, ) @@ -356,7 +358,7 @@ fn list_experiment_groups_db( pagination_params: superposition_query::Query, filters: superposition_query::Query, db_conn: DbConnection, - schema_name: SchemaName, + workspace_context: &WorkspaceContext, ) -> superposition::Result> { let DbConnection(mut conn) = db_conn; let query_builder = |filters: &ExpGroupFilters| { @@ -439,31 +441,35 @@ async fn delete_handler( state: Data, ) -> superposition::Result> { let id = exp_group_id.into_inner(); - let result = db_conn.transaction::, superposition::AppError, _>(|conn| { - let marked_group = diesel::update(experiment_groups::experiment_groups) - .filter(experiment_groups::id.eq(&id)) - .set(( - experiment_groups::last_modified_by.eq(user.get_email()), - experiment_groups::last_modified_at.eq(chrono::Utc::now()), - )) - .returning(ExperimentGroup::as_returning()) - 
.schema_name(&workspace_context.schema_name) - .get_result(conn)?; - if !marked_group.member_experiment_ids.is_empty() { - return Err(bad_argument!( - "Cannot delete experiment group {} since it has members", - marked_group.name - )); - } - diesel::delete(experiment_groups::experiment_groups) - .filter(experiment_groups::id.eq(&id)) - .schema_name(&workspace_context.schema_name) - .execute(conn)?; - Ok(Json(marked_group)) - }); - if let Err(err) = - put_experiment_groups_in_redis(state.redis.clone(), &mut db_conn, &schema_name) - .await + let result = db_conn + .transaction::, superposition::AppError, _>(|conn| { + let marked_group = diesel::update(experiment_groups::experiment_groups) + .filter(experiment_groups::id.eq(&id)) + .set(( + experiment_groups::last_modified_by.eq(user.email), + experiment_groups::last_modified_at.eq(chrono::Utc::now()), + )) + .returning(ExperimentGroup::as_returning()) + .schema_name(&workspace_context.schema_name) + .get_result(conn)?; + if !marked_group.member_experiment_ids.is_empty() { + return Err(bad_argument!( + "Cannot delete experiment group {} since it has members", + marked_group.name + )); + } + diesel::delete(experiment_groups::experiment_groups) + .filter(experiment_groups::id.eq(&id)) + .schema_name(&workspace_context.schema_name) + .execute(conn)?; + Ok(Json(marked_group)) + }); + if let Err(err) = put_experiment_groups_in_redis( + state.redis.clone(), + &mut db_conn, + &workspace_context.schema_name, + ) + .await { log::error!( "Failed to update experiment groups in redis after creation: {}", @@ -530,8 +536,12 @@ async fn backfill_handler( } Ok(results) })?; - if let Err(err) = - put_experiment_groups_in_redis(state.redis.clone(), &mut conn, &schema_name).await + if let Err(err) = put_experiment_groups_in_redis( + state.redis.clone(), + &mut conn, + &workspace_context.schema_name, + ) + .await { log::error!( "Failed to update experiment groups in redis after creation: {}", diff --git a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs index b37264680..672f99e6a 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs @@ -4,7 +4,10 @@ use actix_web::web::{Data, Json}; use diesel::{ BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper, }; -use fred::{prelude::{KeysInterface, RedisPool}, types::Expiration}; +use fred::{ + prelude::{KeysInterface, RedisPool}, + types::Expiration, +}; use serde_json::Value; use service_utils::{ helpers::{generate_snowflake_id, get_from_env_or_default}, @@ -13,7 +16,7 @@ use service_utils::{ }; use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ - Condition, DBConnection, User, + Condition, DBConnection, PaginatedResponse, User, api::experiment_groups::ExpGroupMemberRequest, database::{ models::{ @@ -27,7 +30,7 @@ use superposition_types::{ experiment_groups::dsl as experiment_groups, experiments::dsl as experiments, }, }, - result as superposition, Condition, DBConnection, PaginatedResponse, User, + result as superposition, }; use crate::api::experiments::helpers::{ensure_experiments_exist, hash}; diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index abf2cf0c5..01220242f 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ 
b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -13,9 +13,9 @@ use actix_web::{ }; use chrono::{DateTime, Utc}; use diesel::{ - r2d2::{ConnectionManager, PooledConnection}, Connection, ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl, SelectableHelper, TextExpressionMethods, + r2d2::{ConnectionManager, PooledConnection}, }; use experimentation_client::{ get_applicable_buckets_from_group, get_applicable_variants_from_group_response, @@ -28,22 +28,19 @@ use service_utils::{ fetch_dimensions_info_map, generate_snowflake_id, request, }, redis::{ - fetch_from_redis_else_writeback, EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, - EXPERIMENTS_LIST_KEY_SUFFIX, + EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, EXPERIMENTS_LIST_KEY_SUFFIX, + fetch_from_redis_else_writeback, }, service::{ get_db_connection, - types::{ - AppHeader, AppState, CustomHeaders, DbConnection, SchemaName, - WorkspaceContext, - }, + types::{AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext}, }, }; use superposition_derives::authorized; use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ - Cac, Condition, Contextual, DimensionInfo, Exp, ListResponse, Overrides, - PaginatedResponse, Resource, SortBy, User, + Cac, Condition, Contextual, DBConnection, DimensionInfo, Exp, ListResponse, + Overrides, PaginatedResponse, SortBy, User, api::{ DimensionMatchStrategy, context::{ @@ -75,9 +72,8 @@ use superposition_types::{ }, schema::{event_log::dsl as event_log, experiments::dsl as experiments}, }, - logic::evaluate_local_cohorts, - result as superposition, Cac, Condition, Config, Contextual, DBConnection, Exp, - ListResponse, Overrides, PaginatedResponse, SortBy, User, + logic::{evaluate_local_cohorts, evaluate_local_cohorts_skip_unresolved}, + result as superposition, }; use crate::api::{ @@ -88,9 +84,8 @@ use crate::api::{ experiments::{ helpers::{ fetch_and_validate_change_reason_with_function, fetch_experiment_groups, - fetch_experiments, fetch_webhook_by_event, get_workspace, - put_experiments_in_redis, validate_control_overrides, - validate_delete_experiment_variants, + fetch_experiments, fetch_webhook_by_event, put_experiments_in_redis, + validate_control_overrides, validate_delete_experiment_variants, }, types::StartedByChangeSet, }, @@ -386,7 +381,7 @@ async fn create_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -456,7 +451,7 @@ async fn conclude_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -734,7 +729,7 @@ async fn discard_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -872,13 +867,13 @@ pub async fn discard( } pub async fn get_applicable_variants_helper( - experiments: &Vec, - experiment_groups: &Vec, + experiments: &[Experiment], + experiment_groups: &[ExperimentGroup], context: Map, dimensions_info: &HashMap, identifier: String, ) -> superposition::Result<(Vec, HashMap)> { - let context = Value::Object(evaluate_local_cohorts(&config.dimensions, &context)); + let context = evaluate_local_cohorts(dimensions_info, &context); let buckets = get_applicable_buckets_from_group(experiment_groups, &context, &identifier); @@ -918,7 +913,6 @@ async fn get_applicable_variants_handler( req_body: Option>, query_data: 
Option>, dimension_params: Option>, - workspace_context: WorkspaceContext, ) -> superposition::Result>, Json>>> { let (context, identifier) = match (req.method().clone(), query_data, dimension_params, req_body) { @@ -939,9 +933,12 @@ async fn get_applicable_variants_handler( return Err(bad_argument!("Invalid input for the method")); } }; + let dimensions_info = { + let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; + fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)? + }; let experiments = fetch_experiments(&state, &workspace_context).await?; let experiment_groups = fetch_experiment_groups(&state, &workspace_context).await?; - let (config, _) = fetch_cac_config(&state, &workspace_context).await?; let (applicable_variants, exps) = get_applicable_variants_helper( &experiments, @@ -979,12 +976,14 @@ async fn list_handler( pagination_params: superposition_query::Query, filters: superposition_query::Query, dimension_params: DimensionQuery, - schema_name: SchemaName, state: Data, ) -> superposition::Result { let max_event_timestamp = fetch_from_redis_else_writeback::>>( - format!("{}{EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!( + "{}{EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX}", + *workspace_context.schema_name + ), + &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -992,7 +991,7 @@ async fn list_handler( event_log::event_log .filter(event_log::table_name.eq("experiments")) .select(diesel::dsl::max(event_log::timestamp)) - .schema_name(&schema_name) + .schema_name(&workspace_context.schema_name) .first(&mut conn) .map_err(|e| { log::error!("failed to fetch max timestamp from event_log: {e}"); @@ -1024,8 +1023,11 @@ async fn list_handler( if read_from_redis { let response = fetch_from_redis_else_writeback::>( - format!("{}{EXPERIMENTS_LIST_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!( + "{}{EXPERIMENTS_LIST_KEY_SUFFIX}", + *workspace_context.schema_name + ), + &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -1034,8 +1036,8 @@ async fn list_handler( pagination_params.clone(), filters.clone(), dimension_params.clone(), - schema_name.clone(), conn, + &workspace_context, ) }, ) @@ -1047,8 +1049,8 @@ async fn list_handler( pagination_params, filters, dimension_params, - schema_name, conn, + &workspace_context, )?; Ok(HttpResponse::Ok().json(paginated_response)) } @@ -1058,8 +1060,8 @@ fn list_experiments_db( pagination_params: superposition_query::Query, filters: superposition_query::Query, dimension_params: DimensionQuery, - schema_name: SchemaName, mut conn: DBConnection, + workspace_context: &WorkspaceContext, ) -> superposition::Result> { let dimension_params = dimension_params.into_inner(); @@ -1388,7 +1390,7 @@ async fn ramp_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -1740,7 +1742,7 @@ async fn update_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -1803,7 +1805,7 @@ async fn pause_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - &workspace_request.schema_name, + &workspace_context.schema_name, ) .await { @@ -1902,7 +1904,7 @@ async fn resume_handler( if let Err(err) = put_experiments_in_redis( state.redis.clone(), &mut conn, - 
&workspace_request.schema_name, + &workspace_context.schema_name, ) .await { diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index ed417a0aa..a0eb547aa 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -14,12 +14,14 @@ use fred::{ types::Expiration, }; use serde_json::{Map, Value}; -use service_utils::service::types::{ - AppState, ExperimentationFlags, SchemaName, WorkspaceContext, +use service_utils::{ + helpers::get_from_env_or_default, + redis::EXPERIMENTS_LIST_KEY_SUFFIX, + service::types::{AppState, ExperimentationFlags, SchemaName, WorkspaceContext}, }; use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ - Condition, Config, DBConnection, Exp, Overrides, User, + Condition, Config, DBConnection, Exp, Overrides, PaginatedResponse, User, api::{ I64Update, config::{ConfigQuery, ResolveConfigQuery}, @@ -41,8 +43,7 @@ use superposition_types::{ }, schema::experiments::dsl as experiments, }, - result as superposition, Condition, Config, DBConnection, Exp, Overrides, - PaginatedResponse, User, + result as superposition, }; use crate::api::experiment_groups::helpers::{ @@ -442,6 +443,174 @@ pub async fn fetch_cac_config( } } +pub async fn fetch_experiments( + state: &Data, + workspace_request: &WorkspaceContext, +) -> superposition::Result> { + let http_client = reqwest::Client::new(); + let url = format!("{}/experiments", state.cac_host); + let headers_map = construct_header_map( + &workspace_request.workspace_id, + &workspace_request.organisation_id, + vec![], + )?; + + let response = http_client + .get(&url) + .headers(headers_map.into()) + .header( + header::AUTHORIZATION, + format!("Internal {}", state.superposition_token), + ) + .send() + .await; + + match response { + Ok(res) => { + let experiments = res + .json::>() + .await + .map(|experiments| { + experiments + .data + .iter() + .map(|experiment| Experiment { + id: experiment.id.parse::().unwrap_or_default(), + created_at: experiment.created_at, + created_by: experiment.created_by.clone(), + last_modified: experiment.last_modified, + name: experiment.name.clone(), + experiment_type: experiment.experiment_type, + override_keys: experiment.override_keys.clone(), + status: experiment.status, + traffic_percentage: experiment.traffic_percentage, + started_at: experiment.started_at, + started_by: experiment.started_by.clone(), + context: experiment.context.clone(), + variants: experiment.variants.clone(), + last_modified_by: experiment.last_modified_by.clone(), + chosen_variant: experiment.chosen_variant.clone(), + description: experiment.description.clone(), + change_reason: experiment.change_reason.clone(), + metrics: experiment.metrics.clone(), + experiment_group_id: experiment + .experiment_group_id + .as_ref() + .and_then(|id_str| id_str.parse::().ok()), + }) + .collect::>() + }) + .map_err(|err| { + log::error!( + "failed to parse experiments response with error: {}", + err + ); + unexpected_error!("Failed to parse experiments.") + })?; + Ok(experiments) + } + Err(error) => { + log::error!("Failed to fetch experiments with error: {:?}", error); + Err(unexpected_error!(error)) + } + } +} + +pub async fn fetch_experiment_groups( + state: &Data, + workspace_request: &WorkspaceContext, +) -> superposition::Result> { + let http_client = reqwest::Client::new(); + let url = format!("{}/experiment-groups", 
state.cac_host); + let headers_map = construct_header_map( + &workspace_request.workspace_id, + &workspace_request.organisation_id, + vec![], + )?; + + let response = http_client + .get(&url) + .headers(headers_map.into()) + .header( + header::AUTHORIZATION, + format!("Internal {}", state.superposition_token), + ) + .send() + .await; + + match response { + Ok(res) => { + let experiment_groups = res + .json::>() + .await + .map(|experiment_groups| experiment_groups.data) + .map_err(|err| { + log::error!( + "failed to parse experiment groups response with error: {}", + err + ); + unexpected_error!("Failed to parse experiment groups.") + })?; + Ok(experiment_groups) + } + Err(error) => { + log::error!("Failed to fetch experiment groups with error: {:?}", error); + Err(unexpected_error!(error)) + } + } +} + +pub async fn fetch_webhook_by_event( + state: &Data, + user: &User, + event: &WebhookEvent, + workspace_context: &WorkspaceContext, +) -> superposition::Result { + let http_client = reqwest::Client::new(); + let url = format!("{}/webhook/event/{event}", state.cac_host); + let user_str = serde_json::to_string(user).map_err(|err| { + log::error!("Something went wrong, failed to stringify user data {err}"); + unexpected_error!( + "Something went wrong, failed to stringify user data {}", + err + ) + })?; + + let headers_map = construct_header_map( + &workspace_context.workspace_id, + &workspace_context.organisation_id, + vec![("x-user", user_str)], + )?; + + let response = http_client + .get(&url) + .headers(headers_map.into()) + .header( + header::AUTHORIZATION, + format!("Internal {}", state.superposition_token), + ) + .send() + .await; + + match response { + Ok(res) => { + if res.status() == 404 { + log::info!("No Webhook found for event: {}", event); + return Ok(Webhook::default()); + } + let webhook = res.json::().await.map_err(|err| { + log::error!("failed to parse Webhook response with error: {}", err); + unexpected_error!("Failed to parse Webhook.") + })?; + Ok(webhook) + } + Err(error) => { + log::error!("Failed to fetch Webhook with error: {:?}", error); + Err(unexpected_error!(error)) + } + } +} + pub fn handle_experiment_group_membership( experiment: &Experiment, new_group_id: &Option, diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index 2bfb8b12e..fac01265c 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -1,9 +1,15 @@ -use fred::{prelude::{KeysInterface, RedisClient, RedisPool}, types::Expiration}; -use serde::{de::DeserializeOwned, Serialize}; +use fred::{ + prelude::{KeysInterface, RedisClient, RedisPool}, + types::Expiration, +}; +use serde::{Serialize, de::DeserializeOwned}; use superposition_macros::unexpected_error; use superposition_types::result as superposition; -use crate::{db::PgSchemaConnectionPool, helpers::get_from_env_or_default, service::types::SchemaName}; +use crate::{ + db::PgSchemaConnectionPool, helpers::get_from_env_or_default, + service::types::SchemaName, +}; pub const LAST_MODIFIED_KEY_SUFFIX: &str = "::cac_config::last_modified_at"; pub const AUDIT_ID_KEY_SUFFIX: &str = "::cac_config::audit_id"; diff --git a/crates/service_utils/src/service.rs b/crates/service_utils/src/service.rs index d1b54de0c..cd18545c5 100644 --- a/crates/service_utils/src/service.rs +++ b/crates/service_utils/src/service.rs @@ -18,7 +18,9 @@ pub fn get_db_connection( } Err(e) => { log::info!("Unable to get db connection from pool, error: {e}"); - Err(unexpected_error!("Could not get a DB connection, 
contact an admin and check logs for further information")) + Err(unexpected_error!( + "Could not get a DB connection, contact an admin and check logs for further information" + )) } } } diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index 17c58bb7f..6edca986b 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -8,8 +8,8 @@ use std::{ use actix_web::{Error, FromRequest, HttpMessage, error, web::Data}; use derive_more::{Deref, DerefMut}; -use diesel::r2d2::{ConnectionManager, PooledConnection}; use diesel::PgConnection; +use diesel::r2d2::{ConnectionManager, PooledConnection}; use jsonschema::JSONSchema; use secrecy::SecretString; use snowflake::SnowflakeIdGenerator; diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index 6f892fc37..a4a1afce4 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -1,7 +1,6 @@ use actix_web::{ - routes, + HttpRequest, HttpResponse, Scope, routes, web::{Data, Header, Json}, - HttpRequest, HttpResponse, Scope, }; use chrono::{DateTime, Utc}; use context_aware_config::api::config::fetch_audit_id; @@ -14,9 +13,9 @@ use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl}; use serde_json::{Map, Value}; use service_utils::{ redis::{ - fetch_from_redis_else_writeback, AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, - CONFIG_VERSION_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, - LAST_MODIFIED_KEY_SUFFIX, + AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, + EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, + fetch_from_redis_else_writeback, }, service::{ get_db_connection, @@ -25,12 +24,13 @@ use service_utils::{ }; use std::collections::{HashMap, HashSet}; use superposition_core::experiment::{ - get_applicable_buckets_from_group, get_applicable_variants_from_group_response, - FfiExperiment, FfiExperimentGroup, + FfiExperiment, FfiExperimentGroup, get_applicable_buckets_from_group, + get_applicable_variants_from_group_response, }; use superposition_derives::authorized; use superposition_macros::{db_error, not_found, unexpected_error}; use superposition_types::{ + Config, PaginatedResponse, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{self as superposition_query, CustomQuery, DimensionQuery, QueryMap}, database::{ @@ -38,7 +38,7 @@ use superposition_types::{ schema::{experiment_groups::dsl as experiment_groups, experiments::dsl}, }, logic::evaluate_local_cohorts, - result as superposition, Config, PaginatedResponse, + result as superposition, }; use super::types::IdentifierQuery; @@ -100,7 +100,7 @@ async fn resolve_with_exp_handler( ) .await .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; - + let mut config = fetch_from_redis_else_writeback::( format!("{}::{}{CONFIG_KEY_SUFFIX}", *schema_name, config_version), &schema_name, @@ -159,8 +159,7 @@ async fn resolve_with_exp_handler( .map(FfiExperimentGroup::from) .collect(); - let context = - Value::Object(evaluate_local_cohorts(&config.dimensions, context_map)); + let context = evaluate_local_cohorts(&config.dimensions, context_map); let buckets = get_applicable_buckets_from_group( &ffi_experiment_groups, @@ -213,6 +212,7 @@ async fn resolve_with_exp_handler( &mut conn, &query_filters, &workspace_context, + &state.master_encryption_key, )? 
}; diff --git a/crates/superposition/src/resolve/types.rs b/crates/superposition/src/resolve/types.rs index edb14886f..a9bfcf24f 100644 --- a/crates/superposition/src/resolve/types.rs +++ b/crates/superposition/src/resolve/types.rs @@ -2,7 +2,7 @@ use serde::Deserialize; use superposition_derives::{IsEmpty, QueryParam}; use superposition_types::{IsEmpty, custom_query::QueryParam}; -/// Query param for targeting key or the identifier to be used in experiments. +/// Query param for targeting key or the identifier to be used in experiments. /// Also known as toss #[derive(Deserialize, IsEmpty, QueryParam, Default)] pub struct IdentifierQuery { From 846f54fffd6dbf8192ed30bc8eb42cb5c9a10671 Mon Sep 17 00:00:00 2001 From: datron Date: Tue, 10 Feb 2026 12:52:33 +0530 Subject: [PATCH 06/22] fix: improve config version fetches --- .../src/api/config/handlers.rs | 66 +++++--------- .../src/api/config/helpers.rs | 90 ++++++++----------- crates/superposition/src/resolve/handlers.rs | 19 +--- crates/superposition_sdk/src/json_errors.rs | 13 +-- 4 files changed, 72 insertions(+), 116 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index a2ddabfb7..adcb71f7d 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -12,8 +12,8 @@ use serde_json::{Map, Value, json}; use service_utils::{ helpers::fetch_dimensions_info_map, redis::{ - AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, - LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, + AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, + fetch_from_redis_else_writeback, }, service::{ get_db_connection, @@ -608,15 +608,15 @@ async fn get_handler( ) -> superposition::Result { let mut response = HttpResponse::Ok(); let is_smithy = req.method() != actix_web::http::Method::GET; - let schema_name = workspace_context.schema_name.clone(); + let schema_name = &workspace_context.schema_name; let max_created_at = fetch_from_redis_else_writeback::>( - format!("{}{LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_max_created_at(&mut conn, &schema_name).map_err(|e| { + get_max_created_at(&mut conn, schema_name).map_err(|e| { log::error!("failed to fetch max timestamp from event_log: {e}"); db_error!(e) }) @@ -625,7 +625,7 @@ async fn get_handler( .await .ok(); - log::info!("Max created at: {max_created_at:?}"); + log::trace!("Max created at: {max_created_at:?}"); let is_not_modified = is_not_modified(max_created_at, &req); @@ -634,22 +634,12 @@ async fn get_handler( } let query_filters = query_filters.into_inner(); - let version = fetch_from_redis_else_writeback::( - format!("{}{CONFIG_VERSION_KEY_SUFFIX}", schema_name.0), - &schema_name, - state.redis.clone(), - state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_config_version(&query_filters.version, &workspace_context, &mut conn) - }, - ) - .await - .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + let version = + get_config_version(&query_filters.version, &workspace_context, &state).await?; let mut config = fetch_from_redis_else_writeback::( - format!("{}::{}{CONFIG_KEY_SUFFIX}", schema_name.0, version,), - &schema_name, + 
format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, version), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -674,8 +664,8 @@ async fn get_handler( } add_last_modified_to_header(max_created_at, is_smithy, &mut response); if let Ok(audit_id) = fetch_from_redis_else_writeback::( - format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -742,16 +732,16 @@ async fn resolve_handler( state: Data, ) -> superposition::Result { let query_filters = query_filters.into_inner(); - let schema_name = workspace_context.schema_name.clone(); + let schema_name = &workspace_context.schema_name; let max_created_at = fetch_from_redis_else_writeback::>( - format!("{}{LAST_MODIFIED_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_max_created_at(&mut conn, &schema_name).map_err(|e| { + get_max_created_at(&mut conn, schema_name).map_err(|e| { log::error!("failed to fetch max timestamp from event_log: {e}"); db_error!(e) }) @@ -764,22 +754,12 @@ async fn resolve_handler( return Ok(HttpResponse::NotModified().finish()); } - let config_version = fetch_from_redis_else_writeback::( - format!("{}{CONFIG_VERSION_KEY_SUFFIX}", schema_name.0), - &schema_name, - state.redis.clone(), - state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_config_version(&query_filters.version, &workspace_context, &mut conn) - }, - ) - .await - .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + let config_version = + get_config_version(&query_filters.version, &workspace_context, &state).await?; let mut config = fetch_from_redis_else_writeback::( - format!("{}::{}{CONFIG_KEY_SUFFIX}", schema_name.0, config_version,), - &schema_name, + format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, config_version,), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { @@ -812,8 +792,8 @@ async fn resolve_handler( let mut resp = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut resp); if let Ok(audit_id) = fetch_from_redis_else_writeback::( - format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), - &schema_name, + format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), + schema_name, state.redis.clone(), state.db_pool.clone(), |db_pool| { diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index a08120072..808c5e4b0 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -1,23 +1,29 @@ use actix_http::header::HeaderValue; use actix_web::{ HttpRequest, HttpResponseBuilder, - web::{Header, Json}, + web::{Data, Header, Json}, }; use cac_client::{eval_cac, eval_cac_with_reasoning}; use chrono::{DateTime, Timelike, Utc}; -use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; +use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; use serde_json::{Map, Value}; -use service_utils::service::types::{ - AppHeader, EncryptionKey, SchemaName, WorkspaceContext, +use service_utils::{ + redis::{CONFIG_VERSION_KEY_SUFFIX, fetch_from_redis_else_writeback}, + service::{ + get_db_connection, + types::{ + AppHeader, AppState, 
DbConnection, EncryptionKey, SchemaName, + WorkspaceContext, + }, + }, }; use superposition_macros::{bad_argument, db_error, unexpected_error}; use superposition_types::{ Config, DBConnection, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{CommaSeparatedStringQParams, DimensionQuery, QueryMap}, - database::{ - schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, - superposition_schema::superposition::workspaces, + database::schema::{ + config_versions::dsl as config_versions, event_log::dsl as event_log, }, result as superposition, }; @@ -36,10 +42,10 @@ pub fn apply_prefix_filter_to_config( Ok(config) } -pub fn get_config_version( +pub async fn get_config_version( version: &Option, workspace_context: &WorkspaceContext, - conn: &mut DBConnection, + state: &Data, ) -> superposition::Result { match version.as_ref() { Some(v) if *v != *"latest" => v.parse::().map_or_else( @@ -49,57 +55,35 @@ pub fn get_config_version( }, Ok, ), - _ => match get_config_version_from_workspace(workspace_context, conn) { + _ => match workspace_context.settings.config_version { Some(v) => Ok(v), - None => get_config_version_from_versions_table( - conn, + None => fetch_from_redis_else_writeback::( + format!( + "{}{CONFIG_VERSION_KEY_SUFFIX}", + *workspace_context.schema_name + ), &workspace_context.schema_name, + state.redis.clone(), + state.db_pool.clone(), + |db_pool| { + let DbConnection(mut conn) = get_db_connection(db_pool)?; + config_versions::config_versions + .select(config_versions::id) + .order_by(config_versions::created_at.desc()) + .schema_name(&workspace_context.schema_name) + .first::(&mut conn) + .map_err(|e| { + log::error!("failed to fetch config version from db: {}", e); + db_error!(e) + }) + }, ) - .map_err(|e| { - log::error!("failed to fetch latest config version from db: {e}"); - db_error!(e) - }), + .await + .map_err(|e| unexpected_error!("Config version not found due to: {}", e)), }, } } -fn get_config_version_from_workspace( - workspace_context: &WorkspaceContext, - conn: &mut DBConnection, -) -> Option { - match workspaces::dsl::workspaces - .select(workspaces::config_version) - .filter( - workspaces::organisation_id - .eq(&workspace_context.organisation_id.0) - .and(workspaces::workspace_name.eq(&workspace_context.workspace_id.0)), - ) - .get_result::>(conn) - { - Ok(version) => version, - Err(e) => { - log::error!( - "Failed to get config_version for org_id: {}, workspace_name: {} — {:?}", - workspace_context.organisation_id.0, - workspace_context.workspace_id.0, - e - ); - None - } - } -} - -fn get_config_version_from_versions_table( - conn: &mut DBConnection, - schema_name: &SchemaName, -) -> Result { - config_versions::config_versions - .select(config_versions::id) - .order_by(config_versions::created_at.desc()) - .schema_name(schema_name) - .first::(conn) -} - pub fn add_audit_id_to_header( conn: &mut DBConnection, resp_builder: &mut HttpResponseBuilder, diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index a4a1afce4..1dc4c4dfa 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -13,9 +13,8 @@ use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl}; use serde_json::{Map, Value}; use service_utils::{ redis::{ - AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, - EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, - fetch_from_redis_else_writeback, + 
AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, + LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, }, service::{ get_db_connection, @@ -88,18 +87,8 @@ async fn resolve_with_exp_handler( let (is_smithy, mut query_data) = setup_query_data(&req, &body, &dimension_params)?; - let config_version = fetch_from_redis_else_writeback::( - format!("{}{CONFIG_VERSION_KEY_SUFFIX}", *schema_name), - &schema_name, - state.redis.clone(), - state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_config_version(&query_filters.version, &workspace_context, &mut conn) - }, - ) - .await - .map_err(|e| unexpected_error!("Config version not found due to: {}", e))?; + let config_version = + get_config_version(&query_filters.version, &workspace_context, &state).await?; let mut config = fetch_from_redis_else_writeback::( format!("{}::{}{CONFIG_KEY_SUFFIX}", *schema_name, config_version), diff --git a/crates/superposition_sdk/src/json_errors.rs b/crates/superposition_sdk/src/json_errors.rs index 6ddf43d7f..4f1e5d482 100644 --- a/crates/superposition_sdk/src/json_errors.rs +++ b/crates/superposition_sdk/src/json_errors.rs @@ -97,7 +97,9 @@ pub fn parse_error_metadata( #[cfg(test)] mod test { - use crate::json_errors::{parse_error_body, parse_error_metadata, sanitize_error_code}; + use crate::json_errors::{ + parse_error_body, parse_error_metadata, sanitize_error_code, + }; use aws_smithy_runtime_api::client::orchestrator::HttpResponse; use aws_smithy_types::{body::SdkBody, error::ErrorMetadata}; use std::borrow::Cow; @@ -147,9 +149,11 @@ mod test { fn ignore_unrecognized_fields() { assert_eq!( Some(Cow::Borrowed("FooError")), - parse_error_body(br#"{ "__type": "FooError", "asdf": 5, "fdsa": {}, "foo": "1" }"#) - .unwrap() - .code + parse_error_body( + br#"{ "__type": "FooError", "asdf": 5, "fdsa": {}, "foo": "1" }"# + ) + .unwrap() + .code ); } @@ -209,4 +213,3 @@ mod test { ); } } - From dcae59b267d1a6805ada1e412da7166a01f2f8ae Mon Sep 17 00:00:00 2001 From: datron Date: Tue, 10 Feb 2026 13:24:39 +0530 Subject: [PATCH 07/22] fix: remove duplicate functions --- .../src/api/config/handlers.rs | 117 ++---------------- 1 file changed, 8 insertions(+), 109 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index adcb71f7d..190e5b5d4 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -1,12 +1,11 @@ use std::collections::HashMap; -use actix_http::header::HeaderValue; use actix_web::{ - HttpRequest, HttpResponse, HttpResponseBuilder, Scope, get, put, routes, + HttpRequest, HttpResponse, Scope, get, put, routes, web::{Data, Header, Json, Path, Query}, }; -use chrono::{DateTime, Timelike, Utc}; -use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper, dsl::max}; +use chrono::{DateTime, Utc}; +use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; use itertools::Itertools; use serde_json::{Map, Value, json}; use service_utils::{ @@ -56,7 +55,11 @@ use crate::{ }, helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, use crate::api::{ - config::helpers::get_config_version, + config::helpers::{ + add_config_version_to_header, add_last_modified_to_header, + generate_config_from_version, get_config_version, get_max_created_at, + is_not_modified, + }, context::{self, helpers::query_description}, }; use 
crate::helpers::{calculate_context_weight, generate_cac}; @@ -88,110 +91,6 @@ pub fn fetch_audit_id( .ok() } -fn add_last_modified_to_header( - max_created_at: Option>, - is_smithy: bool, - resp_builder: &mut HttpResponseBuilder, -) { - if let Some(date) = max_created_at { - let value = if is_smithy { - // Smithy needs to be in this format otherwise they can't - // deserialize it. - HeaderValue::from_str(date.to_rfc3339().as_str()) - } else { - HeaderValue::from_str(date.to_rfc2822().as_str()) - }; - if let Ok(header_value) = value { - resp_builder - .insert_header((AppHeader::LastModified.to_string(), header_value)); - } else { - log::error!("failed parsing datetime_utc {:?}", value); - } - } -} - -fn add_config_version_to_header( - config_version: &Option, - resp_builder: &mut HttpResponseBuilder, -) { - if let Some(val) = config_version { - resp_builder.insert_header(( - AppHeader::XConfigVersion.to_string(), - val.clone().to_string(), - )); - } -} - -fn get_max_created_at( - conn: &mut DBConnection, - schema_name: &SchemaName, -) -> Result, diesel::result::Error> { - config_versions::config_versions - .select(max(config_versions::created_at)) - .schema_name(schema_name) - .first::>>(conn) - .and_then(|res| res.ok_or(diesel::result::Error::NotFound)) -} - -fn is_not_modified(max_created_at: Option>, req: &HttpRequest) -> bool { - let nanosecond_erasure = |t: DateTime| t.with_nanosecond(0); - let last_modified = req - .headers() - .get("If-Modified-Since") - .and_then(|header_val| { - let header_str = header_val.to_str().ok()?; - DateTime::parse_from_rfc2822(header_str) - .map(|datetime| datetime.with_timezone(&Utc)) - .ok() - }) - .and_then(nanosecond_erasure); - log::info!("last modified {last_modified:?}"); - let parsed_max: Option> = max_created_at.and_then(nanosecond_erasure); - max_created_at.is_some() && parsed_max <= last_modified -} - -pub fn generate_config_from_version( - version: &mut Option, - conn: &mut DBConnection, - schema_name: &SchemaName, -) -> superposition::Result { - if let Some(val) = version { - let config = config_versions::config_versions - .select(config_versions::config) - .filter(config_versions::id.eq(*val)) - .schema_name(schema_name) - .get_result::(conn) - .map_err(|err| { - log::error!("failed to fetch config with error: {}", err); - db_error!(err) - })?; - serde_json::from_value::(config).map_err(|err| { - log::error!("failed to decode config: {}", err); - unexpected_error!("failed to decode config") - }) - } else { - match config_versions::config_versions - .select((config_versions::id, config_versions::config)) - .order(config_versions::created_at.desc()) - .schema_name(schema_name) - .first::<(i64, Value)>(conn) - { - Ok((latest_version, config)) => { - *version = Some(latest_version); - serde_json::from_value::(config).or_else(|err| { - log::error!("failed to decode config: {}", err); - generate_cac(conn, schema_name) - }) - } - Err(err) => { - log::error!("failed to find latest config: {err}"); - generate_cac(conn, schema_name) - } - } - } ->>>>>>> 269cf29d (feat: introduce writeback methods for redis) -} - fn generate_subsets(map: &Map) -> Vec> { let mut subsets = Vec::new(); let keys: Vec = map.keys().cloned().collect_vec(); From 1e173bc55d9947b3cf0ead9635f77580f75c5e1d Mon Sep 17 00:00:00 2001 From: datron Date: Tue, 10 Feb 2026 16:19:38 +0530 Subject: [PATCH 08/22] fix: update redis caching for experiment APIs --- .../src/api/experiment_groups/handlers.rs | 48 +++---- .../src/api/experiments/handlers.rs | 77 +++++++----- 
.../src/api/experiments/helpers.rs | 119 +----------------- crates/service_utils/src/redis.rs | 38 +++--- crates/superposition_sdk/src/json_errors.rs | 13 +- 5 files changed, 92 insertions(+), 203 deletions(-) diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index f42d52198..92742d00b 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -19,7 +19,7 @@ use service_utils::{ use superposition_derives::authorized; use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ - PaginatedResponse, SortBy, User, + IsEmpty, PaginatedResponse, SortBy, User, api::experiment_groups::{ ExpGroupCreateRequest, ExpGroupFilters, ExpGroupMemberRequest, ExpGroupUpdateRequest, SortOn, @@ -332,30 +332,36 @@ async fn list_handler( ) -> superposition::Result>> { let key = format!( "{}{}", - workspace_context.schema_name.0, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX + *workspace_context.schema_name, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX ); - fetch_from_redis_else_writeback::>( - key, - &workspace_context.schema_name, - state.redis.clone(), - state.db_pool.clone(), - |db_pool| { - let db_conn = get_db_connection(db_pool)?; - list_experiment_groups_db( - pagination_params, - filters, - db_conn, - &workspace_context, - ) - }, - ) - .await - .map(Json) - .map_err(|e| unexpected_error!(e)) + let read_from_redis = pagination_params.all.is_some_and(|e| e) && filters.is_empty(); + let list_experiments_closure = |db_pool| { + let db_conn = get_db_connection(db_pool)?; + list_experiment_groups_db( + &pagination_params, + filters, + db_conn, + &workspace_context, + ) + }; + if read_from_redis { + fetch_from_redis_else_writeback::>( + key, + &workspace_context.schema_name, + state.redis.clone(), + state.db_pool.clone(), + list_experiments_closure, + ) + .await + .map(Json) + .map_err(|e| unexpected_error!(e)) + } else { + list_experiments_closure(state.db_pool.clone()).map(Json) + } } fn list_experiment_groups_db( - pagination_params: superposition_query::Query, + pagination_params: &superposition_query::Query, filters: superposition_query::Query, db_conn: DbConnection, workspace_context: &WorkspaceContext, diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 01220242f..0545293ac 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -13,8 +13,8 @@ use actix_web::{ }; use chrono::{DateTime, Utc}; use diesel::{ - Connection, ExpressionMethods, PgConnection, QueryDsl, RunQueryDsl, SelectableHelper, - TextExpressionMethods, + BoolExpressionMethods, Connection, ExpressionMethods, PgConnection, QueryDsl, + RunQueryDsl, SelectableHelper, TextExpressionMethods, r2d2::{ConnectionManager, PooledConnection}, }; use experimentation_client::{ @@ -70,7 +70,10 @@ use superposition_types::{ }, others::WebhookEvent, }, - schema::{event_log::dsl as event_log, experiments::dsl as experiments}, + schema::{ + event_log::dsl as event_log, experiment_groups::dsl as experiment_groups, + experiments::dsl as experiments, + }, }, logic::{evaluate_local_cohorts, evaluate_local_cohorts_skip_unresolved}, result as superposition, @@ -83,9 +86,9 @@ use crate::api::{ }, experiments::{ helpers::{ - fetch_and_validate_change_reason_with_function, 
fetch_experiment_groups, - fetch_experiments, fetch_webhook_by_event, put_experiments_in_redis, - validate_control_overrides, validate_delete_experiment_variants, + fetch_and_validate_change_reason_with_function, fetch_webhook_by_event, + put_experiments_in_redis, validate_control_overrides, + validate_delete_experiment_variants, }, types::StartedByChangeSet, }, @@ -867,32 +870,41 @@ pub async fn discard( } pub async fn get_applicable_variants_helper( - experiments: &[Experiment], - experiment_groups: &[ExperimentGroup], + db_conn: &mut PooledConnection>, context: Map, dimensions_info: &HashMap, identifier: String, + workspace_context: &WorkspaceContext, ) -> superposition::Result<(Vec, HashMap)> { + use superposition_types::database::schema::experiments::dsl; + + let experiment_groups = experiment_groups::experiment_groups + .schema_name(&workspace_context.schema_name) + .load::(db_conn)?; + let context = evaluate_local_cohorts(dimensions_info, &context); let buckets = - get_applicable_buckets_from_group(experiment_groups, &context, &identifier); + get_applicable_buckets_from_group(&experiment_groups, &context, &identifier); let exp_ids = buckets .iter() .filter_map(|(_, bucket)| bucket.experiment_id.parse::().ok()) .collect::>(); - let exps = experiments - .iter() - .filter_map(|exp| { - if exp_ids.contains(&exp.id) { - let exp_response = ExperimentResponse::from(exp.clone()); - let id = exp_response.id.clone(); - Some((id, exp_response)) - } else { - None - } + let exps = dsl::experiments + .filter( + dsl::id + .eq_any(exp_ids) + .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), + ) + .schema_name(&workspace_context.schema_name) + .load::(db_conn)? + .into_iter() + .map(|exp| { + let exp_response = ExperimentResponse::from(exp); + let id = exp_response.id.clone(); + (id, exp_response) }) .collect::>(); @@ -933,21 +945,19 @@ async fn get_applicable_variants_handler( return Err(bad_argument!("Invalid input for the method")); } }; - let dimensions_info = { + let (applicable_variants, exps) = { let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; - fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)? 
+ let di = fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)?; + let (av, e) = get_applicable_variants_helper( + &mut conn, + context, + &di, + identifier, + &workspace_context, + ) + .await?; + (av, e) }; - let experiments = fetch_experiments(&state, &workspace_context).await?; - let experiment_groups = fetch_experiment_groups(&state, &workspace_context).await?; - - let (applicable_variants, exps) = get_applicable_variants_helper( - &experiments, - &experiment_groups, - context, - &dimensions_info, - identifier, - ) - .await?; let variants = exps .into_iter() @@ -1019,7 +1029,8 @@ async fn list_handler( && filters .status .clone() - .is_some_and(|v| *v == ExperimentStatusType::active_list()); + .is_some_and(|v| *v == ExperimentStatusType::active_list()) + && dimension_params.is_empty(); if read_from_redis { let response = fetch_from_redis_else_writeback::>( diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index a0eb547aa..eeb797fab 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -37,7 +37,7 @@ use superposition_types::{ models::{ ChangeReason, experimentation::{ - Experiment, ExperimentGroup, ExperimentStatusType, GroupType, Variant, + Experiment, ExperimentStatusType, GroupType, Variant, VariantType, }, }, @@ -443,123 +443,6 @@ pub async fn fetch_cac_config( } } -pub async fn fetch_experiments( - state: &Data, - workspace_request: &WorkspaceContext, -) -> superposition::Result> { - let http_client = reqwest::Client::new(); - let url = format!("{}/experiments", state.cac_host); - let headers_map = construct_header_map( - &workspace_request.workspace_id, - &workspace_request.organisation_id, - vec![], - )?; - - let response = http_client - .get(&url) - .headers(headers_map.into()) - .header( - header::AUTHORIZATION, - format!("Internal {}", state.superposition_token), - ) - .send() - .await; - - match response { - Ok(res) => { - let experiments = res - .json::>() - .await - .map(|experiments| { - experiments - .data - .iter() - .map(|experiment| Experiment { - id: experiment.id.parse::().unwrap_or_default(), - created_at: experiment.created_at, - created_by: experiment.created_by.clone(), - last_modified: experiment.last_modified, - name: experiment.name.clone(), - experiment_type: experiment.experiment_type, - override_keys: experiment.override_keys.clone(), - status: experiment.status, - traffic_percentage: experiment.traffic_percentage, - started_at: experiment.started_at, - started_by: experiment.started_by.clone(), - context: experiment.context.clone(), - variants: experiment.variants.clone(), - last_modified_by: experiment.last_modified_by.clone(), - chosen_variant: experiment.chosen_variant.clone(), - description: experiment.description.clone(), - change_reason: experiment.change_reason.clone(), - metrics: experiment.metrics.clone(), - experiment_group_id: experiment - .experiment_group_id - .as_ref() - .and_then(|id_str| id_str.parse::().ok()), - }) - .collect::>() - }) - .map_err(|err| { - log::error!( - "failed to parse experiments response with error: {}", - err - ); - unexpected_error!("Failed to parse experiments.") - })?; - Ok(experiments) - } - Err(error) => { - log::error!("Failed to fetch experiments with error: {:?}", error); - Err(unexpected_error!(error)) - } - } -} - -pub async fn fetch_experiment_groups( - state: &Data, - workspace_request: &WorkspaceContext, 
-) -> superposition::Result> { - let http_client = reqwest::Client::new(); - let url = format!("{}/experiment-groups", state.cac_host); - let headers_map = construct_header_map( - &workspace_request.workspace_id, - &workspace_request.organisation_id, - vec![], - )?; - - let response = http_client - .get(&url) - .headers(headers_map.into()) - .header( - header::AUTHORIZATION, - format!("Internal {}", state.superposition_token), - ) - .send() - .await; - - match response { - Ok(res) => { - let experiment_groups = res - .json::>() - .await - .map(|experiment_groups| experiment_groups.data) - .map_err(|err| { - log::error!( - "failed to parse experiment groups response with error: {}", - err - ); - unexpected_error!("Failed to parse experiment groups.") - })?; - Ok(experiment_groups) - } - Err(error) => { - log::error!("Failed to fetch experiment groups with error: {:?}", error); - Err(unexpected_error!(error)) - } - } -} - pub async fn fetch_webhook_by_event( state: &Data, user: &User, diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index fac01265c..406030a87 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -3,7 +3,6 @@ use fred::{ types::Expiration, }; use serde::{Serialize, de::DeserializeOwned}; -use superposition_macros::unexpected_error; use superposition_types::result as superposition; use crate::{ @@ -33,13 +32,10 @@ pub async fn fetch_from_redis_else_writeback( where T: Serialize + DeserializeOwned, { - if redis_pool.is_none() { + let Some(pool) = redis_pool else { log::trace!("Redis pool not configured, using fallback"); return database_call(db_pool); - } - let pool = redis_pool.ok_or(unexpected_error!( - "Could not access redis pool, this message should never be seen", - ))?; + }; let client = pool.next_connected(); match get_data_from_redis(key.clone(), client).await { Ok(data) => Ok(data), @@ -51,25 +47,21 @@ where ); let data = database_call(db_pool); if let Ok(ref value) = data { - let serialized = serde_json::to_string(value).map_err(|e| { + // If the write to redis fails, do not fail the whole request, just pass the data along + if let Ok(serialized) = serde_json::to_string(value).map_err(|e| { log::error!("Failed to serialize data for redis writeback: {}", e); - unexpected_error!( - "Failed to serialize data for redis writeback due to: {}", - e - ) - })?; - let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); - let expiration = Some(Expiration::EX(key_ttl)); - client - .set::<(), String, String>(key, serialized, expiration, None, false) - .await - .map_err(|e| { - log::error!("Failed to write back data to redis: {}", e); - unexpected_error!( - "Failed to write back data to redis due to: {}", - e + }) { + let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); + let expiration = Some(Expiration::EX(key_ttl)); + let _ = client + .set::<(), String, String>( + key, serialized, expiration, None, false, ) - })?; + .await + .map_err(|e| { + log::error!("Failed to write back data to redis: {}", e); + }); + } } data } diff --git a/crates/superposition_sdk/src/json_errors.rs b/crates/superposition_sdk/src/json_errors.rs index 4f1e5d482..6ddf43d7f 100644 --- a/crates/superposition_sdk/src/json_errors.rs +++ b/crates/superposition_sdk/src/json_errors.rs @@ -97,9 +97,7 @@ pub fn parse_error_metadata( #[cfg(test)] mod test { - use crate::json_errors::{ - parse_error_body, parse_error_metadata, sanitize_error_code, - }; + use crate::json_errors::{parse_error_body, parse_error_metadata, 
sanitize_error_code}; use aws_smithy_runtime_api::client::orchestrator::HttpResponse; use aws_smithy_types::{body::SdkBody, error::ErrorMetadata}; use std::borrow::Cow; @@ -149,11 +147,9 @@ mod test { fn ignore_unrecognized_fields() { assert_eq!( Some(Cow::Borrowed("FooError")), - parse_error_body( - br#"{ "__type": "FooError", "asdf": 5, "fdsa": {}, "foo": "1" }"# - ) - .unwrap() - .code + parse_error_body(br#"{ "__type": "FooError", "asdf": 5, "fdsa": {}, "foo": "1" }"#) + .unwrap() + .code ); } @@ -213,3 +209,4 @@ mod test { ); } } + From b64b3b3fcc5412f0d08dd1c1216ff1c2a0718fcb Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 12 Feb 2026 16:06:18 +0530 Subject: [PATCH 09/22] feat: add workspace settings caching through redis in middleware --- .../src/middlewares/workspace_context.rs | 27 ++++++---- .../superposition/src/workspace/handlers.rs | 51 ++++++++++++++++++- 2 files changed, 67 insertions(+), 11 deletions(-) diff --git a/crates/service_utils/src/middlewares/workspace_context.rs b/crates/service_utils/src/middlewares/workspace_context.rs index d1c76af19..c903c2b22 100644 --- a/crates/service_utils/src/middlewares/workspace_context.rs +++ b/crates/service_utils/src/middlewares/workspace_context.rs @@ -10,9 +10,12 @@ use actix_web::{ }; use futures_util::future::LocalBoxFuture; use regex::Regex; -use superposition_macros::{bad_argument, unexpected_error}; +use superposition_macros::bad_argument; +use superposition_types::database::models::Workspace; use crate::helpers::get_workspace; +use crate::redis::fetch_from_redis_else_writeback; +use crate::service::get_db_connection; use crate::{ extensions::HttpRequestExt, service::types::{AppState, OrganisationId, SchemaName, WorkspaceContext}, @@ -136,15 +139,19 @@ where } (true, Some(workspace_id)) => { let schema = format!("{}_{}", *organisation, *workspace_id); - let schema_name = SchemaName(schema); - let workspace_settings = { - let mut db_conn = app_state - .db_pool - .get() - .map_err(|err| unexpected_error!("{}", err))?; - - get_workspace(&schema_name, &mut db_conn)? 
- }; + let schema_name = SchemaName(schema.clone()); + let workspace_settings = + fetch_from_redis_else_writeback::( + schema, + &schema_name, + app_state.redis.clone(), + app_state.db_pool.clone(), + |db_pool| { + let mut db_conn = get_db_connection(db_pool)?; + get_workspace(&schema_name, &mut db_conn) + }, + ) + .await?; req.extensions_mut().insert(workspace_id.clone()); req.extensions_mut().insert(WorkspaceContext { diff --git a/crates/superposition/src/workspace/handlers.rs b/crates/superposition/src/workspace/handlers.rs index d28447bfb..623382bcf 100644 --- a/crates/superposition/src/workspace/handlers.rs +++ b/crates/superposition/src/workspace/handlers.rs @@ -11,7 +11,9 @@ use diesel::{ connection::SimpleConnection, r2d2::{ConnectionManager, PooledConnection}, }; +use fred::{prelude::KeysInterface, types::Expiration}; use regex::Regex; +use serde::Serialize; use service_utils::{ encryption::{ encrypt_workspace_key, generate_encryption_key, @@ -163,6 +165,10 @@ async fn create_handler( setup_workspace_schema(transaction_conn, &workspace_schema_name)?; Ok(inserted_workspace.remove(0)) })?; + + put_workspace_in_redis(created_workspace.clone(), &state, &workspace_schema_name) + .await; + let response = WorkspaceResponse::from(created_workspace); Ok(Json(response)) } @@ -174,6 +180,7 @@ async fn create_handler( async fn update_handler( workspace_name: web::Path, request: Json, + app_state: web::Data, db_conn: DbConnection, org_id: OrganisationId, user: User, @@ -211,10 +218,44 @@ async fn update_handler( Ok(updated_workspace) })?; + + put_workspace_in_redis(updated_workspace.clone(), &app_state, &schema_name.0).await; + let response = WorkspaceResponse::from(updated_workspace); Ok(Json(response)) } +async fn put_workspace_in_redis( + workspace: T, + state: &Data, + schema_name: &str, +) where + T: Serialize, +{ + let redis_pool = match &state.redis { + Some(pool) => pool, + None => { + log::debug!("Redis not configured, skipping workspace cache update"); + return; + } + }; + + let key_ttl: i64 = + service_utils::helpers::get_from_env_or_default("REDIS_KEY_TTL", 604800); + let expiration = Some(Expiration::EX(key_ttl)); + + if let Ok(serialized) = serde_json::to_string(&workspace) { + let client = redis_pool.next_connected(); + + if let Err(e) = client + .set::<(), &str, String>(schema_name, serialized, expiration, None, false) + .await + { + log::warn!("Failed to update Redis cache with workspace: {}", e); + } + }; +} + #[authorized] #[get("")] async fn list_handler( @@ -352,6 +393,10 @@ async fn migrate_schema_handler( Ok(()) })?; + // Refetch workspace after transaction to get updated data + let workspace = get_workspace(&schema_name, &mut conn)?; + put_workspace_in_redis(workspace.clone(), &state, &schema_name.0).await; + let response = WorkspaceResponse::from(workspace); Ok(Json(response)) } @@ -377,7 +422,7 @@ pub async fn rotate_encryption_key_handler( let schema_name = SchemaName(format!("{}_{}", *org_id, workspace_name.into_inner())); let workspace = get_workspace(&schema_name, &mut conn)?; let workspace_context = WorkspaceContext { - schema_name, + schema_name: schema_name.clone(), organisation_id: org_id, workspace_id: WorkspaceId(workspace.workspace_name.clone()), settings: workspace, @@ -393,6 +438,10 @@ pub async fn rotate_encryption_key_handler( ) })?; + // Refetch workspace after transaction to get updated data + let workspace = get_workspace(&schema_name, &mut conn)?; + put_workspace_in_redis(workspace, &state, &schema_name.0).await; + Ok(Json(KeyRotationResponse { 
total_secrets_re_encrypted, })) From ffe9908884e5e3267dc3854fb2fc3902024cd444 Mon Sep 17 00:00:00 2001 From: datron Date: Mon, 23 Feb 2026 18:34:16 +0530 Subject: [PATCH 10/22] fix: address first pass comments --- .env.example | 2 - .../src/api/context/handlers.rs | 32 ++++------- .../src/api/default_config/handlers.rs | 9 ++-- .../src/api/dimension/handlers.rs | 16 ++---- crates/context_aware_config/src/helpers.rs | 24 +++++++-- .../src/api/experiment_groups/handlers.rs | 51 +++++------------- .../src/api/experiment_groups/helpers.rs | 2 +- .../src/api/experiments/handlers.rs | 53 +++++-------------- .../src/api/experiments/helpers.rs | 2 +- crates/service_utils/src/service.rs | 2 +- 10 files changed, 67 insertions(+), 126 deletions(-) diff --git a/.env.example b/.env.example index d1149b324..b27a8bc49 100644 --- a/.env.example +++ b/.env.example @@ -38,8 +38,6 @@ AUTH_PROVIDER=DISABLED AUTH_Z_PROVIDER=DISABLED WORKER_ID=1 # MASTER_ENCRYPTION_KEY - add this for enabling secrets in local -ENCRYPTED_KEYS="" # Used for webhook secrets, API keys, etc. -# ENCRYPTED_KEYS="SERVICE1_API_KEY,SERVICE2_API_KEY" # REDIS_URL="" # REDIS_POOL_SIZE="10" # REDIS_MAX_ATTEMPTS="10" diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 1f9c89a71..6a0b9e623 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -178,12 +178,9 @@ async fn create_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; Ok(http_resp.json(put_response)) } @@ -271,12 +268,9 @@ async fn update_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; Ok(http_resp.json(override_resp)) } @@ -387,7 +381,7 @@ async fn move_handler( )); let DbConnection(mut conn) = db_conn; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) .await { @@ -623,7 +617,7 @@ async fn delete_handler( })?; let DbConnection(mut conn) = db_conn; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) .await { @@ -858,12 +852,9 @@ async fn bulk_operations_handler( })?; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &webhook_contexts, @@ -974,16 +965,13 @@ async fn weight_recompute_handler( let version_id = add_config_version(&state, tags, config_version_desc, transaction_conn, &workspace_context.schema_name)?; Ok(version_id) })?; - if let Err(e) = put_config_in_redis( + let _ = put_config_in_redis( config_version_id, state, &workspace_context.schema_name, &mut conn, ) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &response, diff --git a/crates/context_aware_config/src/api/default_config/handlers.rs b/crates/context_aware_config/src/api/default_config/handlers.rs index e0fb03095..4b2672033 100644 --- 
a/crates/context_aware_config/src/api/default_config/handlers.rs +++ b/crates/context_aware_config/src/api/default_config/handlers.rs @@ -164,7 +164,7 @@ async fn create_handler( Ok(version_id) })?; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) .await { @@ -318,12 +318,9 @@ async fn update_handler( Ok((val, version_id)) })?; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &db_row, diff --git a/crates/context_aware_config/src/api/dimension/handlers.rs b/crates/context_aware_config/src/api/dimension/handlers.rs index 2572cefcc..140753471 100644 --- a/crates/context_aware_config/src/api/dimension/handlers.rs +++ b/crates/context_aware_config/src/api/dimension/handlers.rs @@ -241,12 +241,9 @@ async fn create_handler( } })?; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &inserted_dimension, @@ -477,12 +474,9 @@ async fn update_handler( Ok((result, is_mandatory, version_id)) })?; - if let Err(e) = + let _ = put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &result, @@ -663,7 +657,7 @@ async fn delete_handler( } })?; - if let Err(e) = put_config_in_redis( + let _ = put_config_in_redis( version_id, state, &workspace_context.schema_name, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index 4c7943a06..dd0da676e 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -277,7 +277,11 @@ pub async fn put_config_in_redis( None, false, ) - .await; + .await + .map_err(|e| { + log::warn!("failed to set config in redis: {}", e); + unexpected_error!("failed to set config in redis") + })?; let _ = redis_pool .set::<(), String, String>( last_modified_at_key, @@ -286,7 +290,11 @@ pub async fn put_config_in_redis( None, false, ) - .await; + .await + .map_err(|e| { + log::warn!("failed to set last_modified_key in redis: {}", e); + unexpected_error!("failed to set last_modified_key in redis") + })?; if let Ok(uuid) = event_log::event_log .select(event_log::id) .filter(event_log::table_name.eq("contexts")) @@ -302,7 +310,11 @@ pub async fn put_config_in_redis( None, false, ) - .await; + .await + .map_err(|e| { + log::warn!("failed to set audit_id in redis: {}", e); + unexpected_error!("failed to set audit_id in redis") + })?; } let _ = redis_pool .set::<(), String, i64>( @@ -312,7 +324,11 @@ pub async fn put_config_in_redis( None, false, ) - .await; + .await + .map_err(|e| { + log::warn!("failed to set config_version_key in redis: {}", e); + unexpected_error!("failed to set config_version_keyx in redis") + })?; Ok(()) } diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index 92742d00b..94ef9c253 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -145,18 +145,12 @@ async fn create_handler( 
.get_result::(transaction_conn)?; Ok(new_experiment_group) })?; - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!( - "Failed to update experiment groups in redis after creation: {}", - err - ); - } + .await; Ok(Json(new_experiment_group)) } @@ -201,15 +195,12 @@ async fn update_handler( .returning(ExperimentGroup::as_returning()) .schema_name(&workspace_context.schema_name) .get_result(&mut conn)?; - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update experiment groups in redis: {}", err); - } + .await; Ok(Json(updated_group)) } @@ -259,15 +250,12 @@ async fn add_members_handler( &user, ) })?; - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update experiment groups in redis: {}", err); - } + .await; Ok(experiment_group) } @@ -310,15 +298,12 @@ async fn remove_members_handler( &user, ) })?; - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update experiment groups in redis: {}", err); - } + .await; Ok(experiment_group) } @@ -470,18 +455,12 @@ async fn delete_handler( .execute(conn)?; Ok(Json(marked_group)) }); - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut db_conn, &workspace_context.schema_name, ) - .await - { - log::error!( - "Failed to update experiment groups in redis after creation: {}", - err - ); - } + .await; result } @@ -542,18 +521,12 @@ async fn backfill_handler( } Ok(results) })?; - if let Err(err) = put_experiment_groups_in_redis( + let _ = put_experiment_groups_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!( - "Failed to update experiment groups in redis after creation: {}", - err - ); - } + .await; Ok(Json(experiment_groups)) } diff --git a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs index 672f99e6a..70cd3ef7a 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs @@ -491,7 +491,7 @@ pub async fn put_experiment_groups_in_redis( .set::<(), String, String>(key, serialized, expiration, None, false) .await .map_err(|e| { - log::error!("Failed to write experiment groups to redis: {}", e); + log::warn!("Failed to write experiment groups to redis: {}", e); unexpected_error!("Failed to write experiment groups to redis: {}", e) })?; diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 0545293ac..20c26afe2 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -381,19 +381,12 @@ async fn create_handler( })?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, 
&workspace_context.schema_name, ) - .await - { - log::error!( - "Failed to update redis cache for experiments after creating experiment {}: {}", - inserted_experiment.id, - err - ); - } + .await; let response = ExperimentResponse::from(inserted_experiment); let data = WebhookData { @@ -451,15 +444,12 @@ async fn conclude_handler( .await?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let experiment_response = ExperimentResponse::from(response); @@ -729,15 +719,12 @@ async fn discard_handler( .await?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let experiment_response = ExperimentResponse::from(response); @@ -1398,15 +1385,12 @@ async fn ramp_handler( let (_, config_version_id) = fetch_cac_config(&state, &workspace_context).await?; let experiment_response = ExperimentResponse::from(updated_experiment); - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let webhook_event = if matches!(experiment.status, ExperimentStatusType::CREATED) { WebhookEvent::ExperimentStarted @@ -1750,15 +1734,12 @@ async fn update_handler( })?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let experiment_response = ExperimentResponse::from(updated_experiment); @@ -1813,15 +1794,12 @@ async fn pause_handler( .await?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let experiment_response = ExperimentResponse::from(response); @@ -1912,15 +1890,12 @@ async fn resume_handler( .await?; // Update Redis cache with active experiments and experiment groups - if let Err(err) = put_experiments_in_redis( + let _ = put_experiments_in_redis( state.redis.clone(), &mut conn, &workspace_context.schema_name, ) - .await - { - log::error!("Failed to update redis cache for experiments: {}", err); - } + .await; let experiment_response = ExperimentResponse::from(response); diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index eeb797fab..d316dfd2a 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -895,7 +895,7 @@ pub async fn put_experiments_in_redis( .set::<(), String, String>(key, serialized, expiration, None, false) .await .map_err(|e| { - log::error!("Failed to write experiments to redis: {}", e); + 
log::warn!("Failed to write experiments to redis: {}", e); unexpected_error!("Failed to write experiments to redis: {}", e) })?; diff --git a/crates/service_utils/src/service.rs b/crates/service_utils/src/service.rs index cd18545c5..6d0406844 100644 --- a/crates/service_utils/src/service.rs +++ b/crates/service_utils/src/service.rs @@ -17,7 +17,7 @@ pub fn get_db_connection( Ok(DbConnection(conn)) } Err(e) => { - log::info!("Unable to get db connection from pool, error: {e}"); + log::error!("Unable to get db connection from pool, error: {e}"); Err(unexpected_error!( "Could not get a DB connection, contact an admin and check logs for further information" )) From 5188e0b2d579802debb722bde117647ac8ed169c Mon Sep 17 00:00:00 2001 From: datron Date: Wed, 25 Feb 2026 15:06:26 +0530 Subject: [PATCH 11/22] fix: rebase with main --- .../src/api/config/handlers.rs | 19 +++++++------------ .../src/api/context/handlers.rs | 6 +----- crates/context_aware_config/src/helpers.rs | 11 ++++------- .../src/api/experiments/helpers.rs | 3 +-- crates/service_utils/src/helpers.rs | 4 ++-- crates/service_utils/src/service/types.rs | 1 - crates/superposition/src/app_state.rs | 2 -- 7 files changed, 15 insertions(+), 31 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 190e5b5d4..9e9489066 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -47,28 +47,23 @@ use superposition_types::{ }; use uuid::Uuid; -use crate::api::context::{self, helpers::query_description}; use crate::{ api::{ + config::helpers::{ + add_audit_id_to_header, add_config_version_to_header, + add_last_modified_to_header, generate_config_from_version, + get_config_version, get_max_created_at, is_not_modified, + }, context::{self, helpers::query_description}, - dimension::fetch_dimensions_info_map, - }, - helpers::{generate_cac, generate_detailed_cac, get_config_from_redis}, -use crate::api::{ - config::helpers::{ - add_config_version_to_header, add_last_modified_to_header, - generate_config_from_version, get_config_version, get_max_created_at, - is_not_modified, }, - context::{self, helpers::query_description}, + helpers::{generate_cac, generate_detailed_cac}, }; -use crate::helpers::{calculate_context_weight, generate_cac}; use super::helpers::{apply_prefix_filter_to_config, resolve, setup_query_data}; #[allow(clippy::let_and_return)] pub fn endpoints() -> Scope { - let scope = Scope::new("") + Scope::new("") .service(get_handler) .service(get_toml_handler) .service(resolve_handler) diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index 6a0b9e623..b90cb47fa 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -50,14 +50,10 @@ use superposition_types::{ use crate::{ api::context::{ - hash, helpers::{query_description, validate_ctx}, operations, }, - helpers::{ - add_config_version, calculate_context_weight, put_config_in_redis, - validate_change_reason, - }, + helpers::{add_config_version, put_config_in_redis, validate_change_reason}, }; pub fn endpoints() -> Scope { diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index dd0da676e..bcffa03ed 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -4,12 +4,9 @@ use 
actix_web::{ http::header::{HeaderMap, HeaderName, HeaderValue}, web::Data, }; -use bigdecimal::{BigDecimal, Num}; use chrono::{DateTime, Utc}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper}; use fred::{interfaces::KeysInterface, types::Expiration}; -use jsonschema::{Draft, JSONSchema}; -use num_bigint::BigUint; use serde_json::{Map, Value, json}; use service_utils::{ helpers::get_from_env_or_default, @@ -269,7 +266,7 @@ pub async fn put_config_in_redis( let audit_id_key = format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name); let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", **schema_name); let last_modified = DateTime::to_rfc2822(&Utc::now()); - let _ = redis_pool + redis_pool .set::<(), String, String>( config_key, parsed_config, @@ -282,7 +279,7 @@ pub async fn put_config_in_redis( log::warn!("failed to set config in redis: {}", e); unexpected_error!("failed to set config in redis") })?; - let _ = redis_pool + redis_pool .set::<(), String, String>( last_modified_at_key, last_modified, @@ -302,7 +299,7 @@ pub async fn put_config_in_redis( .order_by(event_log::timestamp.desc()) .first::(db_conn) { - let _ = redis_pool + redis_pool .set::<(), String, String>( audit_id_key, uuid.to_string(), @@ -316,7 +313,7 @@ pub async fn put_config_in_redis( unexpected_error!("failed to set audit_id in redis") })?; } - let _ = redis_pool + redis_pool .set::<(), String, i64>( config_version_key, version_id, diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index d316dfd2a..aefe35cc3 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -37,8 +37,7 @@ use superposition_types::{ models::{ ChangeReason, experimentation::{ - Experiment, ExperimentStatusType, GroupType, Variant, - VariantType, + Experiment, ExperimentStatusType, GroupType, Variant, VariantType, }, }, schema::experiments::dsl as experiments, diff --git a/crates/service_utils/src/helpers.rs b/crates/service_utils/src/helpers.rs index cfb967fc5..a0068b397 100644 --- a/crates/service_utils/src/helpers.rs +++ b/crates/service_utils/src/helpers.rs @@ -13,7 +13,7 @@ use diesel::{ RunQueryDsl, SelectableHelper, }; -use log::info; +use log::warn; use once_cell::sync::Lazy; use regex::Regex; use reqwest::{ @@ -80,7 +80,7 @@ where match std::env::var(name) { Ok(env) => env.parse().unwrap(), Err(err) => { - info!( + warn!( "{name} ENV failed to load due to {err}, using default value {default}" ); default diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index 6edca986b..bd50a36ec 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -10,7 +10,6 @@ use actix_web::{Error, FromRequest, HttpMessage, error, web::Data}; use derive_more::{Deref, DerefMut}; use diesel::PgConnection; use diesel::r2d2::{ConnectionManager, PooledConnection}; -use jsonschema::JSONSchema; use secrecy::SecretString; use snowflake::SnowflakeIdGenerator; use superposition_types::database::models::Workspace; diff --git a/crates/superposition/src/app_state.rs b/crates/superposition/src/app_state.rs index 89ad745a6..0083ac256 100644 --- a/crates/superposition/src/app_state.rs +++ b/crates/superposition/src/app_state.rs @@ -4,8 +4,6 @@ use std::{ time::Duration, }; -use context_aware_config::helpers::get_meta_schema; - use fred::{ clients::RedisPool, 
interfaces::ClientLike, From 4669d8dd98bb8cb1f3fea01f9001af06312272ca Mon Sep 17 00:00:00 2001 From: datron Date: Mon, 2 Mar 2026 12:56:18 +0530 Subject: [PATCH 12/22] chore: rebase with main --- .../src/api/config/handlers.rs | 61 ++++++++--------- .../src/api/config/helpers.rs | 17 +---- .../src/api/context/handlers.rs | 65 +++++++------------ .../src/api/default_config/handlers.rs | 35 ++++++---- .../src/api/dimension/handlers.rs | 29 +++++---- .../src/api/experiment_groups/handlers.rs | 50 +++++++------- .../src/api/experiments/handlers.rs | 51 +++++++-------- .../src/api/experiments/helpers.rs | 8 +-- crates/service_utils/src/helpers.rs | 9 ++- .../src/middlewares/workspace_context.rs | 6 +- crates/service_utils/src/redis.rs | 11 ++-- crates/superposition/src/resolve/handlers.rs | 38 +++++------ crates/superposition_types/src/result.rs | 2 + 13 files changed, 171 insertions(+), 211 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 9e9489066..ea4eec4ae 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -24,7 +24,7 @@ use superposition_core::{ serialize_to_toml, }; use superposition_derives::authorized; -use superposition_macros::{bad_argument, db_error, not_found, unexpected_error}; +use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DimensionInfo, OverrideWithKeys, Overrides, PaginatedResponse, User, @@ -43,7 +43,7 @@ use superposition_types::{ }, schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, }, - result as superposition, + result::{self as superposition, DieselResult}, }; use uuid::Uuid; @@ -75,7 +75,7 @@ pub fn endpoints() -> Scope { pub fn fetch_audit_id( conn: &mut DBConnection, schema_name: &SchemaName, -) -> Option { +) -> DieselResult { event_log::event_log .select(event_log::id) .filter(event_log::table_name.eq("contexts")) @@ -83,7 +83,6 @@ pub fn fetch_audit_id( .schema_name(schema_name) .first::(conn) .map(|uuid| uuid.to_string()) - .ok() } fn generate_subsets(map: &Map) -> Vec> { @@ -508,13 +507,7 @@ async fn get_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_max_created_at(&mut conn, schema_name).map_err(|e| { - log::error!("failed to fetch max timestamp from event_log: {e}"); - db_error!(e) - }) - }, + |conn| get_max_created_at(conn, schema_name), ) .await .ok(); @@ -536,13 +529,20 @@ async fn get_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { generate_config_from_version( &mut Some(version), - &mut conn, + conn, &workspace_context.schema_name, ) + .map_err(|err| { + log::error!("failed to generate config from version with error: {}", err); + // can't throw the AppError from here because fetch_from_redis_else_writeback + // expects a DieselResult error type, so we log the actual error and return NotFound + // which will trigger generate_cac in the fallback and if + // that also fails then it will return the actual error + diesel::result::Error::NotFound + }) }, ) .await @@ -562,11 +562,7 @@ async fn get_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - fetch_audit_id(&mut conn, 
&workspace_context.schema_name) - .ok_or(not_found!("Audit ID not found")) - }, + |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await { @@ -633,13 +629,7 @@ async fn resolve_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_max_created_at(&mut conn, schema_name).map_err(|e| { - log::error!("failed to fetch max timestamp from event_log: {e}"); - db_error!(e) - }) - }, + |conn| get_max_created_at(conn, schema_name), ) .await .ok(); @@ -656,13 +646,20 @@ async fn resolve_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { generate_config_from_version( &mut Some(config_version), - &mut conn, + conn, &workspace_context.schema_name, ) + .map_err(|err| { + log::error!("failed to generate config from version with error: {}", err); + // can't throw the AppError from here because fetch_from_redis_else_writeback + // expects a DieselResult error type, so we log the actual error and return NotFound + // which will trigger generate_cac in the fallback and if + // that also fails then it will return the actual error + diesel::result::Error::NotFound + }) }, ) .await @@ -690,11 +687,7 @@ async fn resolve_handler( schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - fetch_audit_id(&mut conn, &workspace_context.schema_name) - .ok_or(not_found!("Audit ID not found")) - }, + |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await { diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index 808c5e4b0..91c2d8f6a 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -9,13 +9,7 @@ use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; use serde_json::{Map, Value}; use service_utils::{ redis::{CONFIG_VERSION_KEY_SUFFIX, fetch_from_redis_else_writeback}, - service::{ - get_db_connection, - types::{ - AppHeader, AppState, DbConnection, EncryptionKey, SchemaName, - WorkspaceContext, - }, - }, + service::types::{AppHeader, AppState, EncryptionKey, SchemaName, WorkspaceContext}, }; use superposition_macros::{bad_argument, db_error, unexpected_error}; use superposition_types::{ @@ -65,17 +59,12 @@ pub async fn get_config_version( &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { config_versions::config_versions .select(config_versions::id) .order_by(config_versions::created_at.desc()) .schema_name(&workspace_context.schema_name) - .first::(&mut conn) - .map_err(|e| { - log::error!("failed to fetch config version from db: {}", e); - db_error!(e) - }) + .first::(conn) }, ) .await diff --git a/crates/context_aware_config/src/api/context/handlers.rs b/crates/context_aware_config/src/api/context/handlers.rs index b90cb47fa..0f1eabd96 100644 --- a/crates/context_aware_config/src/api/context/handlers.rs +++ b/crates/context_aware_config/src/api/context/handlers.rs @@ -8,7 +8,7 @@ use bigdecimal::BigDecimal; use chrono::Utc; use diesel::{ Connection, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl, - SelectableHelper, delete, + SelectableHelper, dsl::sql, sql_types::{Bool, Text}, }; @@ -138,15 +138,13 @@ async fn create_handler( })?; let 
DbConnection(mut conn) = db_conn; - - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( + let _ = put_config_in_redis( version_id, &state, &workspace_context.schema_name, &mut conn, ) - .await?; + .await; let data = WebhookData { payload: &put_response, @@ -173,11 +171,6 @@ async fn create_handler( version_id.to_string(), )); - let DbConnection(mut conn) = db_conn; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; - Ok(http_resp.json(put_response)) } @@ -228,15 +221,13 @@ async fn update_handler( })?; let DbConnection(mut conn) = db_conn; - - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( + let _ = put_config_in_redis( version_id, &state, &workspace_context.schema_name, &mut conn, ) - .await?; + .await; let data = WebhookData { payload: &override_resp, @@ -263,11 +254,6 @@ async fn update_handler( version_id.to_string(), )); - let DbConnection(mut conn) = db_conn; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; - Ok(http_resp.json(override_resp)) } @@ -342,14 +328,13 @@ async fn move_handler( let DbConnection(mut conn) = db_conn; - #[cfg(feature = "high-performance-mode")] - put_config_in_redis( + let _ = put_config_in_redis( version_id, &state, &workspace_context.schema_name, &mut conn, ) - .await?; + .await; let data = WebhookData { payload: vec![&move_response.deleted_context, &move_response.context], @@ -376,14 +361,6 @@ async fn move_handler( version_id.to_string(), )); - let DbConnection(mut conn) = db_conn; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } - Ok(http_resp.json(move_response.context)) } @@ -613,12 +590,13 @@ async fn delete_handler( })?; let DbConnection(mut conn) = db_conn; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &deleted_ctx, resource: Resource::Context, @@ -750,7 +728,7 @@ async fn bulk_operations_handler( response.push(ContextBulkResponse::Replace(update_resp)); } ContextAction::Delete(ctx_id) => { - let deleted_ctx = delete(contexts) + let deleted_ctx = diesel::delete(contexts) .filter(id.eq(&ctx_id)) .schema_name(&workspace_context.schema_name) .get_result::(transaction_conn) @@ -847,10 +825,13 @@ async fn bulk_operations_handler( Ok((response, version_id)) })?; - - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &webhook_contexts, @@ -963,7 +944,7 @@ async fn weight_recompute_handler( })?; let _ = put_config_in_redis( config_version_id, - state, + &state, &workspace_context.schema_name, &mut conn, ) diff --git a/crates/context_aware_config/src/api/default_config/handlers.rs b/crates/context_aware_config/src/api/default_config/handlers.rs index 4b2672033..8c7b2ac76 100644 --- a/crates/context_aware_config/src/api/default_config/handlers.rs +++ b/crates/context_aware_config/src/api/default_config/handlers.rs @@ -164,12 +164,13 @@ async fn create_handler( Ok(version_id) })?; - let _ = - 
put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &default_config, @@ -318,9 +319,13 @@ async fn update_handler( Ok((val, version_id)) })?; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &db_row, @@ -548,11 +553,13 @@ async fn delete_handler( } })?; - if let Err(e) = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn).await - { - log::error!("Failed to update redis cache with new context: {}", e); - } + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &default_config, diff --git a/crates/context_aware_config/src/api/dimension/handlers.rs b/crates/context_aware_config/src/api/dimension/handlers.rs index 140753471..88bfffbea 100644 --- a/crates/context_aware_config/src/api/dimension/handlers.rs +++ b/crates/context_aware_config/src/api/dimension/handlers.rs @@ -241,9 +241,13 @@ async fn create_handler( } })?; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &inserted_dimension, @@ -474,9 +478,13 @@ async fn update_handler( Ok((result, is_mandatory, version_id)) })?; - let _ = - put_config_in_redis(version_id, state, &workspace_context.schema_name, &mut conn) - .await; + let _ = put_config_in_redis( + version_id, + &state, + &workspace_context.schema_name, + &mut conn, + ) + .await; let data = WebhookData { payload: &result, @@ -588,7 +596,7 @@ async fn delete_handler( )?; if context_ids.is_empty() { - let (resp, version_id) = conn.transaction::<_, superposition::AppError, _>(|transaction_conn| { + let (version_id, dimension_data) = conn.transaction::<_, superposition::AppError, _>(|transaction_conn| { use dimensions::dsl; if !dimension_data.dependency_graph.is_empty() { @@ -659,14 +667,11 @@ async fn delete_handler( let _ = put_config_in_redis( version_id, - state, + &state, &workspace_context.schema_name, &mut conn, ) - .await - { - log::warn!("Failed to update redis cache with new context: {}", e); - } + .await; let data = WebhookData { payload: &dimension_data, resource: Resource::Dimension, diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index 94ef9c253..e4c16c4b5 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -9,17 +9,15 @@ use diesel::{ }; use serde_json::Value; use service_utils::{ + db::run_query, helpers::{generate_snowflake_id, get_from_env_or_default}, redis::{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, fetch_from_redis_else_writeback}, - service::{ - get_db_connection, - types::{AppState, DbConnection, WorkspaceContext}, - }, + service::types::{AppState, DbConnection, WorkspaceContext}, }; use superposition_derives::authorized; use superposition_macros::{bad_argument, 
unexpected_error}; use superposition_types::{ - IsEmpty, PaginatedResponse, SortBy, User, + DBConnection, IsEmpty, PaginatedResponse, SortBy, User, api::experiment_groups::{ ExpGroupCreateRequest, ExpGroupFilters, ExpGroupMemberRequest, ExpGroupUpdateRequest, SortOn, @@ -320,38 +318,43 @@ async fn list_handler( *workspace_context.schema_name, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX ); let read_from_redis = pagination_params.all.is_some_and(|e| e) && filters.is_empty(); - let list_experiments_closure = |db_pool| { - let db_conn = get_db_connection(db_pool)?; - list_experiment_groups_db( - &pagination_params, - filters, - db_conn, - &workspace_context, - ) - }; if read_from_redis { fetch_from_redis_else_writeback::>( key, &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), - list_experiments_closure, + |conn| { + list_experiment_groups_db( + &pagination_params, + filters, + conn, + &workspace_context, + ) + }, ) .await .map(Json) .map_err(|e| unexpected_error!(e)) } else { - list_experiments_closure(state.db_pool.clone()).map(Json) + run_query(&state.db_pool, |conn| { + list_experiment_groups_db( + &pagination_params, + filters, + conn, + &workspace_context, + ) + }) + .map(Json) } } fn list_experiment_groups_db( pagination_params: &superposition_query::Query, filters: superposition_query::Query, - db_conn: DbConnection, + conn: &mut DBConnection, workspace_context: &WorkspaceContext, -) -> superposition::Result> { - let DbConnection(mut conn) = db_conn; +) -> superposition::DieselResult> { let query_builder = |filters: &ExpGroupFilters| { let mut builder = experiment_groups::experiment_groups .schema_name(&workspace_context.schema_name) @@ -388,16 +391,15 @@ fn list_experiment_groups_db( (SortOn::Name, SortBy::Asc) => base_query.order(experiment_groups::name.asc()), }; if let Some(true) = pagination_params.all { - let result: ExperimentGroups = - base_query.get_results::(&mut conn)?; + let result: ExperimentGroups = base_query.get_results::(conn)?; return Ok(PaginatedResponse::all(result)); } - let total_items = count_query.count().get_result(&mut conn)?; + let total_items = count_query.count().get_result(conn)?; let limit = pagination_params.count.unwrap_or(10); let offset = (pagination_params.page.unwrap_or(1) - 1) * limit; let query = base_query.limit(limit).offset(offset); - let data = query.load::(&mut conn)?; + let data = query.load::(conn)?; let total_pages = (total_items as f64 / limit as f64).ceil() as i64; Ok(PaginatedResponse { total_pages, @@ -437,7 +439,7 @@ async fn delete_handler( let marked_group = diesel::update(experiment_groups::experiment_groups) .filter(experiment_groups::id.eq(&id)) .set(( - experiment_groups::last_modified_by.eq(user.email), + experiment_groups::last_modified_by.eq(user.get_email()), experiment_groups::last_modified_at.eq(chrono::Utc::now()), )) .returning(ExperimentGroup::as_returning()) diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 20c26afe2..6a1f5b69a 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -23,6 +23,7 @@ use experimentation_client::{ use reqwest::{Method, StatusCode}; use serde_json::{Map, Value}; use service_utils::{ + db::run_query, helpers::{ WebhookData, construct_request_headers, execute_webhook_call, fetch_dimensions_info_map, generate_snowflake_id, request, @@ -40,7 +41,7 @@ use superposition_derives::authorized; 
use superposition_macros::{bad_argument, unexpected_error}; use superposition_types::{ Cac, Condition, Contextual, DBConnection, DimensionInfo, Exp, ListResponse, - Overrides, PaginatedResponse, SortBy, User, + Overrides, PaginatedResponse, Resource, SortBy, User, api::{ DimensionMatchStrategy, context::{ @@ -86,9 +87,8 @@ use crate::api::{ }, experiments::{ helpers::{ - fetch_and_validate_change_reason_with_function, fetch_webhook_by_event, - put_experiments_in_redis, validate_control_overrides, - validate_delete_experiment_variants, + fetch_and_validate_change_reason_with_function, put_experiments_in_redis, + validate_control_overrides, validate_delete_experiment_variants, }, types::StartedByChangeSet, }, @@ -983,17 +983,12 @@ async fn list_handler( &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { event_log::event_log .filter(event_log::table_name.eq("experiments")) .select(diesel::dsl::max(event_log::timestamp)) .schema_name(&workspace_context.schema_name) - .first(&mut conn) - .map_err(|e| { - log::error!("failed to fetch max timestamp from event_log: {e}"); - unexpected_error!("failed to fetch max timestamp from event_log: {e}") - }) + .first(conn) }, ) .await?; @@ -1028,8 +1023,7 @@ async fn list_handler( &workspace_context.schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(conn) = get_db_connection(db_pool)?; + |conn| { list_experiments_db( pagination_params.clone(), filters.clone(), @@ -1042,14 +1036,15 @@ async fn list_handler( .await?; Ok(HttpResponse::Ok().json(response)) } else { - let DbConnection(conn) = get_db_connection(state.db_pool.clone())?; - let paginated_response = list_experiments_db( - pagination_params, - filters, - dimension_params, - conn, - &workspace_context, - )?; + let paginated_response = run_query(&state.db_pool, |conn| { + list_experiments_db( + pagination_params, + filters, + dimension_params, + conn, + &workspace_context, + ) + })?; Ok(HttpResponse::Ok().json(paginated_response)) } } @@ -1058,9 +1053,9 @@ fn list_experiments_db( pagination_params: superposition_query::Query, filters: superposition_query::Query, dimension_params: DimensionQuery, - mut conn: DBConnection, + conn: &mut DBConnection, workspace_context: &WorkspaceContext, -) -> superposition::Result> { +) -> superposition::DieselResult> { let dimension_params = dimension_params.into_inner(); let query_builder = |filters: &ExperimentListFilters| { @@ -1128,7 +1123,7 @@ fn list_experiments_db( || filters.global_experiments_only.unwrap_or_default(); let paginated_response = if perform_in_memory_filter { - let all_experiments: Vec = base_query.load(&mut conn)?; + let all_experiments: Vec = base_query.load(conn)?; let filtered_experiments = if filters.global_experiments_only.unwrap_or_default() { all_experiments @@ -1137,7 +1132,7 @@ fn list_experiments_db( .collect() } else { let dimensions_info = - fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)?; + fetch_dimensions_info_map(conn, &workspace_context.schema_name)?; let dimension_params = evaluate_local_cohorts_skip_unresolved( &dimensions_info, &dimension_params, @@ -1181,13 +1176,13 @@ fn list_experiments_db( } } } else if show_all { - let result = base_query.load::(&mut conn)?; + let result = base_query.load::(conn)?; PaginatedResponse::all(result.into_iter().map(ExperimentResponse::from).collect()) } else { let count_query = query_builder(&filters); - let 
number_of_experiments = count_query.count().get_result(&mut conn)?; + let number_of_experiments = count_query.count().get_result(conn)?; let query = base_query.limit(limit).offset(offset); - let experiment_list = query.load::(&mut conn)?; + let experiment_list = query.load::(conn)?; PaginatedResponse { total_pages: (number_of_experiments as f64 / limit as f64).ceil() as i64, diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index aefe35cc3..d4abc91ad 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -39,6 +39,7 @@ use superposition_types::{ experimentation::{ Experiment, ExperimentStatusType, GroupType, Variant, VariantType, }, + others::{Webhook, WebhookEvent}, }, schema::experiments::dsl as experiments, }, @@ -458,11 +459,8 @@ pub async fn fetch_webhook_by_event( ) })?; - let headers_map = construct_header_map( - &workspace_context.workspace_id, - &workspace_context.organisation_id, - vec![("x-user", user_str)], - )?; + let headers_map = + construct_header_map(workspace_context, vec![("x-user", user_str)])?; let response = http_client .get(&url) diff --git a/crates/service_utils/src/helpers.rs b/crates/service_utils/src/helpers.rs index a0068b397..a04ca74b4 100644 --- a/crates/service_utils/src/helpers.rs +++ b/crates/service_utils/src/helpers.rs @@ -213,11 +213,10 @@ pub fn parse_config_tags( pub fn get_workspace( workspace_schema_name: &SchemaName, db_conn: &mut DBConnection, -) -> result::Result { - let workspace = workspaces::dsl::workspaces +) -> Result { + workspaces::dsl::workspaces .filter(workspaces::workspace_schema_name.eq(workspace_schema_name.to_string())) - .get_result::(db_conn)?; - Ok(workspace) + .get_result::(db_conn) } fn has_pattern_in_headers(headers: &CustomHeaders) -> (bool, bool) { @@ -465,7 +464,7 @@ where pub fn fetch_dimensions_info_map( conn: &mut DBConnection, schema_name: &SchemaName, -) -> result::Result> { +) -> result::DieselResult> { let dimensions_map = dimensions::table .select((dimension, DimensionInfo::as_select())) .schema_name(schema_name) diff --git a/crates/service_utils/src/middlewares/workspace_context.rs b/crates/service_utils/src/middlewares/workspace_context.rs index c903c2b22..665a44939 100644 --- a/crates/service_utils/src/middlewares/workspace_context.rs +++ b/crates/service_utils/src/middlewares/workspace_context.rs @@ -15,7 +15,6 @@ use superposition_types::database::models::Workspace; use crate::helpers::get_workspace; use crate::redis::fetch_from_redis_else_writeback; -use crate::service::get_db_connection; use crate::{ extensions::HttpRequestExt, service::types::{AppState, OrganisationId, SchemaName, WorkspaceContext}, @@ -146,10 +145,7 @@ where &schema_name, app_state.redis.clone(), app_state.db_pool.clone(), - |db_pool| { - let mut db_conn = get_db_connection(db_pool)?; - get_workspace(&schema_name, &mut db_conn) - }, + |db_conn| get_workspace(&schema_name, db_conn), ) .await?; diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index 406030a87..2d4247dc4 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -3,10 +3,11 @@ use fred::{ types::Expiration, }; use serde::{Serialize, de::DeserializeOwned}; -use superposition_types::result as superposition; +use superposition_types::{DBConnection, result as superposition}; use crate::{ - db::PgSchemaConnectionPool, 
helpers::get_from_env_or_default, + db::{PgSchemaConnectionPool, run_query}, + helpers::get_from_env_or_default, service::types::SchemaName, }; @@ -27,14 +28,14 @@ pub async fn fetch_from_redis_else_writeback( schema_name: &SchemaName, redis_pool: Option, db_pool: PgSchemaConnectionPool, - database_call: impl FnOnce(PgSchemaConnectionPool) -> superposition::Result, + query_fn: impl FnOnce(&mut DBConnection) -> superposition::DieselResult, ) -> superposition::Result where T: Serialize + DeserializeOwned, { let Some(pool) = redis_pool else { log::trace!("Redis pool not configured, using fallback"); - return database_call(db_pool); + return run_query(&db_pool, query_fn); }; let client = pool.next_connected(); match get_data_from_redis(key.clone(), client).await { @@ -45,7 +46,7 @@ where **schema_name, e ); - let data = database_call(db_pool); + let data = run_query(&db_pool, query_fn); if let Ok(ref value) = data { // If the write to redis fails, do not fail the whole request, just pass the data along if let Ok(serialized) = serde_json::to_string(value).map_err(|e| { diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index 1dc4c4dfa..a4f14cb2a 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -27,7 +27,7 @@ use superposition_core::experiment::{ get_applicable_variants_from_group_response, }; use superposition_derives::authorized; -use superposition_macros::{db_error, not_found, unexpected_error}; +use superposition_macros::{db_error, unexpected_error}; use superposition_types::{ Config, PaginatedResponse, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, @@ -70,13 +70,7 @@ async fn resolve_with_exp_handler( &schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - get_max_created_at(&mut conn, &schema_name).map_err(|e| { - log::error!("failed to fetch max timestamp from event_log: {e}"); - db_error!(e) - }) - }, + |conn| get_max_created_at(conn, &schema_name), ) .await .ok(); @@ -95,13 +89,20 @@ async fn resolve_with_exp_handler( &schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { generate_config_from_version( &mut Some(config_version), - &mut conn, + conn, &workspace_context.schema_name, ) + .map_err(|err| { + log::error!("failed to generate config from version with error: {}", err); + // can't throw the AppError from here because fetch_from_redis_else_writeback + // expects a DieselResult error type, so we log the actual error and return NotFound + // which will trigger generate_cac in the fallback and if + // that also fails then it will return the actual error + diesel::result::Error::NotFound + }) }, ) .await @@ -119,15 +120,10 @@ async fn resolve_with_exp_handler( &schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; + |conn| { let groups = experiment_groups::experiment_groups .schema_name(&workspace_context.schema_name) - .load::(&mut conn) - .map_err(|e| { - log::error!("failed to fetch experiment groups: {e}"); - db_error!(e) - })?; + .load::(conn)?; let total_items = groups.len() as i64; Ok(PaginatedResponse { total_pages: 1, @@ -214,11 +210,7 @@ async fn resolve_with_exp_handler( &schema_name, state.redis.clone(), state.db_pool.clone(), - |db_pool| { - let DbConnection(mut conn) = get_db_connection(db_pool)?; - 
fetch_audit_id(&mut conn, &workspace_context.schema_name) - .ok_or(not_found!("Audit ID not found")) - }, + |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await { diff --git a/crates/superposition_types/src/result.rs b/crates/superposition_types/src/result.rs index 410e558db..fe99ae170 100644 --- a/crates/superposition_types/src/result.rs +++ b/crates/superposition_types/src/result.rs @@ -147,3 +147,5 @@ impl std::fmt::Debug for AppError { error_chain_fmt(self, f) } } + +pub type DieselResult = core::result::Result; From 21efa6d44ccb10cdafe51967ec6047b572c21144 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 11:48:11 +0530 Subject: [PATCH 13/22] fix: address PR comments --- .../client/SuperpositionAsyncClientImpl.java | 2 +- .../superposition/client/SuperpositionClientImpl.java | 2 +- crates/context_aware_config/src/api/config/handlers.rs | 2 +- crates/superposition/src/workspace/handlers.rs | 9 +++------ 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java index 7649261ca..e08d63e6d 100644 --- a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java +++ b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionAsyncClientImpl.java @@ -267,8 +267,8 @@ final class SuperpositionAsyncClientImpl extends Client implements SuperpositionAsyncClient { private static final TypeRegistry TYPE_REGISTRY = TypeRegistry.builder() .putType(NotAuthorizedException.$ID, NotAuthorizedException.class, NotAuthorizedException::builder) - .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) + .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) .putType(InternalFailureException.$ID, InternalFailureException.class, InternalFailureException::builder) .putType(UnknownOperationException.$ID, UnknownOperationException.class, UnknownOperationException::builder) .putType(MalformedRequestException.$ID, MalformedRequestException.class, MalformedRequestException::builder) diff --git a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java index bfc6f4349..904ada31e 100644 --- a/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java +++ b/clients/java/sdk/src/main/java/io/juspay/superposition/client/SuperpositionClientImpl.java @@ -267,8 +267,8 @@ final class SuperpositionClientImpl extends Client implements SuperpositionClient { private static final TypeRegistry TYPE_REGISTRY = TypeRegistry.builder() .putType(NotAuthorizedException.$ID, NotAuthorizedException.class, NotAuthorizedException::builder) - .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) .putType(AccessDeniedException.$ID, AccessDeniedException.class, AccessDeniedException::builder) + .putType(ValidationException.$ID, ValidationException.class, ValidationException::builder) .putType(InternalFailureException.$ID, InternalFailureException.class, InternalFailureException::builder) .putType(UnknownOperationException.$ID, UnknownOperationException.class, UnknownOperationException::builder) 
.putType(MalformedRequestException.$ID, MalformedRequestException.class, MalformedRequestException::builder) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index ea4eec4ae..2b6b7a8ed 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -82,7 +82,7 @@ pub fn fetch_audit_id( .order_by(event_log::timestamp.desc()) .schema_name(schema_name) .first::(conn) - .map(|uuid| uuid.to_string()) + .map(String::from) } fn generate_subsets(map: &Map) -> Vec> { diff --git a/crates/superposition/src/workspace/handlers.rs b/crates/superposition/src/workspace/handlers.rs index 623382bcf..4600d398f 100644 --- a/crates/superposition/src/workspace/handlers.rs +++ b/crates/superposition/src/workspace/handlers.rs @@ -13,7 +13,6 @@ use diesel::{ }; use fred::{prelude::KeysInterface, types::Expiration}; use regex::Regex; -use serde::Serialize; use service_utils::{ encryption::{ encrypt_workspace_key, generate_encryption_key, @@ -225,13 +224,11 @@ async fn update_handler( Ok(Json(response)) } -async fn put_workspace_in_redis( - workspace: T, +async fn put_workspace_in_redis( + workspace: Workspace, state: &Data, schema_name: &str, -) where - T: Serialize, -{ +) { let redis_pool = match &state.redis { Some(pool) => pool, None => { From a5f20b10837fc420c91cb46608fe7e89b48b0132 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 12:59:59 +0530 Subject: [PATCH 14/22] fix: remove clones on db_pool and redis_pool --- .../src/api/config/handlers.rs | 24 +++++++++---------- .../src/api/config/helpers.rs | 4 ++-- .../src/api/experiment_groups/handlers.rs | 4 ++-- .../src/api/experiments/handlers.rs | 8 +++---- .../src/middlewares/workspace_context.rs | 4 ++-- crates/service_utils/src/redis.rs | 8 +++---- crates/superposition/src/resolve/handlers.rs | 16 ++++++------- 7 files changed, 34 insertions(+), 34 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 2b6b7a8ed..9c8a805ac 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -505,8 +505,8 @@ async fn get_handler( let max_created_at = fetch_from_redis_else_writeback::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| get_max_created_at(conn, schema_name), ) .await @@ -527,8 +527,8 @@ async fn get_handler( let mut config = fetch_from_redis_else_writeback::( format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, version), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { generate_config_from_version( &mut Some(version), @@ -560,8 +560,8 @@ async fn get_handler( if let Ok(audit_id) = fetch_from_redis_else_writeback::( format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await @@ -627,8 +627,8 @@ async fn resolve_handler( let max_created_at = fetch_from_redis_else_writeback::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| get_max_created_at(conn, schema_name), ) .await @@ -644,8 +644,8 @@ async fn resolve_handler( let mut 
config = fetch_from_redis_else_writeback::( format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, config_version,), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { generate_config_from_version( &mut Some(config_version), @@ -685,8 +685,8 @@ async fn resolve_handler( if let Ok(audit_id) = fetch_from_redis_else_writeback::( format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index 91c2d8f6a..48a7914ab 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -57,8 +57,8 @@ pub async fn get_config_version( *workspace_context.schema_name ), &workspace_context.schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { config_versions::config_versions .select(config_versions::id) diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index e4c16c4b5..ba4e2e485 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -322,8 +322,8 @@ async fn list_handler( fetch_from_redis_else_writeback::>( key, &workspace_context.schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { list_experiment_groups_db( &pagination_params, diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 6a1f5b69a..ba0c31082 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -981,8 +981,8 @@ async fn list_handler( *workspace_context.schema_name ), &workspace_context.schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { event_log::event_log .filter(event_log::table_name.eq("experiments")) @@ -1021,8 +1021,8 @@ async fn list_handler( *workspace_context.schema_name ), &workspace_context.schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { list_experiments_db( pagination_params.clone(), diff --git a/crates/service_utils/src/middlewares/workspace_context.rs b/crates/service_utils/src/middlewares/workspace_context.rs index 665a44939..ac7f6d67b 100644 --- a/crates/service_utils/src/middlewares/workspace_context.rs +++ b/crates/service_utils/src/middlewares/workspace_context.rs @@ -143,8 +143,8 @@ where fetch_from_redis_else_writeback::( schema, &schema_name, - app_state.redis.clone(), - app_state.db_pool.clone(), + &app_state.redis, + &app_state.db_pool, |db_conn| get_workspace(&schema_name, db_conn), ) .await?; diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index 2d4247dc4..a2a2b61b5 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -26,8 +26,8 @@ pub const EXPERIMENT_GROUPS_LIST_KEY_SUFFIX: &str = "::experiment_groups_list"; pub async fn fetch_from_redis_else_writeback( key: String, schema_name: &SchemaName, - redis_pool: Option, - db_pool: PgSchemaConnectionPool, + 
redis_pool: &Option, + db_pool: &PgSchemaConnectionPool, query_fn: impl FnOnce(&mut DBConnection) -> superposition::DieselResult, ) -> superposition::Result where @@ -35,7 +35,7 @@ where { let Some(pool) = redis_pool else { log::trace!("Redis pool not configured, using fallback"); - return run_query(&db_pool, query_fn); + return run_query(db_pool, query_fn); }; let client = pool.next_connected(); match get_data_from_redis(key.clone(), client).await { @@ -46,7 +46,7 @@ where **schema_name, e ); - let data = run_query(&db_pool, query_fn); + let data = run_query(db_pool, query_fn); if let Ok(ref value) = data { // If the write to redis fails, do not fail the whole request, just pass the data along if let Ok(serialized) = serde_json::to_string(value).map_err(|e| { diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index a4f14cb2a..b956aa751 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -68,8 +68,8 @@ async fn resolve_with_exp_handler( let max_created_at = fetch_from_redis_else_writeback::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", *schema_name), &schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| get_max_created_at(conn, &schema_name), ) .await @@ -87,8 +87,8 @@ async fn resolve_with_exp_handler( let mut config = fetch_from_redis_else_writeback::( format!("{}::{}{CONFIG_KEY_SUFFIX}", *schema_name, config_version), &schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { generate_config_from_version( &mut Some(config_version), @@ -118,8 +118,8 @@ async fn resolve_with_exp_handler( fetch_from_redis_else_writeback::>( format!("{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", *schema_name), &schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| { let groups = experiment_groups::experiment_groups .schema_name(&workspace_context.schema_name) @@ -208,8 +208,8 @@ async fn resolve_with_exp_handler( if let Ok(audit_id) = fetch_from_redis_else_writeback::( format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), &schema_name, - state.redis.clone(), - state.db_pool.clone(), + &state.redis, + &state.db_pool, |conn| fetch_audit_id(conn, &workspace_context.schema_name), ) .await From 997724f0d351f2ec1f0f993cefea7e60a594a474 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 13:11:45 +0530 Subject: [PATCH 15/22] chore: cleanup files --- .env.example | 10 +-- crates/superposition/Cargo.toml | 3 - .../include/superposition_core.h | 63 ------------------- docker-compose.yaml | 12 ---- 4 files changed, 5 insertions(+), 83 deletions(-) delete mode 100644 crates/superposition_core/include/superposition_core.h diff --git a/.env.example b/.env.example index b27a8bc49..afd0df460 100644 --- a/.env.example +++ b/.env.example @@ -38,12 +38,12 @@ AUTH_PROVIDER=DISABLED AUTH_Z_PROVIDER=DISABLED WORKER_ID=1 # MASTER_ENCRYPTION_KEY - add this for enabling secrets in local -# REDIS_URL="" -# REDIS_POOL_SIZE="10" -# REDIS_MAX_ATTEMPTS="10" -# REDIS_CONN_TIMEOUT="1000" +REDIS_URL="http://localhost:6379" +REDIS_POOL_SIZE="10" +REDIS_MAX_ATTEMPTS="10" +REDIS_CONN_TIMEOUT="1000" # TTL in seconds -# REDIS_KEY_TTL=604800 +REDIS_KEY_TTL=604800 ################################################ ## Following values are to be set in KMS and not directly in ENV diff --git a/crates/superposition/Cargo.toml b/crates/superposition/Cargo.toml index b0e0de521..d0bbd567c 100644 --- 
a/crates/superposition/Cargo.toml +++ b/crates/superposition/Cargo.toml @@ -43,8 +43,5 @@ tracing-subscriber = { workspace = true } tracing-actix-web = { workspace = true } json-subscriber = { version = "0.2.7", features = ["tracing-log"] } -[features] - - [lints] workspace = true diff --git a/crates/superposition_core/include/superposition_core.h b/crates/superposition_core/include/superposition_core.h deleted file mode 100644 index d3505a995..000000000 --- a/crates/superposition_core/include/superposition_core.h +++ /dev/null @@ -1,63 +0,0 @@ -#include -#include -#include -#include - -/** - * # Safety - * - * Caller ensures that `ebuf` is a sufficiently long buffer to store the - * error message. - */ -char *core_get_resolved_config(const char *default_config_json, - const char *contexts_json, - const char *overrides_json, - const char *dimensions, - const char *query_data_json, - const char *merge_strategy_str, - const char *filter_prefixes_json, - const char *experimentation_json, - char *ebuf); - -/** - * # Safety - * - * Caller ensures that `ebuf` is a sufficiently long buffer to store the - * error message. - */ -char *core_get_resolved_config_with_reasoning(const char *default_config_json, - const char *contexts_json, - const char *overrides_json, - const char *dimensions, - const char *query_data_json, - const char *merge_strategy_str, - const char *filter_prefixes_json, - const char *experimentation_json, - char *ebuf); - -int32_t core_test_connection(void); - -/** - * # Safety - * - * This function is unsafe because: - * - `s` must be a valid pointer to a C string previously allocated by this library - * - `s` must not be null - * - The caller must ensure `s` is not used after this function is called - * - Double-free will cause undefined behavior - */ -void core_free_string(char *s); - -/** - * # Safety - * - * Caller ensures that `ebuf` is a sufficiently long buffer to store the - * error message. 
- */ -char *core_get_applicable_variants(const char *experiments_json, - const char *experiment_groups_json, - const char *dimensions, - const char *query_data_json, - const char *identifier, - const char *filter_prefixes_json, - char *ebuf); diff --git a/docker-compose.yaml b/docker-compose.yaml index 60cbc3e60..add7e3cbc 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -11,18 +11,6 @@ services: - ./docker-compose/postgres/data:/var/lib/postgresql/data restart: on-failure - # localstack: - # build: ./docker-compose/localstack/ - # container_name: superposition_localstack - # ports: - # - "4510-4559:4510-4559" # external service port range - # - "4566:4566" # LocalStack Edge Proxy - # - "4571:4571" - # environment: - # LOCALSTACK_SERVICES: s3, sns, sqs, logs, cloudwatch, kms - # AWS_DEFAULT_REGION: ap-south-1 - # EDGE_PORT: 4566 - redis: image: redis:7 container_name: superposition_redis From a600d57619bdf9e513d15d0d541e9aec33567456 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 14:36:49 +0530 Subject: [PATCH 16/22] fix: address 2nd pass of comments --- .../src/api/config/handlers.rs | 11 ++-- crates/context_aware_config/src/helpers.rs | 15 +----- .../src/api/experiments/handlers.rs | 26 +++------- .../src/api/experiments/helpers.rs | 48 ------------------ crates/service_utils/Cargo.toml | 4 +- crates/service_utils/src/service.rs | 25 ---------- crates/service_utils/src/service/types.rs | 20 +++++--- crates/superposition/Cargo.toml | 2 +- crates/superposition/src/resolve/handlers.rs | 50 +++++++++---------- .../superposition/src/workspace/handlers.rs | 13 +++-- 10 files changed, 60 insertions(+), 154 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 9c8a805ac..dc16079b8 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -14,10 +14,7 @@ use service_utils::{ AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, }, - service::{ - get_db_connection, - types::{AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext}, - }, + service::types::{AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext}, }; use superposition_core::{ helpers::{calculate_context_weight, hash}, @@ -668,7 +665,11 @@ async fn resolve_handler( let (is_smithy, query_data) = setup_query_data(&req, &body, &dimension_params)?; let resolved_config = { - let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; + let mut conn = state.db_pool.get().map_err(|e| { + log::error!("Unable to get db connection from pool, error: {e}"); + unexpected_error!("Unable to get db connection from pool, error: {}", e) + })?; + // TODO: resolve doesn't return diesel::error, figure that out resolve( &mut config, query_data, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index bcffa03ed..ba65b841e 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -20,10 +20,6 @@ use service_utils::{ service::types::{AppState, EncryptionKey, SchemaName, WorkspaceContext}, }; use superposition_macros::{db_error, unexpected_error, validation_error}; -use superposition_types::database::{ - models::Workspace, schema::event_log::dsl as event_log, - superposition_schema::superposition::workspaces, -}; use superposition_types::{ Cac, Condition, Config, Context, DBConnection, DefaultConfigInfo, 
DefaultConfigsWithSchema, DetailedConfig, DimensionInfo, OverrideWithKeys, Overrides, @@ -43,6 +39,7 @@ use superposition_types::{ config_versions, contexts::dsl::{self as ctxt}, default_configs::dsl as def_conf, + event_log::dsl as event_log, }, }, logic::dimensions_to_start_from, @@ -230,16 +227,6 @@ pub fn add_config_version( Ok(version_id) } -pub fn get_workspace( - workspace_schema_name: &String, - db_conn: &mut DBConnection, -) -> superposition::Result { - let workspace = workspaces::dsl::workspaces - .filter(workspaces::workspace_schema_name.eq(workspace_schema_name)) - .get_result::(db_conn)?; - Ok(workspace) -} - pub async fn put_config_in_redis( version_id: i64, state: &Data, diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index ba0c31082..364e5dd97 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -32,9 +32,8 @@ use service_utils::{ EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, EXPERIMENTS_LIST_KEY_SUFFIX, fetch_from_redis_else_writeback, }, - service::{ - get_db_connection, - types::{AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext}, + service::types::{ + AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext, }, }; use superposition_derives::authorized; @@ -856,13 +855,13 @@ pub async fn discard( Ok((updated_experiment, config_version_id)) } -pub async fn get_applicable_variants_helper( +pub fn get_applicable_variants_helper( db_conn: &mut PooledConnection>, context: Map, dimensions_info: &HashMap, identifier: String, workspace_context: &WorkspaceContext, -) -> superposition::Result<(Vec, HashMap)> { +) -> superposition::DieselResult<(Vec, HashMap)> { use superposition_types::database::schema::experiments::dsl; let experiment_groups = experiment_groups::experiment_groups @@ -932,19 +931,10 @@ async fn get_applicable_variants_handler( return Err(bad_argument!("Invalid input for the method")); } }; - let (applicable_variants, exps) = { - let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; - let di = fetch_dimensions_info_map(&mut conn, &workspace_context.schema_name)?; - let (av, e) = get_applicable_variants_helper( - &mut conn, - context, - &di, - identifier, - &workspace_context, - ) - .await?; - (av, e) - }; + let (applicable_variants, exps) = run_query(&state.db_pool, |conn| { + let di = fetch_dimensions_info_map(conn, &workspace_context.schema_name)?; + get_applicable_variants_helper(conn, context, &di, identifier, &workspace_context) + })?; let variants = exps .into_iter() diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index d4abc91ad..2af9c0b96 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -443,54 +443,6 @@ pub async fn fetch_cac_config( } } -pub async fn fetch_webhook_by_event( - state: &Data, - user: &User, - event: &WebhookEvent, - workspace_context: &WorkspaceContext, -) -> superposition::Result { - let http_client = reqwest::Client::new(); - let url = format!("{}/webhook/event/{event}", state.cac_host); - let user_str = serde_json::to_string(user).map_err(|err| { - log::error!("Something went wrong, failed to stringify user data {err}"); - unexpected_error!( - "Something went wrong, failed to stringify user data {}", - err - ) - })?; - - let 
headers_map = - construct_header_map(workspace_context, vec![("x-user", user_str)])?; - - let response = http_client - .get(&url) - .headers(headers_map.into()) - .header( - header::AUTHORIZATION, - format!("Internal {}", state.superposition_token), - ) - .send() - .await; - - match response { - Ok(res) => { - if res.status() == 404 { - log::info!("No Webhook found for event: {}", event); - return Ok(Webhook::default()); - } - let webhook = res.json::().await.map_err(|err| { - log::error!("failed to parse Webhook response with error: {}", err); - unexpected_error!("Failed to parse Webhook.") - })?; - Ok(webhook) - } - Err(error) => { - log::error!("Failed to fetch Webhook with error: {:?}", error); - Err(unexpected_error!(error)) - } - } -} - pub fn handle_experiment_group_membership( experiment: &Experiment, new_group_id: &Option, diff --git a/crates/service_utils/Cargo.toml b/crates/service_utils/Cargo.toml index 9b51d1c2a..336fce23c 100644 --- a/crates/service_utils/Cargo.toml +++ b/crates/service_utils/Cargo.toml @@ -31,18 +31,16 @@ secrecy = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } strum_macros = { workspace = true } +superposition_macros = { workspace = true } superposition_types = { workspace = true, features = [ "result", "api", "diesel_derives", ] } -superposition_macros = { workspace = true } url = { workspace = true } urlencoding = "~2.1.2" tracing-actix-web = { workspace = true } -[features] - [lints] workspace = true diff --git a/crates/service_utils/src/service.rs b/crates/service_utils/src/service.rs index 6d0406844..cd408564e 100644 --- a/crates/service_utils/src/service.rs +++ b/crates/service_utils/src/service.rs @@ -1,26 +1 @@ -use crate::db::PgSchemaConnectionPool; -use diesel::Connection; -use superposition_macros::unexpected_error; -use superposition_types::result as superposition; -use types::DbConnection; - pub mod types; - -pub fn get_db_connection( - db_pool: PgSchemaConnectionPool, -) -> superposition::Result { - match db_pool.get() { - Ok(mut conn) => { - conn.set_prepared_statement_cache_size( - diesel::connection::CacheSize::Disabled, - ); - Ok(DbConnection(conn)) - } - Err(e) => { - log::error!("Unable to get db connection from pool, error: {e}"); - Err(unexpected_error!( - "Could not get a DB connection, contact an admin and check logs for further information" - )) - } - } -} diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index bd50a36ec..f5ad16a6c 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -8,8 +8,8 @@ use std::{ use actix_web::{Error, FromRequest, HttpMessage, error, web::Data}; use derive_more::{Deref, DerefMut}; -use diesel::PgConnection; use diesel::r2d2::{ConnectionManager, PooledConnection}; +use diesel::{Connection, PgConnection}; use secrecy::SecretString; use snowflake::SnowflakeIdGenerator; use superposition_types::database::models::Workspace; @@ -188,12 +188,18 @@ impl FromRequest for DbConnection { } }; - let result = super::get_db_connection(app_state.db_pool.clone()).map_err(|e| { - log::error!("Failed to inject DB connection, error: {}", e); - error::ErrorInternalServerError( - "A database error occurred, please contact an admin or check logs", - ) - }); + let result = match app_state.db_pool.get() { + Ok(mut conn) => { + conn.set_prepared_statement_cache_size( + diesel::connection::CacheSize::Disabled, + ); + Ok(DbConnection(conn)) + } + Err(e) => { + log::info!("Unable to get db 
connection from pool, error: {e}"); + Err(error::ErrorInternalServerError("")) + } + }; ready(result) } diff --git a/crates/superposition/Cargo.toml b/crates/superposition/Cargo.toml index d0bbd567c..585e37fa1 100644 --- a/crates/superposition/Cargo.toml +++ b/crates/superposition/Cargo.toml @@ -17,7 +17,6 @@ context_aware_config = { path = "../context_aware_config" } diesel = { workspace = true } dotenv = "0.15.0" experimentation_platform = { path = "../experimentation_platform" } -superposition_core = { workspace = true } fred = { workspace = true } frontend = { path = "../frontend" } idgenerator = "2.0.0" @@ -30,6 +29,7 @@ rs-snowflake = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } service_utils = { workspace = true } +superposition_core = { workspace = true } superposition_derives = { workspace = true } superposition_macros = { workspace = true } superposition_types = { workspace = true, features = [ diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index b956aa751..cd0fb1e4f 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -12,14 +12,12 @@ use context_aware_config::api::config::helpers::{ use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl}; use serde_json::{Map, Value}; use service_utils::{ + db::run_query, redis::{ AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, }, - service::{ - get_db_connection, - types::{AppHeader, AppState, DbConnection, WorkspaceContext}, - }, + service::types::{AppHeader, AppState, WorkspaceContext}, }; use std::collections::{HashMap, HashSet}; use superposition_core::experiment::{ @@ -27,7 +25,7 @@ use superposition_core::experiment::{ get_applicable_variants_from_group_response, }; use superposition_derives::authorized; -use superposition_macros::{db_error, unexpected_error}; +use superposition_macros::unexpected_error; use superposition_types::{ Config, PaginatedResponse, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, @@ -159,26 +157,23 @@ async fn resolve_with_exp_handler( // Fetch experiments from database (these are filtered by specific IDs, so caching all wouldn't help much) let exps: HashMap = if !exp_ids.is_empty() { - let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; - dsl::experiments - .filter( - dsl::id - .eq_any(exp_ids) - .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), - ) - .schema_name(&workspace_context.schema_name) - .load::(&mut conn) - .map_err(|e| { - log::error!("failed to fetch experiments: {e}"); - db_error!(e) - })? - .into_iter() - .map(|exp| { - let ffi_exp = FfiExperiment::from(exp); - let id = ffi_exp.id.clone(); - (id, ffi_exp) - }) - .collect() + run_query(&state.db_pool, |conn| { + Ok(dsl::experiments + .filter( + dsl::id + .eq_any(exp_ids) + .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), + ) + .schema_name(&workspace_context.schema_name) + .load::(conn)? + .into_iter() + .map(|exp| { + let ffi_exp = FfiExperiment::from(exp); + let id = ffi_exp.id.clone(); + (id, ffi_exp) + }) + .collect()) + })? 
} else { HashMap::new() }; @@ -189,7 +184,10 @@ async fn resolve_with_exp_handler( } let resolved_config = { - let DbConnection(mut conn) = get_db_connection(state.db_pool.clone())?; + let mut conn = state.db_pool.get().map_err(|e| { + log::error!("Unable to get db connection from pool, error: {e}"); + unexpected_error!("Unable to get db connection from pool: {}", e) + })?; resolve( &mut config, query_data, diff --git a/crates/superposition/src/workspace/handlers.rs b/crates/superposition/src/workspace/handlers.rs index 4600d398f..ed3214127 100644 --- a/crates/superposition/src/workspace/handlers.rs +++ b/crates/superposition/src/workspace/handlers.rs @@ -165,8 +165,7 @@ async fn create_handler( Ok(inserted_workspace.remove(0)) })?; - put_workspace_in_redis(created_workspace.clone(), &state, &workspace_schema_name) - .await; + put_workspace_in_redis(&created_workspace, &state, &workspace_schema_name).await; let response = WorkspaceResponse::from(created_workspace); Ok(Json(response)) @@ -179,7 +178,7 @@ async fn create_handler( async fn update_handler( workspace_name: web::Path, request: Json, - app_state: web::Data, + app_state: Data, db_conn: DbConnection, org_id: OrganisationId, user: User, @@ -218,14 +217,14 @@ async fn update_handler( Ok(updated_workspace) })?; - put_workspace_in_redis(updated_workspace.clone(), &app_state, &schema_name.0).await; + put_workspace_in_redis(&updated_workspace, &app_state, &schema_name.0).await; let response = WorkspaceResponse::from(updated_workspace); Ok(Json(response)) } async fn put_workspace_in_redis( - workspace: Workspace, + workspace: &Workspace, state: &Data, schema_name: &str, ) { @@ -392,7 +391,7 @@ async fn migrate_schema_handler( // Refetch workspace after transaction to get updated data let workspace = get_workspace(&schema_name, &mut conn)?; - put_workspace_in_redis(workspace.clone(), &state, &schema_name.0).await; + put_workspace_in_redis(&workspace, &state, &schema_name.0).await; let response = WorkspaceResponse::from(workspace); Ok(Json(response)) @@ -437,7 +436,7 @@ pub async fn rotate_encryption_key_handler( // Refetch workspace after transaction to get updated data let workspace = get_workspace(&schema_name, &mut conn)?; - put_workspace_in_redis(workspace, &state, &schema_name.0).await; + put_workspace_in_redis(&workspace, &state, &schema_name.0).await; Ok(Json(KeyRotationResponse { total_secrets_re_encrypted, From 3e7e6109dd943771076d887bf81631c185983988 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 15:52:55 +0530 Subject: [PATCH 17/22] fix: 3rd pass of comments --- .../src/api/config/handlers.rs | 1 - crates/context_aware_config/src/helpers.rs | 2 +- .../src/api/experiment_groups/handlers.rs | 12 ++++++------ .../src/api/experiment_groups/helpers.rs | 2 +- .../src/api/experiments/handlers.rs | 14 +++++++------- .../src/api/experiments/helpers.rs | 3 +-- crates/service_utils/src/redis.rs | 10 +++++----- crates/superposition/src/workspace/handlers.rs | 2 +- 8 files changed, 22 insertions(+), 24 deletions(-) diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index dc16079b8..21b62f443 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -58,7 +58,6 @@ use crate::{ use super::helpers::{apply_prefix_filter_to_config, resolve, setup_query_data}; -#[allow(clippy::let_and_return)] pub fn endpoints() -> Scope { Scope::new("") .service(get_handler) diff --git 
a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index ba65b841e..9ca50ac56 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -304,7 +304,7 @@ pub async fn put_config_in_redis( .set::<(), String, i64>( config_version_key, version_id, - expiration.clone(), + expiration, None, false, ) diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index ba4e2e485..d773a5e0f 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -144,7 +144,7 @@ async fn create_handler( Ok(new_experiment_group) })?; let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -194,7 +194,7 @@ async fn update_handler( .schema_name(&workspace_context.schema_name) .get_result(&mut conn)?; let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -249,7 +249,7 @@ async fn add_members_handler( ) })?; let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -297,7 +297,7 @@ async fn remove_members_handler( ) })?; let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -458,7 +458,7 @@ async fn delete_handler( Ok(Json(marked_group)) }); let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut db_conn, &workspace_context.schema_name, ) @@ -524,7 +524,7 @@ async fn backfill_handler( Ok(results) })?; let _ = put_experiment_groups_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) diff --git a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs index 70cd3ef7a..2d51a59ea 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/helpers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/helpers.rs @@ -459,7 +459,7 @@ pub fn fetch_experiment_group( } pub async fn put_experiment_groups_in_redis( - redis_pool: Option, + redis_pool: &Option, conn: &mut DBConnection, schema_name: &SchemaName, ) -> superposition::Result<()> { diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 364e5dd97..57e36be58 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -381,7 +381,7 @@ async fn create_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -444,7 +444,7 @@ async fn conclude_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -719,7 +719,7 @@ async fn discard_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -1371,7 +1371,7 @@ async fn ramp_handler( let 
experiment_response = ExperimentResponse::from(updated_experiment); let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -1720,7 +1720,7 @@ async fn update_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -1780,7 +1780,7 @@ async fn pause_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) @@ -1876,7 +1876,7 @@ async fn resume_handler( // Update Redis cache with active experiments and experiment groups let _ = put_experiments_in_redis( - state.redis.clone(), + &state.redis, &mut conn, &workspace_context.schema_name, ) diff --git a/crates/experimentation_platform/src/api/experiments/helpers.rs b/crates/experimentation_platform/src/api/experiments/helpers.rs index 2af9c0b96..316be8e0e 100644 --- a/crates/experimentation_platform/src/api/experiments/helpers.rs +++ b/crates/experimentation_platform/src/api/experiments/helpers.rs @@ -39,7 +39,6 @@ use superposition_types::{ experimentation::{ Experiment, ExperimentStatusType, GroupType, Variant, VariantType, }, - others::{Webhook, WebhookEvent}, }, schema::experiments::dsl as experiments, }, @@ -805,7 +804,7 @@ pub async fn fetch_and_validate_change_reason_with_function( } pub async fn put_experiments_in_redis( - redis_pool: Option, + redis_pool: &Option, conn: &mut DBConnection, schema_name: &SchemaName, ) -> superposition::Result<()> { diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index a2a2b61b5..1eadadfd5 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -79,10 +79,10 @@ where use fred::interfaces::MetricsInterface; log::debug!("Started redis fetch for config"); - let config = { + let data = { // this block is so that the client connection is dropped // before we move on to parsing the config - let config = client + let data = client .get::(key_name.clone()) .await .map_err(|e| { @@ -92,7 +92,7 @@ where let metrics = client.take_latency_metrics(); let network_metrics = client.take_network_latency_metrics(); log::trace!( - "Network metrics for config fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", + "Network metrics for data fetch in milliseconds :: max: {}, min: {}, avg: {}; Latency metrics :: max: {}, min: {}, avg: {}", network_metrics.max, network_metrics.min, network_metrics.avg, @@ -100,10 +100,10 @@ where metrics.min, metrics.avg ); - config + data }; - let value = serde_json::from_str::(&config).map_err(|e| { + let value = serde_json::from_str::(&data).map_err(|e| { log::error!("Failed to parse value from redis: {}", e); format!("Failed to parse value from redis due to: {}", e) })?; diff --git a/crates/superposition/src/workspace/handlers.rs b/crates/superposition/src/workspace/handlers.rs index ed3214127..58f716542 100644 --- a/crates/superposition/src/workspace/handlers.rs +++ b/crates/superposition/src/workspace/handlers.rs @@ -240,7 +240,7 @@ async fn put_workspace_in_redis( service_utils::helpers::get_from_env_or_default("REDIS_KEY_TTL", 604800); let expiration = Some(Expiration::EX(key_ttl)); - if let Ok(serialized) = serde_json::to_string(&workspace) { + if let Ok(serialized) = serde_json::to_string(workspace) { let client = 
redis_pool.next_connected(); if let Err(e) = client From 53ed61fcba2447917a8e81786a90b57c654e73dc Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 16:28:14 +0530 Subject: [PATCH 18/22] fix: update comments on and simplify `fetch_from_redis_else_writeback` --- crates/service_utils/src/redis.rs | 62 ++++++++++++++++--------------- 1 file changed, 32 insertions(+), 30 deletions(-) diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index 1eadadfd5..ff6a4a363 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -21,8 +21,8 @@ pub const EXPERIMENT_GROUPS_LIST_KEY_SUFFIX: &str = "::experiment_groups_list"; /// Fetch data from Redis if available, else fall back to database call and write back to Redis /// if redis is disabled read from the database directly -/// the fallback function is expected to return Result -/// You can use move closures to capture variables in the database_call +/// the fallback function is expected to return Result +/// You can use move closures to capture variables in the run_query pub async fn fetch_from_redis_else_writeback( key: String, schema_name: &SchemaName, @@ -37,36 +37,38 @@ where log::trace!("Redis pool not configured, using fallback"); return run_query(db_pool, query_fn); }; + let client = pool.next_connected(); - match get_data_from_redis(key.clone(), client).await { - Ok(data) => Ok(data), - Err(e) => { - log::info!( - "Falling back to DB for schema {} due to Redis error: {}", - **schema_name, - e - ); - let data = run_query(db_pool, query_fn); - if let Ok(ref value) = data { - // If the write to redis fails, do not fail the whole request, just pass the data along - if let Ok(serialized) = serde_json::to_string(value).map_err(|e| { - log::error!("Failed to serialize data for redis writeback: {}", e); - }) { - let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); - let expiration = Some(Expiration::EX(key_ttl)); - let _ = client - .set::<(), String, String>( - key, serialized, expiration, None, false, - ) - .await - .map_err(|e| { - log::error!("Failed to write back data to redis: {}", e); - }); - } - } - data - } + + if let Ok(data) = get_data_from_redis(key.clone(), client).await { + return Ok(data); + } + + log::info!( + "Cache miss for schema {}, falling back to DB", + **schema_name, + ); + + let data = run_query(db_pool, query_fn)?; + + // Best-effort writeback — don't fail the request if Redis write fails + if let Ok(serialized) = serde_json::to_string(&data) { + let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800); + let _ = client + .set::<(), String, String>( + key, + serialized, + Some(Expiration::EX(key_ttl)), + None, + false, + ) + .await + .map_err(|e| log::error!("Failed to write back to Redis: {e}")); + } else { + log::error!("Failed to serialize data for Redis writeback"); } + + Ok(data) } pub async fn get_data_from_redis( From efb11dd94f4015ff00c741f804d54dc786bd4f65 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 16:53:41 +0530 Subject: [PATCH 19/22] fix: change function name --- .../src/api/config/handlers.rs | 14 +++++++------- .../context_aware_config/src/api/config/helpers.rs | 4 ++-- .../src/api/experiment_groups/handlers.rs | 4 ++-- .../src/api/experiments/handlers.rs | 6 +++--- .../src/middlewares/workspace_context.rs | 4 ++-- crates/service_utils/src/redis.rs | 8 ++++---- crates/superposition/src/resolve/handlers.rs | 10 +++++----- 7 files changed, 25 insertions(+), 25 deletions(-) diff --git 
a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index 21b62f443..b017ae59a 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -12,7 +12,7 @@ use service_utils::{ helpers::fetch_dimensions_info_map, redis::{ AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, - fetch_from_redis_else_writeback, + read_through_cache, }, service::types::{AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext}, }; @@ -498,7 +498,7 @@ async fn get_handler( let mut response = HttpResponse::Ok(); let is_smithy = req.method() != actix_web::http::Method::GET; let schema_name = &workspace_context.schema_name; - let max_created_at = fetch_from_redis_else_writeback::>( + let max_created_at = read_through_cache::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), schema_name, &state.redis, @@ -520,7 +520,7 @@ async fn get_handler( let version = get_config_version(&query_filters.version, &workspace_context, &state).await?; - let mut config = fetch_from_redis_else_writeback::( + let mut config = read_through_cache::( format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, version), schema_name, &state.redis, @@ -553,7 +553,7 @@ async fn get_handler( config = config.filter_by_dimensions(&context); } add_last_modified_to_header(max_created_at, is_smithy, &mut response); - if let Ok(audit_id) = fetch_from_redis_else_writeback::( + if let Ok(audit_id) = read_through_cache::( format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), schema_name, &state.redis, @@ -620,7 +620,7 @@ async fn resolve_handler( let query_filters = query_filters.into_inner(); let schema_name = &workspace_context.schema_name; - let max_created_at = fetch_from_redis_else_writeback::>( + let max_created_at = read_through_cache::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name), schema_name, &state.redis, @@ -637,7 +637,7 @@ async fn resolve_handler( let config_version = get_config_version(&query_filters.version, &workspace_context, &state).await?; - let mut config = fetch_from_redis_else_writeback::( + let mut config = read_through_cache::( format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, config_version,), schema_name, &state.redis, @@ -682,7 +682,7 @@ async fn resolve_handler( let mut resp = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut resp); - if let Ok(audit_id) = fetch_from_redis_else_writeback::( + if let Ok(audit_id) = read_through_cache::( format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), schema_name, &state.redis, diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index 48a7914ab..86341d7bb 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -8,7 +8,7 @@ use chrono::{DateTime, Timelike, Utc}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, dsl::max}; use serde_json::{Map, Value}; use service_utils::{ - redis::{CONFIG_VERSION_KEY_SUFFIX, fetch_from_redis_else_writeback}, + redis::{CONFIG_VERSION_KEY_SUFFIX, read_through_cache}, service::types::{AppHeader, AppState, EncryptionKey, SchemaName, WorkspaceContext}, }; use superposition_macros::{bad_argument, db_error, unexpected_error}; @@ -51,7 +51,7 @@ pub async fn get_config_version( ), _ => match workspace_context.settings.config_version { Some(v) => Ok(v), - None => fetch_from_redis_else_writeback::( + None => read_through_cache::( 
format!( "{}{CONFIG_VERSION_KEY_SUFFIX}", *workspace_context.schema_name diff --git a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs index d773a5e0f..d181a6f39 100644 --- a/crates/experimentation_platform/src/api/experiment_groups/handlers.rs +++ b/crates/experimentation_platform/src/api/experiment_groups/handlers.rs @@ -11,7 +11,7 @@ use serde_json::Value; use service_utils::{ db::run_query, helpers::{generate_snowflake_id, get_from_env_or_default}, - redis::{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, fetch_from_redis_else_writeback}, + redis::{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, read_through_cache}, service::types::{AppState, DbConnection, WorkspaceContext}, }; use superposition_derives::authorized; @@ -319,7 +319,7 @@ async fn list_handler( ); let read_from_redis = pagination_params.all.is_some_and(|e| e) && filters.is_empty(); if read_from_redis { - fetch_from_redis_else_writeback::>( + read_through_cache::>( key, &workspace_context.schema_name, &state.redis, diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index 57e36be58..ec34dea64 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -30,7 +30,7 @@ use service_utils::{ }, redis::{ EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, EXPERIMENTS_LIST_KEY_SUFFIX, - fetch_from_redis_else_writeback, + read_through_cache, }, service::types::{ AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext, @@ -965,7 +965,7 @@ async fn list_handler( dimension_params: DimensionQuery, state: Data, ) -> superposition::Result { - let max_event_timestamp = fetch_from_redis_else_writeback::>>( + let max_event_timestamp = read_through_cache::>>( format!( "{}{EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX}", *workspace_context.schema_name @@ -1005,7 +1005,7 @@ async fn list_handler( && dimension_params.is_empty(); if read_from_redis { let response = - fetch_from_redis_else_writeback::>( + read_through_cache::>( format!( "{}{EXPERIMENTS_LIST_KEY_SUFFIX}", *workspace_context.schema_name diff --git a/crates/service_utils/src/middlewares/workspace_context.rs b/crates/service_utils/src/middlewares/workspace_context.rs index ac7f6d67b..cb2d88714 100644 --- a/crates/service_utils/src/middlewares/workspace_context.rs +++ b/crates/service_utils/src/middlewares/workspace_context.rs @@ -14,7 +14,7 @@ use superposition_macros::bad_argument; use superposition_types::database::models::Workspace; use crate::helpers::get_workspace; -use crate::redis::fetch_from_redis_else_writeback; +use crate::redis::read_through_cache; use crate::{ extensions::HttpRequestExt, service::types::{AppState, OrganisationId, SchemaName, WorkspaceContext}, @@ -140,7 +140,7 @@ where let schema = format!("{}_{}", *organisation, *workspace_id); let schema_name = SchemaName(schema.clone()); let workspace_settings = - fetch_from_redis_else_writeback::( + read_through_cache::( schema, &schema_name, &app_state.redis, diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index ff6a4a363..dc8d3c1d0 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -23,19 +23,19 @@ pub const EXPERIMENT_GROUPS_LIST_KEY_SUFFIX: &str = "::experiment_groups_list"; /// if redis is disabled read from the database directly /// the fallback function is expected to return Result /// You can use move 
closures to capture variables in the run_query -pub async fn fetch_from_redis_else_writeback( +pub async fn read_through_cache( key: String, schema_name: &SchemaName, redis_pool: &Option, db_pool: &PgSchemaConnectionPool, - query_fn: impl FnOnce(&mut DBConnection) -> superposition::DieselResult, + fallback_fn: impl FnOnce(&mut DBConnection) -> superposition::DieselResult, ) -> superposition::Result where T: Serialize + DeserializeOwned, { let Some(pool) = redis_pool else { log::trace!("Redis pool not configured, using fallback"); - return run_query(db_pool, query_fn); + return run_query(db_pool, fallback_fn); }; let client = pool.next_connected(); @@ -49,7 +49,7 @@ where **schema_name, ); - let data = run_query(db_pool, query_fn)?; + let data = run_query(db_pool, fallback_fn)?; // Best-effort writeback — don't fail the request if Redis write fails if let Ok(serialized) = serde_json::to_string(&data) { diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index cd0fb1e4f..f13eef5b4 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -15,7 +15,7 @@ use service_utils::{ db::run_query, redis::{ AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, - LAST_MODIFIED_KEY_SUFFIX, fetch_from_redis_else_writeback, + LAST_MODIFIED_KEY_SUFFIX, read_through_cache, }, service::types::{AppHeader, AppState, WorkspaceContext}, }; @@ -63,7 +63,7 @@ async fn resolve_with_exp_handler( let identifier_query = identifier_query.into_inner(); let schema_name = workspace_context.schema_name.clone(); - let max_created_at = fetch_from_redis_else_writeback::>( + let max_created_at = read_through_cache::>( format!("{}{LAST_MODIFIED_KEY_SUFFIX}", *schema_name), &schema_name, &state.redis, @@ -82,7 +82,7 @@ async fn resolve_with_exp_handler( let config_version = get_config_version(&query_filters.version, &workspace_context, &state).await?; - let mut config = fetch_from_redis_else_writeback::( + let mut config = read_through_cache::( format!("{}::{}{CONFIG_KEY_SUFFIX}", *schema_name, config_version), &schema_name, &state.redis, @@ -113,7 +113,7 @@ async fn resolve_with_exp_handler( // Fetch experiment groups from redis let experiment_groups = - fetch_from_redis_else_writeback::>( + read_through_cache::>( format!("{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", *schema_name), &schema_name, &state.redis, @@ -203,7 +203,7 @@ async fn resolve_with_exp_handler( add_last_modified_to_header(max_created_at, is_smithy, &mut resp); // Fetch audit_id from redis - if let Ok(audit_id) = fetch_from_redis_else_writeback::( + if let Ok(audit_id) = read_through_cache::( format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0), &schema_name, &state.redis, From 35cffca3499936f547b946572ebacb3febf0cb92 Mon Sep 17 00:00:00 2001 From: datron Date: Thu, 5 Mar 2026 18:15:51 +0530 Subject: [PATCH 20/22] fix: resolving comments --- Cargo.lock | 2 - crates/context_aware_config/Cargo.toml | 1 - crates/context_aware_config/src/api/config.rs | 1 - .../src/api/config/handlers.rs | 54 +---- .../src/api/config/helpers.rs | 23 +- crates/context_aware_config/src/helpers.rs | 37 +--- .../src/api/experiments/handlers.rs | 196 ++++++++++-------- .../src/middlewares/workspace_context.rs | 17 +- crates/service_utils/src/redis.rs | 1 - crates/service_utils/src/service/types.rs | 1 - crates/superposition/Cargo.toml | 1 - crates/superposition/src/resolve/handlers.rs | 118 ++--------- .../superposition/src/workspace/handlers.rs | 39 ++-- 13 files 
changed, 157 insertions(+), 334 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c36c9c3b0..01cefb8c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1641,7 +1641,6 @@ dependencies = [ "superposition_derives", "superposition_macros", "superposition_types", - "uuid", ] [[package]] @@ -5577,7 +5576,6 @@ dependencies = [ "serde", "serde_json", "service_utils", - "superposition_core", "superposition_derives", "superposition_macros", "superposition_types", diff --git a/crates/context_aware_config/Cargo.toml b/crates/context_aware_config/Cargo.toml index a790b940f..1f52dd3d4 100644 --- a/crates/context_aware_config/Cargo.toml +++ b/crates/context_aware_config/Cargo.toml @@ -35,7 +35,6 @@ superposition_types = { workspace = true, features = [ "diesel_derives", "server", ] } -uuid = { workspace = true } [features] disable_db_data_validation = ["superposition_types/disable_db_data_validation"] diff --git a/crates/context_aware_config/src/api/config.rs b/crates/context_aware_config/src/api/config.rs index 0f0a44d03..62a998ddb 100644 --- a/crates/context_aware_config/src/api/config.rs +++ b/crates/context_aware_config/src/api/config.rs @@ -1,4 +1,3 @@ mod handlers; pub use handlers::endpoints; -pub use handlers::fetch_audit_id; pub mod helpers; diff --git a/crates/context_aware_config/src/api/config/handlers.rs b/crates/context_aware_config/src/api/config/handlers.rs index b017ae59a..34f1052f3 100644 --- a/crates/context_aware_config/src/api/config/handlers.rs +++ b/crates/context_aware_config/src/api/config/handlers.rs @@ -10,11 +10,8 @@ use itertools::Itertools; use serde_json::{Map, Value, json}; use service_utils::{ helpers::fetch_dimensions_info_map, - redis::{ - AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, - read_through_cache, - }, - service::types::{AppHeader, AppState, DbConnection, SchemaName, WorkspaceContext}, + redis::{CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, read_through_cache}, + service::types::{AppState, DbConnection, WorkspaceContext}, }; use superposition_core::{ helpers::{calculate_context_weight, hash}, @@ -38,18 +35,17 @@ use superposition_types::{ ChangeReason, cac::{ConfigVersion, ConfigVersionListItem}, }, - schema::{config_versions::dsl as config_versions, event_log::dsl as event_log}, + schema::config_versions::dsl as config_versions, }, - result::{self as superposition, DieselResult}, + result::{self as superposition}, }; -use uuid::Uuid; use crate::{ api::{ config::helpers::{ - add_audit_id_to_header, add_config_version_to_header, - add_last_modified_to_header, generate_config_from_version, - get_config_version, get_max_created_at, is_not_modified, + add_config_version_to_header, add_last_modified_to_header, + generate_config_from_version, get_config_version, get_max_created_at, + is_not_modified, }, context::{self, helpers::query_description}, }, @@ -68,19 +64,6 @@ pub fn endpoints() -> Scope { .service(get_version_handler) } -pub fn fetch_audit_id( - conn: &mut DBConnection, - schema_name: &SchemaName, -) -> DieselResult { - event_log::event_log - .select(event_log::id) - .filter(event_log::table_name.eq("contexts")) - .order_by(event_log::timestamp.desc()) - .schema_name(schema_name) - .first::(conn) - .map(String::from) -} - fn generate_subsets(map: &Map) -> Vec> { let mut subsets = Vec::new(); let keys: Vec = map.keys().cloned().collect_vec(); @@ -553,17 +536,6 @@ async fn get_handler( config = config.filter_by_dimensions(&context); } add_last_modified_to_header(max_created_at, is_smithy, &mut response); - if let Ok(audit_id) = 
read_through_cache::( - format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), - schema_name, - &state.redis, - &state.db_pool, - |conn| fetch_audit_id(conn, &workspace_context.schema_name), - ) - .await - { - response.insert_header((AppHeader::XAuditId.to_string(), audit_id)); - } add_config_version_to_header(&Some(version), &mut response); Ok(response.json(config)) } @@ -597,7 +569,6 @@ async fn get_toml_handler( let mut response = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, false, &mut response); - add_audit_id_to_header(&mut conn, &mut response, &workspace_context.schema_name); response.insert_header(("Content-Type", "application/toml")); Ok(response.body(toml_str)) @@ -682,17 +653,6 @@ async fn resolve_handler( let mut resp = HttpResponse::Ok(); add_last_modified_to_header(max_created_at, is_smithy, &mut resp); - if let Ok(audit_id) = read_through_cache::( - format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name), - schema_name, - &state.redis, - &state.db_pool, - |conn| fetch_audit_id(conn, &workspace_context.schema_name), - ) - .await - { - resp.insert_header((AppHeader::XAuditId.to_string(), audit_id)); - } add_config_version_to_header(&Some(config_version), &mut resp); Ok(resp.json(resolved_config)) } diff --git a/crates/context_aware_config/src/api/config/helpers.rs b/crates/context_aware_config/src/api/config/helpers.rs index 86341d7bb..84af4b414 100644 --- a/crates/context_aware_config/src/api/config/helpers.rs +++ b/crates/context_aware_config/src/api/config/helpers.rs @@ -16,12 +16,9 @@ use superposition_types::{ Config, DBConnection, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{CommaSeparatedStringQParams, DimensionQuery, QueryMap}, - database::schema::{ - config_versions::dsl as config_versions, event_log::dsl as event_log, - }, + database::schema::config_versions::dsl as config_versions, result as superposition, }; -use uuid::Uuid; use crate::helpers::{evaluate_remote_cohorts, generate_cac}; @@ -73,24 +70,6 @@ pub async fn get_config_version( } } -pub fn add_audit_id_to_header( - conn: &mut DBConnection, - resp_builder: &mut HttpResponseBuilder, - schema_name: &SchemaName, -) { - if let Ok(uuid) = event_log::event_log - .select(event_log::id) - .filter(event_log::table_name.eq("contexts")) - .order_by(event_log::timestamp.desc()) - .schema_name(schema_name) - .first::(conn) - { - resp_builder.insert_header((AppHeader::XAuditId.to_string(), uuid.to_string())); - } else { - log::error!("Failed to fetch contexts from event_log"); - } -} - pub fn add_last_modified_to_header( max_created_at: Option>, is_smithy: bool, diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs index 9ca50ac56..a324f6005 100644 --- a/crates/context_aware_config/src/helpers.rs +++ b/crates/context_aware_config/src/helpers.rs @@ -10,10 +10,7 @@ use fred::{interfaces::KeysInterface, types::Expiration}; use serde_json::{Map, Value, json}; use service_utils::{ helpers::get_from_env_or_default, - redis::{ - AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, - LAST_MODIFIED_KEY_SUFFIX, - }, + redis::{CONFIG_KEY_SUFFIX, CONFIG_VERSION_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX}, }; use service_utils::{ helpers::{fetch_dimensions_info_map, generate_snowflake_id}, @@ -39,13 +36,11 @@ use superposition_types::{ config_versions, contexts::dsl::{self as ctxt}, default_configs::dsl as def_conf, - event_log::dsl as event_log, }, }, logic::dimensions_to_start_from, result as superposition, }; -use uuid::Uuid; use 
crate::{ api::{ @@ -250,7 +245,6 @@ pub async fn put_config_in_redis( })?; let config_key = format!("{}::{}{CONFIG_KEY_SUFFIX}", **schema_name, version_id); let last_modified_at_key = format!("{}{LAST_MODIFIED_KEY_SUFFIX}", **schema_name); - let audit_id_key = format!("{}{AUDIT_ID_KEY_SUFFIX}", **schema_name); let config_version_key = format!("{}{CONFIG_VERSION_KEY_SUFFIX}", **schema_name); let last_modified = DateTime::to_rfc2822(&Utc::now()); redis_pool @@ -279,35 +273,8 @@ pub async fn put_config_in_redis( log::warn!("failed to set last_modified_key in redis: {}", e); unexpected_error!("failed to set last_modified_key in redis") })?; - if let Ok(uuid) = event_log::event_log - .select(event_log::id) - .filter(event_log::table_name.eq("contexts")) - .schema_name(schema_name) - .order_by(event_log::timestamp.desc()) - .first::(db_conn) - { - redis_pool - .set::<(), String, String>( - audit_id_key, - uuid.to_string(), - expiration.clone(), - None, - false, - ) - .await - .map_err(|e| { - log::warn!("failed to set audit_id in redis: {}", e); - unexpected_error!("failed to set audit_id in redis") - })?; - } redis_pool - .set::<(), String, i64>( - config_version_key, - version_id, - expiration, - None, - false, - ) + .set::<(), String, i64>(config_version_key, version_id, expiration, None, false) .await .map_err(|e| { log::warn!("failed to set config_version_key in redis: {}", e); diff --git a/crates/experimentation_platform/src/api/experiments/handlers.rs b/crates/experimentation_platform/src/api/experiments/handlers.rs index ec34dea64..031d7c25d 100644 --- a/crates/experimentation_platform/src/api/experiments/handlers.rs +++ b/crates/experimentation_platform/src/api/experiments/handlers.rs @@ -29,8 +29,8 @@ use service_utils::{ fetch_dimensions_info_map, generate_snowflake_id, request, }, redis::{ - EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, EXPERIMENTS_LIST_KEY_SUFFIX, - read_through_cache, + EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, EXPERIMENTS_LAST_MODIFIED_KEY_SUFFIX, + EXPERIMENTS_LIST_KEY_SUFFIX, read_through_cache, }, service::types::{ AppHeader, AppState, CustomHeaders, DbConnection, WorkspaceContext, @@ -380,12 +380,9 @@ async fn create_handler( })?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let response = ExperimentResponse::from(inserted_experiment); let data = WebhookData { @@ -443,12 +440,9 @@ async fn conclude_handler( .await?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let experiment_response = ExperimentResponse::from(response); @@ -718,12 +712,9 @@ async fn discard_handler( .await?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let experiment_response = ExperimentResponse::from(response); @@ -855,19 +846,30 @@ pub async fn discard( Ok((updated_experiment, config_version_id)) } -pub fn get_applicable_variants_helper( - db_conn: &mut PooledConnection>, +pub async fn 
get_applicable_variants_helper( context: Map, dimensions_info: &HashMap, identifier: String, workspace_context: &WorkspaceContext, -) -> superposition::DieselResult<(Vec, HashMap)> { + app_state: &Data, +) -> superposition::Result<(Vec, HashMap)> { use superposition_types::database::schema::experiments::dsl; - let experiment_groups = experiment_groups::experiment_groups - .schema_name(&workspace_context.schema_name) - .load::(db_conn)?; - + let experiment_groups = read_through_cache::>( + format!( + "{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", + *workspace_context.schema_name + ), + &workspace_context.schema_name, + &app_state.redis, + &app_state.db_pool, + |conn| { + experiment_groups::experiment_groups + .schema_name(&workspace_context.schema_name) + .load::(conn) + }, + ) + .await?; let context = evaluate_local_cohorts(dimensions_info, &context); let buckets = @@ -878,21 +880,41 @@ pub fn get_applicable_variants_helper( .filter_map(|(_, bucket)| bucket.experiment_id.parse::().ok()) .collect::>(); - let exps = dsl::experiments - .filter( - dsl::id - .eq_any(exp_ids) - .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), - ) - .schema_name(&workspace_context.schema_name) - .load::(db_conn)? - .into_iter() - .map(|exp| { - let exp_response = ExperimentResponse::from(exp); - let id = exp_response.id.clone(); - (id, exp_response) - }) - .collect::>(); + let exps = read_through_cache::>( + format!( + "{}{EXPERIMENTS_LIST_KEY_SUFFIX}", + *workspace_context.schema_name + ), + &workspace_context.schema_name, + &app_state.redis, + &app_state.db_pool, + |conn| { + let experiments = dsl::experiments + .filter( + dsl::id + .eq_any(&exp_ids) + .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), + ) + .schema_name(&workspace_context.schema_name) + .load::(conn)?; + Ok(PaginatedResponse { + data: experiments + .into_iter() + .map(ExperimentResponse::from) + .collect(), + total_pages: 1, + total_items: exp_ids.len() as i64, + }) + }, + ) + .await? 
+ .data + .into_iter() + .map(|exp| { + let id = exp.id.clone(); + (id, exp) + }) + .collect::>(); let applicable_variants = get_applicable_variants_from_group_response(&exps, &context, &buckets); @@ -931,11 +953,18 @@ async fn get_applicable_variants_handler( return Err(bad_argument!("Invalid input for the method")); } }; - let (applicable_variants, exps) = run_query(&state.db_pool, |conn| { - let di = fetch_dimensions_info_map(conn, &workspace_context.schema_name)?; - get_applicable_variants_helper(conn, context, &di, identifier, &workspace_context) - })?; + let di = run_query(&state.db_pool, |conn| { + fetch_dimensions_info_map(conn, &workspace_context.schema_name) + })?; + let (applicable_variants, exps) = get_applicable_variants_helper( + context, + &di, + identifier, + &workspace_context, + &state, + ) + .await?; let variants = exps .into_iter() .filter_map(|(_, experiment)| { @@ -1004,26 +1033,25 @@ async fn list_handler( .is_some_and(|v| *v == ExperimentStatusType::active_list()) && dimension_params.is_empty(); if read_from_redis { - let response = - read_through_cache::>( - format!( - "{}{EXPERIMENTS_LIST_KEY_SUFFIX}", - *workspace_context.schema_name - ), - &workspace_context.schema_name, - &state.redis, - &state.db_pool, - |conn| { - list_experiments_db( - pagination_params.clone(), - filters.clone(), - dimension_params.clone(), - conn, - &workspace_context, - ) - }, - ) - .await?; + let response = read_through_cache::>( + format!( + "{}{EXPERIMENTS_LIST_KEY_SUFFIX}", + *workspace_context.schema_name + ), + &workspace_context.schema_name, + &state.redis, + &state.db_pool, + |conn| { + list_experiments_db( + pagination_params.clone(), + filters.clone(), + dimension_params.clone(), + conn, + &workspace_context, + ) + }, + ) + .await?; Ok(HttpResponse::Ok().json(response)) } else { let paginated_response = run_query(&state.db_pool, |conn| { @@ -1370,12 +1398,9 @@ async fn ramp_handler( let (_, config_version_id) = fetch_cac_config(&state, &workspace_context).await?; let experiment_response = ExperimentResponse::from(updated_experiment); - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let webhook_event = if matches!(experiment.status, ExperimentStatusType::CREATED) { WebhookEvent::ExperimentStarted @@ -1719,12 +1744,9 @@ async fn update_handler( })?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let experiment_response = ExperimentResponse::from(updated_experiment); @@ -1779,12 +1801,9 @@ async fn pause_handler( .await?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, &workspace_context.schema_name) + .await; let experiment_response = ExperimentResponse::from(response); @@ -1875,12 +1894,9 @@ async fn resume_handler( .await?; // Update Redis cache with active experiments and experiment groups - let _ = put_experiments_in_redis( - &state.redis, - &mut conn, - &workspace_context.schema_name, - ) - .await; + let _ = + put_experiments_in_redis(&state.redis, &mut conn, 
&workspace_context.schema_name) + .await; let experiment_response = ExperimentResponse::from(response); diff --git a/crates/service_utils/src/middlewares/workspace_context.rs b/crates/service_utils/src/middlewares/workspace_context.rs index cb2d88714..cd65d0338 100644 --- a/crates/service_utils/src/middlewares/workspace_context.rs +++ b/crates/service_utils/src/middlewares/workspace_context.rs @@ -139,15 +139,14 @@ where (true, Some(workspace_id)) => { let schema = format!("{}_{}", *organisation, *workspace_id); let schema_name = SchemaName(schema.clone()); - let workspace_settings = - read_through_cache::( - schema, - &schema_name, - &app_state.redis, - &app_state.db_pool, - |db_conn| get_workspace(&schema_name, db_conn), - ) - .await?; + let workspace_settings = read_through_cache::( + schema, + &schema_name, + &app_state.redis, + &app_state.db_pool, + |db_conn| get_workspace(&schema_name, db_conn), + ) + .await?; req.extensions_mut().insert(workspace_id.clone()); req.extensions_mut().insert(WorkspaceContext { diff --git a/crates/service_utils/src/redis.rs b/crates/service_utils/src/redis.rs index dc8d3c1d0..8663a2616 100644 --- a/crates/service_utils/src/redis.rs +++ b/crates/service_utils/src/redis.rs @@ -12,7 +12,6 @@ use crate::{ }; pub const LAST_MODIFIED_KEY_SUFFIX: &str = "::cac_config::last_modified_at"; -pub const AUDIT_ID_KEY_SUFFIX: &str = "::cac_config::audit_id"; pub const CONFIG_VERSION_KEY_SUFFIX: &str = "::cac_config::config_version"; pub const CONFIG_KEY_SUFFIX: &str = "::cac_config"; pub const EXPERIMENTS_LIST_KEY_SUFFIX: &str = "::experiments_list"; diff --git a/crates/service_utils/src/service/types.rs b/crates/service_utils/src/service/types.rs index f5ad16a6c..ea2858f1b 100644 --- a/crates/service_utils/src/service/types.rs +++ b/crates/service_utils/src/service/types.rs @@ -34,7 +34,6 @@ pub enum AppEnv { #[strum(serialize_all = "kebab-case")] pub enum AppHeader { XConfigVersion, - XAuditId, LastModified, } diff --git a/crates/superposition/Cargo.toml b/crates/superposition/Cargo.toml index 585e37fa1..3c7b1a1a6 100644 --- a/crates/superposition/Cargo.toml +++ b/crates/superposition/Cargo.toml @@ -29,7 +29,6 @@ rs-snowflake = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } service_utils = { workspace = true } -superposition_core = { workspace = true } superposition_derives = { workspace = true } superposition_macros = { workspace = true } superposition_types = { workspace = true, features = [ diff --git a/crates/superposition/src/resolve/handlers.rs b/crates/superposition/src/resolve/handlers.rs index f13eef5b4..014eab07c 100644 --- a/crates/superposition/src/resolve/handlers.rs +++ b/crates/superposition/src/resolve/handlers.rs @@ -3,38 +3,23 @@ use actix_web::{ web::{Data, Header, Json}, }; use chrono::{DateTime, Utc}; -use context_aware_config::api::config::fetch_audit_id; use context_aware_config::api::config::helpers::{ add_config_version_to_header, add_last_modified_to_header, generate_config_from_version, get_config_version, get_max_created_at, is_not_modified, resolve, setup_query_data, }; -use diesel::{BoolExpressionMethods, ExpressionMethods, QueryDsl, RunQueryDsl}; +use experimentation_platform::api::experiments::handlers::get_applicable_variants_helper; use serde_json::{Map, Value}; use service_utils::{ - db::run_query, - redis::{ - AUDIT_ID_KEY_SUFFIX, CONFIG_KEY_SUFFIX, EXPERIMENT_GROUPS_LIST_KEY_SUFFIX, - LAST_MODIFIED_KEY_SUFFIX, read_through_cache, - }, - service::types::{AppHeader, AppState, WorkspaceContext}, 
-}; -use std::collections::{HashMap, HashSet}; -use superposition_core::experiment::{ - FfiExperiment, FfiExperimentGroup, get_applicable_buckets_from_group, - get_applicable_variants_from_group_response, + redis::{CONFIG_KEY_SUFFIX, LAST_MODIFIED_KEY_SUFFIX, read_through_cache}, + service::types::{AppState, WorkspaceContext}, }; use superposition_derives::authorized; use superposition_macros::unexpected_error; use superposition_types::{ - Config, PaginatedResponse, + Config, api::config::{ContextPayload, MergeStrategy, ResolveConfigQuery}, custom_query::{self as superposition_query, CustomQuery, DimensionQuery, QueryMap}, - database::{ - models::experimentation::{Experiment, ExperimentGroup, ExperimentStatusType}, - schema::{experiment_groups::dsl as experiment_groups, experiments::dsl}, - }, - logic::evaluate_local_cohorts, result as superposition, }; @@ -107,81 +92,19 @@ async fn resolve_with_exp_handler( .map_err(|e| unexpected_error!("failed to generate config: {}", e))?; if let (None, Some(identifier)) = - (query_filters.version.clone(), identifier_query.identifier) + (&query_filters.version, identifier_query.identifier) { let context_map: &Map = &query_data; - - // Fetch experiment groups from redis - let experiment_groups = - read_through_cache::>( - format!("{}{EXPERIMENT_GROUPS_LIST_KEY_SUFFIX}", *schema_name), - &schema_name, - &state.redis, - &state.db_pool, - |conn| { - let groups = experiment_groups::experiment_groups - .schema_name(&workspace_context.schema_name) - .load::(conn)?; - let total_items = groups.len() as i64; - Ok(PaginatedResponse { - total_pages: 1, - total_items, - data: groups, - }) - }, - ) - .await; - - let experiment_groups: Vec = experiment_groups - .map(|paginated| paginated.data) - .unwrap_or_default(); - - // Convert to FfiExperimentGroup for superposition_core functions - let ffi_experiment_groups: Vec = experiment_groups - .into_iter() - .map(FfiExperimentGroup::from) - .collect(); - - let context = evaluate_local_cohorts(&config.dimensions, context_map); - - let buckets = get_applicable_buckets_from_group( - &ffi_experiment_groups, - &context, - &identifier, - ); - - let exp_ids = buckets - .iter() - .filter_map(|(_, bucket)| bucket.experiment_id.parse::().ok()) - .collect::>(); - - // Fetch experiments from database (these are filtered by specific IDs, so caching all wouldn't help much) - let exps: HashMap = if !exp_ids.is_empty() { - run_query(&state.db_pool, |conn| { - Ok(dsl::experiments - .filter( - dsl::id - .eq_any(exp_ids) - .and(dsl::status.eq(ExperimentStatusType::INPROGRESS)), - ) - .schema_name(&workspace_context.schema_name) - .load::(conn)? - .into_iter() - .map(|exp| { - let ffi_exp = FfiExperiment::from(exp); - let id = ffi_exp.id.clone(); - (id, ffi_exp) - }) - .collect()) - })? 
-        } else {
-            HashMap::new()
-        };
-
-        let applicable_variants =
-            get_applicable_variants_from_group_response(&exps, &context, &buckets);
+        let (applicable_variants, _) = get_applicable_variants_helper(
+            context_map.clone(),
+            &config.dimensions,
+            identifier,
+            &workspace_context,
+            &state,
+        )
+        .await?;
         query_data.insert("variantIds".to_string(), applicable_variants.into());
-    }
+    };
 
     let resolved_config = {
         let mut conn = state.db_pool.get().map_err(|e| {
@@ -202,19 +125,6 @@ async fn resolve_with_exp_handler(
     let mut resp = HttpResponse::Ok();
     add_last_modified_to_header(max_created_at, is_smithy, &mut resp);
 
-    // Fetch audit_id from redis
-    if let Ok(audit_id) = read_through_cache::(
-        format!("{}{AUDIT_ID_KEY_SUFFIX}", schema_name.0),
-        &schema_name,
-        &state.redis,
-        &state.db_pool,
-        |conn| fetch_audit_id(conn, &workspace_context.schema_name),
-    )
-    .await
-    {
-        resp.insert_header((AppHeader::XAuditId.to_string(), audit_id));
-    }
-
     add_config_version_to_header(&Some(config_version), &mut resp);
     Ok(resp.json(resolved_config))
 }
diff --git a/crates/superposition/src/workspace/handlers.rs b/crates/superposition/src/workspace/handlers.rs
index 58f716542..da1cec714 100644
--- a/crates/superposition/src/workspace/handlers.rs
+++ b/crates/superposition/src/workspace/handlers.rs
@@ -198,28 +198,27 @@ async fn update_handler(
         .first::(&mut conn)?;
     }
 
-    let updated_workspace =
-        conn.transaction::(|transaction_conn| {
-            let updated_workspace = diesel::update(workspaces::table)
-                .filter(workspaces::organisation_id.eq(&org_id.0))
-                .filter(workspaces::workspace_name.eq(workspace_name))
-                .set((
-                    request,
-                    workspaces::last_modified_by.eq(user.get_email()),
-                    workspaces::last_modified_at.eq(timestamp),
-                ))
-                .get_result::(transaction_conn)
-                .map_err(|err| {
-                    log::error!("failed to update workspace with error: {}", err);
-                    err
-                })?;
-
-            Ok(updated_workspace)
-        })?;
+    conn.transaction::<(), superposition::AppError, _>(|transaction_conn| {
+        diesel::update(workspaces::table)
+            .filter(workspaces::organisation_id.eq(&org_id.0))
+            .filter(workspaces::workspace_name.eq(workspace_name))
+            .set((
+                request,
+                workspaces::last_modified_by.eq(user.get_email()),
+                workspaces::last_modified_at.eq(timestamp),
+            ))
+            .execute(transaction_conn)
+            .map_err(|err| {
+                log::error!("failed to update workspace with error: {}", err);
+                err
+            })?;
+        Ok(())
+    })?;
 
-    put_workspace_in_redis(&updated_workspace, &app_state, &schema_name.0).await;
+    let workspace = get_workspace(&schema_name, &mut conn)?;
+    put_workspace_in_redis(&workspace, &app_state, &schema_name.0).await;
 
-    let response = WorkspaceResponse::from(updated_workspace);
+    let response = WorkspaceResponse::from(workspace);
 
     Ok(Json(response))
 }

From 2bc3a06b573ec8433ddac5b9c1ce77575695c8d3 Mon Sep 17 00:00:00 2001
From: datron
Date: Thu, 5 Mar 2026 18:24:10 +0530
Subject: [PATCH 21/22] fix: remove json! macro
---
 crates/context_aware_config/src/helpers.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/context_aware_config/src/helpers.rs b/crates/context_aware_config/src/helpers.rs
index a324f6005..b9495cf56 100644
--- a/crates/context_aware_config/src/helpers.rs
+++ b/crates/context_aware_config/src/helpers.rs
@@ -239,7 +239,7 @@ pub async fn put_config_in_redis(
     let key_ttl: i64 = get_from_env_or_default("REDIS_KEY_TTL", 604800);
     let expiration = Some(Expiration::EX(key_ttl));
     let raw_config = generate_cac(db_conn, schema_name)?;
-    let parsed_config = serde_json::to_string(&json!(raw_config)).map_err(|e| {
+    let parsed_config = serde_json::to_string(&raw_config).map_err(|e| {
         log::error!("failed to convert cac config to string: {}", e);
         unexpected_error!("could not convert cac config to string")
     })?;

From 4527c0e8524dd398256506c8d07b8e7d5d4cc874 Mon Sep 17 00:00:00 2001
From: datron
Date: Thu, 5 Mar 2026 19:55:32 +0530
Subject: [PATCH 22/22] fix: builds
---
 .env.example                       | 2 +-
 .github/workflows/ci_check_pr.yaml | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.env.example b/.env.example
index afd0df460..773c35ac2 100644
--- a/.env.example
+++ b/.env.example
@@ -38,7 +38,7 @@ AUTH_PROVIDER=DISABLED
 AUTH_Z_PROVIDER=DISABLED
 WORKER_ID=1
 # MASTER_ENCRYPTION_KEY - add this for enabling secrets in local
-REDIS_URL="http://localhost:6379"
+# REDIS_URL="http://localhost:6379"
 REDIS_POOL_SIZE="10"
 REDIS_MAX_ATTEMPTS="10"
 REDIS_CONN_TIMEOUT="1000"
diff --git a/.github/workflows/ci_check_pr.yaml b/.github/workflows/ci_check_pr.yaml
index 897cfca27..3a31525ca 100644
--- a/.github/workflows/ci_check_pr.yaml
+++ b/.github/workflows/ci_check_pr.yaml
@@ -44,7 +44,7 @@ env:
 jobs:
   formatting:
     name: Check formatting
-    runs-on: anton1
+    runs-on: codebuild-superposition-${{ github.run_id }}-${{ github.run_attempt }}
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
@@ -101,7 +101,7 @@ jobs:
 
   test:
     name: Testing
-    runs-on: anton1
+    runs-on: codebuild-superposition-${{ github.run_id }}-${{ github.run_attempt }}
     services:
       postgres:
         image: public.ecr.aws/docker/library/postgres:15-alpine3.21
@@ -195,7 +195,7 @@ jobs:
 
   java-build:
     name: Java build
-    runs-on: anton1
+    runs-on: codebuild-superposition-${{ github.run_id }}-${{ github.run_attempt }}
     defaults:
       run:
         working-directory: clients/java
@@ -252,7 +252,7 @@ jobs:
 
   provider-tests:
     name: Provider Tests
-    runs-on: anton1
+    runs-on: codebuild-superposition-${{ github.run_id }}-${{ github.run_attempt }}
     strategy:
       matrix:
         provider: