diff --git a/Cargo.lock b/Cargo.lock
index 026aeaca5..cd48886b3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2009,6 +2009,31 @@ version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
+[[package]]
+name = "hive-console-sdk"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "752a852d62a36b0492125778563012cf6f84ca4ac8a8e2566e85f8f0f9a4c345"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "axum-core",
+ "graphql-parser",
+ "graphql-tools",
+ "md5",
+ "moka",
+ "reqwest",
+ "reqwest-middleware",
+ "reqwest-retry",
+ "serde",
+ "serde_json",
+ "sha2",
+ "thiserror 2.0.17",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
 [[package]]
 name = "hive-router"
 version = "0.0.15"
@@ -2019,6 +2044,7 @@ dependencies = [
  "futures",
  "graphql-parser",
  "graphql-tools",
+ "hive-console-sdk",
  "hive-router-config",
  "hive-router-plan-executor",
  "hive-router-query-planner",
@@ -2059,6 +2085,7 @@ dependencies = [
  "http",
  "humantime-serde",
  "jsonwebtoken",
+ "regex-automata",
  "retry-policies",
  "schemars 1.0.4",
  "serde",
@@ -2256,6 +2283,7 @@ dependencies = [
  "tokio",
  "tokio-rustls",
  "tower-service",
+ "webpki-roots",
 ]
 
 [[package]]
@@ -2803,6 +2831,12 @@ dependencies = [
  "digest",
 ]
 
+[[package]]
+name = "md5"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
+
 [[package]]
 name = "memchr"
 version = "2.7.6"
@@ -4290,7 +4324,9 @@ dependencies = [
  "base64 0.22.1",
  "bytes",
  "encoding_rs",
+ "futures-channel",
  "futures-core",
+ "futures-util",
  "h2",
  "http",
  "http-body",
@@ -4323,6 +4359,7 @@ dependencies = [
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "web-sys",
+ "webpki-roots",
 ]
 
 [[package]]
@@ -6181,6 +6218,15 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "webpki-roots"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32b130c0d2d49f8b6889abc456e795e82525204f27c42cf767cf0d7734e089b8"
+dependencies = [
+ "rustls-pki-types",
+]
+
 [[package]]
 name = "winapi"
 version = "0.3.9"
diff --git a/Cargo.toml b/Cargo.toml
index 85b88ee75..00857d320 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -59,3 +59,4 @@ retry-policies = "0.4.0"
 reqwest-retry = "0.7.0"
 reqwest-middleware = "0.4.2"
 vrl = { version = "0.27.0", features = ["compiler", "parser", "value", "diagnostic", "stdlib", "core"] }
+regex-automata = "0.4.10"
diff --git a/bin/router/Cargo.toml b/bin/router/Cargo.toml
index 5eddacd3f..ce5f37392 100644
--- a/bin/router/Cargo.toml
+++ b/bin/router/Cargo.toml
@@ -19,6 +19,7 @@ path = "src/main.rs"
 hive-router-query-planner = { path = "../../lib/query-planner", version = "2.0.2" }
 hive-router-plan-executor = { path = "../../lib/executor", version = "6.0.0" }
 hive-router-config = { path = "../../lib/router-config", version = "0.0.10" }
+hive-console-sdk = "0.1.0"
 
 tokio = { workspace = true }
 futures = { workspace = true }
@@ -45,11 +46,11 @@ reqwest-retry = { workspace = true }
 reqwest-middleware = { workspace = true }
 vrl = { workspace = true }
 serde_json = { workspace = true }
+regex-automata = { workspace = true }
 
 mimalloc = { version = "0.1.48", features = ["v3"] }
 moka = { version = "0.12.10", features = ["future"] }
 ulid = "1.2.1"
 tokio-util = "0.7.16"
 cookie = "0.18.1"
-regex-automata = "0.4.10"
 arc-swap = "1.7.1"
diff --git a/bin/router/src/lib.rs b/bin/router/src/lib.rs
index 6a3f7f5c0..2e471d2f5 100644
--- a/bin/router/src/lib.rs
+++ b/bin/router/src/lib.rs
@@ -19,7 +19,7 @@ use crate::{
     },
     jwt::JwtAuthRuntime,
     logger::configure_logging,
-    pipeline::graphql_request_handler,
+    pipeline::{graphql_request_handler, usage_reporting::init_hive_user_agent},
 };
 
 pub use crate::{schema_state::SchemaState, shared_state::RouterSharedState};
@@ -111,11 +111,23 @@ pub async fn configure_app_from_config(
         false => None,
     };
 
+    let hive_usage_agent = match router_config.usage_reporting.enabled {
+        true => Some(init_hive_user_agent(
+            bg_tasks_manager,
+            &router_config.usage_reporting,
+        )),
+        false => None,
+    };
+
     let router_config_arc = Arc::new(router_config);
     let schema_state =
         SchemaState::new_from_config(bg_tasks_manager, router_config_arc.clone()).await?;
     let schema_state_arc = Arc::new(schema_state);
-    let shared_state = Arc::new(RouterSharedState::new(router_config_arc, jwt_runtime)?);
+    let shared_state = Arc::new(RouterSharedState::new(
+        router_config_arc,
+        jwt_runtime,
+        hive_usage_agent,
+    )?);
 
     Ok((shared_state, schema_state_arc))
 }
diff --git a/bin/router/src/pipeline/mod.rs b/bin/router/src/pipeline/mod.rs
index 2b4721972..1f63d9bd4 100644
--- a/bin/router/src/pipeline/mod.rs
+++ b/bin/router/src/pipeline/mod.rs
@@ -1,4 +1,4 @@
-use std::sync::Arc;
+use std::{sync::Arc, time::Instant};
 
 use hive_router_plan_executor::execution::{
     client_request_details::{ClientRequestDetails, JwtRequestDetails, OperationDetails},
@@ -46,6 +46,7 @@ pub mod normalize;
 pub mod parser;
 pub mod progressive_override;
 pub mod query_plan;
+pub mod usage_reporting;
 pub mod validation;
 
 static GRAPHIQL_HTML: &str = include_str!("../../static/graphiql.html");
@@ -111,6 +112,7 @@ pub async fn execute_pipeline(
     shared_state: &Arc<RouterSharedState>,
     schema_state: &Arc<SchemaState>,
 ) -> Result {
+    let start = Instant::now();
     perform_csrf_prevention(req, &shared_state.router_config.csrf)?;
 
     let mut execution_request = get_execution_request(req, body_bytes).await?;
@@ -190,5 +192,19 @@
     )
     .await?;
 
+    if shared_state.router_config.usage_reporting.enabled {
+        if let Some(hive_usage_agent) = &shared_state.hive_usage_agent {
+            usage_reporting::collect_usage_report(
+                supergraph.supergraph_schema.clone(),
+                start.elapsed(),
+                req,
+                &client_request_details,
+                hive_usage_agent,
+                &shared_state.router_config.usage_reporting,
+                &execution_result,
+            );
+        }
+    }
+
     Ok(execution_result)
 }
diff --git a/bin/router/src/pipeline/usage_reporting.rs b/bin/router/src/pipeline/usage_reporting.rs
new file mode 100644
index 000000000..1e1b3c457
--- /dev/null
+++ b/bin/router/src/pipeline/usage_reporting.rs
@@ -0,0 +1,105 @@
+use std::{
+    sync::Arc,
+    time::{Duration, SystemTime, UNIX_EPOCH},
+};
+
+use async_trait::async_trait;
+use graphql_parser::schema::Document;
+use hive_console_sdk::agent::{ExecutionReport, UsageAgent};
+use hive_router_config::usage_reporting::UsageReportingConfig;
+use hive_router_plan_executor::execution::{
+    client_request_details::ClientRequestDetails, plan::PlanExecutionOutput,
+};
+use ntex::web::HttpRequest;
+use rand::Rng;
+use tokio_util::sync::CancellationToken;
+
+use crate::{
+    background_tasks::{BackgroundTask, BackgroundTasksManager},
+    consts::ROUTER_VERSION,
+};
+
+pub fn init_hive_user_agent(
+    bg_tasks_manager: &mut BackgroundTasksManager,
+    usage_config: &UsageReportingConfig,
+) -> Arc<UsageAgent> {
+    let user_agent = format!("hive-router/{}", ROUTER_VERSION);
+    let hive_user_agent = hive_console_sdk::agent::UsageAgent::new(
+        usage_config.access_token.clone(),
+        usage_config.endpoint.clone(),
+        usage_config.target_id.clone(),
+        usage_config.buffer_size,
+        usage_config.connect_timeout,
+        usage_config.request_timeout,
+        usage_config.accept_invalid_certs,
+        usage_config.flush_interval,
+        user_agent,
+    );
+    let hive_user_agent_arc = Arc::new(hive_user_agent);
+    bg_tasks_manager.register_task(hive_user_agent_arc.clone());
+    hive_user_agent_arc
+}
+
+#[inline]
+pub fn collect_usage_report(
+    schema: Arc<Document<'static, String>>,
+    duration: Duration,
+    req: &HttpRequest,
+    client_request_details: &ClientRequestDetails,
+    hive_usage_agent: &UsageAgent,
+    usage_config: &UsageReportingConfig,
+    execution_result: &PlanExecutionOutput,
+) {
+    let mut rng = rand::rng();
+    let sampled = rng.random::<f64>() < usage_config.sample_rate.as_f64();
+    if !sampled {
+        return;
+    }
+    if client_request_details
+        .operation
+        .name
+        .is_some_and(|op_name| usage_config.exclude.contains(&op_name.to_string()))
+    {
+        return;
+    }
+    let client_name = get_header_value(req, &usage_config.client_name_header);
+    let client_version = get_header_value(req, &usage_config.client_version_header);
+    let timestamp = SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .unwrap()
+        .as_millis() as u64;
+    let execution_report = ExecutionReport {
+        schema,
+        client_name: client_name.map(|s| s.to_owned()),
+        client_version: client_version.map(|s| s.to_owned()),
+        timestamp,
+        duration,
+        ok: execution_result.error_count == 0,
+        errors: execution_result.error_count,
+        operation_body: client_request_details.operation.query.to_owned(),
+        operation_name: client_request_details
+            .operation
+            .name
+            .map(|op_name| op_name.to_owned()),
+        persisted_document_hash: None,
+    };
+
+    if let Err(err) = hive_usage_agent.add_report(execution_report) {
+        tracing::error!("Failed to send usage report: {}", err);
+    }
+}
+
+fn get_header_value<'req>(req: &'req HttpRequest, header_name: &str) -> Option<&'req str> {
+    req.headers().get(header_name).and_then(|v| v.to_str().ok())
+}
+
+#[async_trait]
+impl BackgroundTask for UsageAgent {
+    fn id(&self) -> &str {
+        "hive_console_usage_report_task"
+    }
+
+    async fn run(&self, token: CancellationToken) {
+        self.start_flush_interval(Some(token)).await
+    }
+}
diff --git a/bin/router/src/schema_state.rs b/bin/router/src/schema_state.rs
index f14cc6cf0..6112409d9 100644
--- a/bin/router/src/schema_state.rs
+++ b/bin/router/src/schema_state.rs
@@ -1,5 +1,6 @@
 use arc_swap::{ArcSwap, Guard};
 use async_trait::async_trait;
+use graphql_parser::schema::Document;
 use graphql_tools::validation::utils::ValidationError;
 use hive_router_config::{supergraph::SupergraphSource, HiveRouterConfig};
 use hive_router_plan_executor::{
@@ -39,6 +40,7 @@ pub struct SupergraphData {
     pub metadata: SchemaMetadata,
     pub planner: Planner,
     pub subgraph_executor_map: SubgraphExecutorMap,
+    pub supergraph_schema: Arc<Document<'static, String>>,
 }
 
 #[derive(Debug, thiserror::Error)]
@@ -124,6 +126,7 @@
         )?;
 
         Ok(SupergraphData {
+            supergraph_schema: Arc::new(parsed_supergraph_sdl),
             metadata,
             planner,
             subgraph_executor_map,
diff --git a/bin/router/src/shared_state.rs b/bin/router/src/shared_state.rs
index f36bda6cd..e182b5a69 100644
--- a/bin/router/src/shared_state.rs
+++ b/bin/router/src/shared_state.rs
@@ -1,4 +1,5 @@
 use graphql_tools::validation::validate::ValidationPlan;
+use hive_console_sdk::agent::UsageAgent;
 use hive_router_config::HiveRouterConfig;
 use hive_router_plan_executor::headers::{
     compile::compile_headers_plan, errors::HeaderRuleCompileError, plan::HeaderRulesPlan,
@@ -18,12 +19,14 @@ pub struct RouterSharedState {
     pub override_labels_evaluator: OverrideLabelsEvaluator,
     pub cors_runtime: Option,
     pub jwt_auth_runtime: Option<JwtAuthRuntime>,
+    pub hive_usage_agent: Option<Arc<UsageAgent>>,
 }
 
 impl RouterSharedState {
     pub fn new(
         router_config: Arc<HiveRouterConfig>,
         jwt_auth_runtime: Option<JwtAuthRuntime>,
+        hive_usage_agent: Option<Arc<UsageAgent>>,
     ) -> Result<Self, SharedStateError> {
         Ok(Self {
             validation_plan: graphql_tools::validation::rules::default_rules_validation_plan(),
@@ -36,6 +39,7 @@ impl RouterSharedState {
             )
             .map_err(Box::new)?,
             jwt_auth_runtime,
+            hive_usage_agent,
         })
     }
 }
@@ -48,4 +52,6 @@ pub enum SharedStateError {
     CORSConfig(#[from] Box),
     #[error("invalid override labels config: {0}")]
     OverrideLabelsCompile(#[from] Box),
+    #[error("error creating hive usage agent: {0}")]
+    UsageAgent(#[from] Box),
 }
diff --git a/docs/README.md b/docs/README.md
index fb474b9d2..7bddc0114 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -16,6 +16,7 @@
 |[**query\_planner**](#query_planner)|`object`|Query planning configuration.<br/>Default: `{"allow_expose":false,"timeout":"10s"}`<br/>||
 |[**supergraph**](#supergraph)|`object`|Configuration for the Federation supergraph source. By default, the router will use a local file-based supergraph source (`./supergraph.graphql`).<br/>||
 |[**traffic\_shaping**](#traffic_shaping)|`object`|Configuration for the traffic-shaper executor. Use these configurations to control how requests are being executed to subgraphs.<br/>Default: `{"dedupe_enabled":true,"max_connections_per_host":100,"pool_idle_timeout_seconds":50}`<br/>||
+|[**usage\_reporting**](#usage_reporting)|`object`|Configuration for usage reporting to GraphQL Hive.<br/>Default: `{"accept_invalid_certs":false,"access_token":"","buffer_size":1000,"client_name_header":"graphql-client-name","client_version_header":"graphql-client-version","connect_timeout":"5s","enabled":false,"endpoint":"https://app.graphql-hive.com/usage","exclude":[],"flush_interval":"5s","request_timeout":"15s","sample_rate":"100%","target_id":null}`<br/>|yes|
 
 **Additional Properties:** not allowed
 **Example**
 
 ```
@@ -110,6 +111,20 @@ traffic_shaping:
   dedupe_enabled: true
   max_connections_per_host: 100
   pool_idle_timeout_seconds: 50
+usage_reporting:
+  accept_invalid_certs: false
+  access_token: ''
+  buffer_size: 1000
+  client_name_header: graphql-client-name
+  client_version_header: graphql-client-version
+  connect_timeout: 5s
+  enabled: false
+  endpoint: https://app.graphql-hive.com/usage
+  exclude: []
+  flush_interval: 5s
+  request_timeout: 15s
+  sample_rate: 100%
+  target_id: null
 ```
 
@@ -1829,4 +1844,58 @@ pool_idle_timeout_seconds: 50
 ```
 
+
+## usage\_reporting: object
+
+Configuration for usage reporting to GraphQL Hive.
+
+
+**Properties**
+
+|Name|Type|Description|Required|
+|----|----|-----------|--------|
+|**accept\_invalid\_certs**|`boolean`|Accepts invalid SSL certificates.<br/>Default: `false`<br/>|no|
+|**access\_token**|`string`|Your [Registry Access Token](https://the-guild.dev/graphql/hive/docs/management/targets#registry-access-tokens) with write permission.<br/>|yes|
+|**buffer\_size**|`integer`|The maximum number of operations to hold in a buffer before sending to Hive Console.<br/>Default: `1000`<br/>Format: `"uint"`<br/>Minimum: `0`<br/>|no|
+|**client\_name\_header**|`string`|Default: `"graphql-client-name"`<br/>|no|
+|**client\_version\_header**|`string`|Default: `"graphql-client-version"`<br/>|no|
+|**connect\_timeout**|`string`|A timeout for only the connect phase of a request to Hive Console.<br/>Default: `"5s"`<br/>|no|
+|**enabled**|`boolean`|Default: `false`<br/>|no|
+|**endpoint**|`string`|For self-hosting, you can override the `/usage` endpoint (defaults to `https://app.graphql-hive.com/usage`).<br/>Default: `"https://app.graphql-hive.com/usage"`<br/>|no|
+|[**exclude**](#usage_reportingexclude)|`string[]`|A list of operations (by name) to be ignored by Hive.<br/>Default: `[]`<br/>|no|
+|**flush\_interval**|`string`|Frequency of flushing the buffer to the server.<br/>Default: `"5s"`<br/>|no|
+|**request\_timeout**|`string`|A timeout for the entire request to Hive Console.<br/>Default: `"15s"`<br/>|no|
+|**sample\_rate**|`string`|The rate at which operations are sampled for usage reporting.<br/>0% = never sent<br/>50% = half of the requests sent<br/>100% = always sent<br/>Default: `"100%"`<br/>|no|
+|**target\_id**|`string`, `null`|A target ID. This can either be a slug following the format “$organizationSlug/$projectSlug/$targetSlug” (e.g. “the-guild/graphql-hive/staging”) or a UUID (e.g. “a0f4c605-6541-4350-8cfe-b31f21a4bf80”). To be used when the token is configured with an organization access token.<br/>|no|
+
+**Additional Properties:** not allowed
+**Example**
+
+```yaml
+accept_invalid_certs: false
+access_token: ''
+buffer_size: 1000
+client_name_header: graphql-client-name
+client_version_header: graphql-client-version
+connect_timeout: 5s
+enabled: false
+endpoint: https://app.graphql-hive.com/usage
+exclude: []
+flush_interval: 5s
+request_timeout: 15s
+sample_rate: 100%
+target_id: null
+
+```
+
+
+### usage\_reporting\.exclude\[\]: array
+
+A list of operations (by name) to be ignored by Hive.
+Example: ["IntrospectionQuery", "MeQuery"]
+
+
+**Items**
+
+**Item Type:** `string`
 
 
diff --git a/lib/executor/Cargo.toml b/lib/executor/Cargo.toml
index c3f6f9117..12c93a5f8 100644
--- a/lib/executor/Cargo.toml
+++ b/lib/executor/Cargo.toml
@@ -30,9 +30,9 @@ xxhash-rust = { workspace = true }
 tokio = { workspace = true, features = ["sync"] }
 dashmap = { workspace = true }
 vrl = { workspace = true }
+regex-automata = { workspace = true }
 
 ahash = "0.8.12"
-regex-automata = "0.4.10"
 strum = { version = "0.27.2", features = ["derive"] }
 ntex-http = "0.1.15"
 ordered-float = "4.2.0"
diff --git a/lib/executor/src/execution/plan.rs b/lib/executor/src/execution/plan.rs
index f86356312..ce6c796c0 100644
--- a/lib/executor/src/execution/plan.rs
+++ b/lib/executor/src/execution/plan.rs
@@ -64,6 +64,7 @@ pub struct QueryPlanExecutionContext<'exec, 'req> {
 pub struct PlanExecutionOutput {
     pub body: Vec<u8>,
     pub headers: HeaderMap,
+    pub error_count: usize,
 }
 
 pub async fn execute_query_plan<'exec, 'req>(
@@ -101,6 +102,7 @@
     })?;
 
     let final_response = &exec_ctx.final_response;
+    let error_count = exec_ctx.errors.len(); // Added for usage reporting
     let body = project_by_operation(
         final_response,
         exec_ctx.errors,
@@ -118,6 +120,7 @@
 
     Ok(PlanExecutionOutput {
         body,
         headers: response_headers,
+        error_count,
     })
 }
diff --git a/lib/router-config/Cargo.toml b/lib/router-config/Cargo.toml
index 93edcc3b5..47aa092a1 100644
--- a/lib/router-config/Cargo.toml
+++ b/lib/router-config/Cargo.toml
@@ -23,6 +23,7 @@ http = { workspace = true }
 jsonwebtoken = { workspace = true }
 retry-policies = { workspace = true}
 tracing = { workspace = true }
+regex-automata = { workspace = true }
 
 schemars = "1.0.4"
 humantime-serde = "1.1.1"
diff --git a/lib/router-config/src/env_overrides.rs b/lib/router-config/src/env_overrides.rs
index f61012967..778f90a33 100644
--- a/lib/router-config/src/env_overrides.rs
+++ b/lib/router-config/src/env_overrides.rs
@@ -33,6 +33,10 @@ pub struct EnvVarOverrides {
     pub hive_console_cdn_key: Option<String>,
     #[envconfig(from = "HIVE_CDN_POLL_INTERVAL")]
     pub hive_console_cdn_poll_interval: Option<String>,
+    #[envconfig(from = "HIVE_ACCESS_TOKEN")]
+    pub hive_access_token: Option<String>,
+    #[envconfig(from = "HIVE_TARGET")]
+    pub hive_target: Option<String>,
 }
 
 #[derive(Debug, thiserror::Error)]
@@ -99,6 +103,14 @@
             }
         }
 
+        if let Some(hive_access_token) = self.hive_access_token.take() {
+            config = config.set_override("usage_reporting.access_token", hive_access_token)?;
+            if let Some(hive_target) = self.hive_target.take() {
+                config = config.set_override("usage_reporting.target_id", hive_target)?;
+            }
+            config = config.set_override("usage_reporting.enabled", true)?;
+        }
+
         // GraphiQL overrides
         if let Some(graphiql_enabled) = self.graphiql_enabled.take() {
             config = config.set_override("graphiql.enabled", graphiql_enabled)?;
diff --git a/lib/router-config/src/lib.rs b/lib/router-config/src/lib.rs
index 537244c9e..3f23b1df2 100644
--- a/lib/router-config/src/lib.rs
+++ b/lib/router-config/src/lib.rs
@@ -12,6 +12,7 @@ pub mod primitives;
 pub mod query_planner;
 pub mod supergraph;
 pub mod traffic_shaping;
+pub mod usage_reporting;
 
 use config::{Config, File, FileFormat, FileSourceFile};
 use envconfig::Envconfig;
@@ -92,6 +93,10 @@ pub struct HiveRouterConfig {
     /// Configuration for overriding labels.
     #[serde(default, skip_serializing_if = "HashMap::is_empty")]
     pub override_labels: OverrideLabelsConfig,
+
+    /// Configuration for usage reporting to GraphQL Hive.
+    #[serde(default)]
+    pub usage_reporting: usage_reporting::UsageReportingConfig,
 }
 
 #[derive(Debug, thiserror::Error)]
diff --git a/lib/router-config/src/usage_reporting.rs b/lib/router-config/src/usage_reporting.rs
new file mode 100644
index 000000000..7ca6761bd
--- /dev/null
+++ b/lib/router-config/src/usage_reporting.rs
@@ -0,0 +1,255 @@
+use std::{fmt::Display, str::FromStr, time::Duration};
+
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Deserialize, Serialize, JsonSchema, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct UsageReportingConfig {
+    #[serde(default = "default_enabled")]
+    pub enabled: bool,
+
+    /// Your [Registry Access Token](https://the-guild.dev/graphql/hive/docs/management/targets#registry-access-tokens) with write permission.
+    pub access_token: String,
+
+    /// A target ID. This can either be a slug following the format “$organizationSlug/$projectSlug/$targetSlug” (e.g. “the-guild/graphql-hive/staging”) or a UUID (e.g. “a0f4c605-6541-4350-8cfe-b31f21a4bf80”). To be used when the token is configured with an organization access token.
+    #[serde(deserialize_with = "deserialize_target_id")]
+    pub target_id: Option<String>,
+    /// For self-hosting, you can override the `/usage` endpoint (defaults to `https://app.graphql-hive.com/usage`).
+    #[serde(default = "default_endpoint")]
+    pub endpoint: String,
+
+    /// The rate at which operations are sampled for usage reporting.
+    /// 0% = never sent
+    /// 50% = half of the requests sent
+    /// 100% = always sent
+    /// Default: 100%
+    #[serde(default = "default_sample_rate")]
+    #[schemars(with = "String")]
+    pub sample_rate: Percentage,
+
+    /// A list of operations (by name) to be ignored by Hive.
+    /// Example: ["IntrospectionQuery", "MeQuery"]
+    #[serde(default)]
+    pub exclude: Vec<String>,
+
+    #[serde(default = "default_client_name_header")]
+    pub client_name_header: String,
+    #[serde(default = "default_client_version_header")]
+    pub client_version_header: String,
+
+    /// The maximum number of operations to hold in a buffer before sending to Hive Console.
+    /// Default: 1000
+    #[serde(default = "default_buffer_size")]
+    pub buffer_size: usize,
+
+    /// Accepts invalid SSL certificates.
+    /// Default: false
+    #[serde(default = "default_accept_invalid_certs")]
+    pub accept_invalid_certs: bool,
+
+    /// A timeout for only the connect phase of a request to Hive Console.
+    /// Default: 5 seconds
+    #[serde(
+        default = "default_connect_timeout",
+        deserialize_with = "humantime_serde::deserialize",
+        serialize_with = "humantime_serde::serialize"
+    )]
+    #[schemars(with = "String")]
+    pub connect_timeout: Duration,
+
+    /// A timeout for the entire request to Hive Console.
+    /// Default: 15 seconds
+    #[serde(
+        default = "default_request_timeout",
+        deserialize_with = "humantime_serde::deserialize",
+        serialize_with = "humantime_serde::serialize"
+    )]
+    #[schemars(with = "String")]
+    pub request_timeout: Duration,
+
+    /// Frequency of flushing the buffer to the server.
+    /// Default: 5 seconds
+    #[serde(
+        default = "default_flush_interval",
+        deserialize_with = "humantime_serde::deserialize",
+        serialize_with = "humantime_serde::serialize"
+    )]
+    #[schemars(with = "String")]
+    pub flush_interval: Duration,
+}
+
+impl Default for UsageReportingConfig {
+    fn default() -> Self {
+        Self {
+            enabled: default_enabled(),
+            access_token: String::new(),
+            target_id: None,
+            endpoint: default_endpoint(),
+            sample_rate: default_sample_rate(),
+            exclude: Vec::new(),
+            client_name_header: default_client_name_header(),
+            client_version_header: default_client_version_header(),
+            buffer_size: default_buffer_size(),
+            accept_invalid_certs: default_accept_invalid_certs(),
+            connect_timeout: default_connect_timeout(),
+            request_timeout: default_request_timeout(),
+            flush_interval: default_flush_interval(),
+        }
+    }
+}
+
+fn default_enabled() -> bool {
+    false
+}
+
+fn default_endpoint() -> String {
+    "https://app.graphql-hive.com/usage".to_string()
+}
+
+fn default_sample_rate() -> Percentage {
+    Percentage::from_f64(1.0).unwrap()
+}
+
+fn default_client_name_header() -> String {
+    "graphql-client-name".to_string()
+}
+
+fn default_client_version_header() -> String {
+    "graphql-client-version".to_string()
+}
+
+fn default_buffer_size() -> usize {
+    1000
+}
+
+fn default_accept_invalid_certs() -> bool {
+    false
+}
+
+fn default_request_timeout() -> Duration {
+    Duration::from_secs(15)
+}
+
+fn default_connect_timeout() -> Duration {
+    Duration::from_secs(5)
+}
+
+fn default_flush_interval() -> Duration {
+    Duration::from_secs(5)
+}
+
+// Target ID regexp for validation: slug format
+const TARGET_ID_SLUG_REGEX: &str = r"^[a-zA-Z0-9-_]+\/[a-zA-Z0-9-_]+\/[a-zA-Z0-9-_]+$";
+// Target ID regexp for validation: UUID format
+const TARGET_ID_UUID_REGEX: &str =
+    r"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$";
+
+fn deserialize_target_id<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let opt = Option::<String>::deserialize(deserializer)?;
+    if let Some(ref s) = opt {
+        let trimmed_s = s.trim();
+        if trimmed_s.is_empty() {
+            Ok(None)
+        } else {
+            let slug_regex =
+                regex_automata::meta::Regex::new(TARGET_ID_SLUG_REGEX).map_err(|err| {
+                    serde::de::Error::custom(format!(
+                        "Failed to compile target_id slug regex: {}",
+                        err
+                    ))
+                })?;
+            if slug_regex.is_match(trimmed_s) {
+                return Ok(Some(trimmed_s.to_string()));
+            }
+            let uuid_regex =
+                regex_automata::meta::Regex::new(TARGET_ID_UUID_REGEX).map_err(|err| {
+                    serde::de::Error::custom(format!(
+                        "Failed to compile target_id UUID regex: {}",
+                        err
+                    ))
+                })?;
+            if uuid_regex.is_match(trimmed_s) {
+                return Ok(Some(trimmed_s.to_string()));
+            }
+            Err(serde::de::Error::custom(format!(
+                "Invalid target_id format: '{}'. It must be either in slug format '$organizationSlug/$projectSlug/$targetSlug' or UUID format 'a0f4c605-6541-4350-8cfe-b31f21a4bf80'",
+                trimmed_s
+            )))
+        }
+    } else {
+        Ok(None)
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct Percentage {
+    value: f64,
+}
+
+impl Percentage {
+    pub fn from_f64(value: f64) -> Result<Self, String> {
+        if !(0.0..=1.0).contains(&value) {
+            return Err(format!(
+                "Percentage value must be between 0 and 1, got: {}",
+                value
+            ));
+        }
+        Ok(Percentage { value })
+    }
+    pub fn as_f64(&self) -> f64 {
+        self.value
+    }
+}
+
+impl FromStr for Percentage {
+    type Err = String;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let s_trimmed = s.trim();
+        if let Some(number_part) = s_trimmed.strip_suffix('%') {
+            let value: f64 = number_part.parse().map_err(|err| {
+                format!(
+                    "Failed to parse percentage value '{}': {}",
+                    number_part, err
+                )
+            })?;
+            Ok(Percentage::from_f64(value / 100.0)?)
+        } else {
+            Err(format!(
+                "Percentage value must end with '%', got: '{}'",
+                s_trimmed
+            ))
+        }
+    }
+}
+
+impl Display for Percentage {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}%", self.value * 100.0)
+    }
+}
+
+// Deserializer from `n%` string to `Percentage` struct
+impl<'de> Deserialize<'de> for Percentage {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let s = String::deserialize(deserializer)?;
+        Percentage::from_str(&s).map_err(serde::de::Error::custom)
+    }
+}
+
+// Serializer from `Percentage` struct to `n%` string
+impl Serialize for Percentage {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        serializer.serialize_str(&self.to_string())
+    }
+}
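For anyone trying this branch locally, a minimal configuration sketch that enables the new reporting path, using only fields introduced in `lib/router-config/src/usage_reporting.rs` above. The token and target values are placeholders; setting `HIVE_ACCESS_TOKEN` / `HIVE_TARGET` (see the `env_overrides.rs` change) would override the same keys and force `enabled: true`:

```yaml
# Hypothetical router config snippet; token and target values are placeholders.
usage_reporting:
  enabled: true
  access_token: "<your-registry-access-token>"
  # Slug format "$organizationSlug/$projectSlug/$targetSlug", or a UUID.
  target_id: "the-guild/graphql-hive/staging"
  # Report roughly half of all executed operations.
  sample_rate: 50%
  # Never report introspection traffic.
  exclude:
    - IntrospectionQuery
```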
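And a quick sanity check of the `Percentage` parsing rules, written as a hypothetical test module against the code as shown (it assumes it sits next to `Percentage` in `usage_reporting.rs`): `from_str` accepts only `%`-suffixed values, and `from_f64` bounds the stored value to `0.0..=1.0`, which is what the sampler in `collect_usage_report` compares against.

```rust
#[cfg(test)]
mod percentage_behavior_sketch {
    use std::str::FromStr;

    use super::Percentage; // the type defined above in usage_reporting.rs

    #[test]
    fn parses_percent_strings() {
        // "50%" parses to 0.5 (50.0 / 100.0 is exactly representable).
        assert_eq!(Percentage::from_str("50%").unwrap().as_f64(), 0.5);
        // A missing '%' suffix is rejected by from_str...
        assert!(Percentage::from_str("0.5").is_err());
        // ...and anything outside 0%..=100% is rejected by from_f64.
        assert!(Percentage::from_str("150%").is_err());
    }
}
```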