├── dakia ├── src │ ├── qe │ │ ├── mod.rs │ │ └── query.rs │ ├── proxy │ │ ├── mod.rs │ │ └── http │ │ │ ├── mod.rs │ │ │ ├── ctx.rs │ │ │ ├── helpers.rs │ │ │ ├── proxy.rs │ │ │ └── session.rs │ ├── gateway │ │ ├── interceptors │ │ │ ├── request_id │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ └── interceptor.rs │ │ │ ├── rate_limiter │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ └── interceptor.rs │ │ │ ├── short_circuit │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ ├── interceptor.rs │ │ │ │ └── response_parts.rs │ │ │ ├── request_rewrite │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ ├── interceptor.rs │ │ │ │ └── rewrite_parts.rs │ │ │ ├── response_rewrite │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ ├── interceptor.rs │ │ │ │ └── rewrite_parts.rs │ │ │ ├── use_file │ │ │ │ ├── mod.rs │ │ │ │ ├── interceptor.rs │ │ │ │ └── builder.rs │ │ │ ├── basic_auth │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ └── interceptor.rs │ │ │ ├── controller │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ └── interceptor.rs │ │ │ ├── server_version │ │ │ │ ├── mod.rs │ │ │ │ ├── builder.rs │ │ │ │ └── interceptor.rs │ │ │ └── mod.rs │ │ ├── filter │ │ │ ├── mod.rs │ │ │ ├── builder.rs │ │ │ ├── operator.rs │ │ │ ├── executor.rs │ │ │ └── query2filter.rs │ │ ├── interceptor │ │ │ ├── mod.rs │ │ │ ├── hook.rs │ │ │ ├── insertceptor_name.rs │ │ │ ├── interceptor.rs │ │ │ ├── phase.rs │ │ │ └── executor.rs │ │ ├── mod.rs │ │ ├── registry_builder.rs │ │ ├── lb │ │ │ └── mod.rs │ │ ├── interceptor_builder │ │ │ ├── mod.rs │ │ │ └── utils.rs │ │ └── state.rs │ ├── shared │ │ ├── into.rs │ │ ├── mod.rs │ │ ├── registry.rs │ │ ├── mutable_registry.rs │ │ ├── common.rs │ │ ├── pattern_matcher.rs │ │ ├── pattern_registry.rs │ │ └── dakia_state.rs │ ├── config │ │ ├── mod.rs │ │ ├── source_config │ │ │ ├── router_config.rs │ │ │ ├── inet_address.rs │ │ │ ├── interceptor_config.rs │ │ │ ├── downstream_config.rs │ │ │ ├── mod.rs │ │ │ ├── upstream_config.rs │ │ │ ├── 
gateway_config.rs │ │ │ └── source_dakia_config.rs │ │ ├── upstream.rs │ │ ├── args.rs │ │ └── dakia_config.rs │ ├── error │ │ ├── immut_str.rs │ │ ├── result.rs │ │ └── mod.rs │ └── main.rs ├── Cargo.toml └── build.rs ├── docs ├── README.md ├── quick_start.md ├── cli.md └── config.sample.yaml ├── .gitignore ├── LICENSE └── README.md /dakia/src/qe/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod query; 2 | -------------------------------------------------------------------------------- /dakia/src/proxy/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod http; 2 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Docs 2 | 3 | - [Quick Start](./quick_start.md) 4 | - [CLI](./cli.md) 5 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_id/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | pub use builder::RequestIdInterceptorBuilder; 4 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/rate_limiter/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | 4 | pub use builder::RateLimiterInterceptorBuilder; 5 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/short_circuit/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | mod response_parts; 4 | 5 | pub use builder::ShortCircuitInterceptorBuilder; 6 | -------------------------------------------------------------------------------- 
/dakia/src/gateway/interceptors/request_rewrite/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | mod rewrite_parts; 4 | 5 | pub use builder::RequestRewriteInterceptorBuilder; 6 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/response_rewrite/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | mod rewrite_parts; 4 | 5 | pub use builder::ResponseRewriteInterceptorBuilder; 6 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/use_file/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | pub use builder::UseFileInterceptorBuilder; 4 | pub use interceptor::UseFileInterceptor; 5 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/basic_auth/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | pub use builder::BasicAuthInterceptorBuilder; 4 | pub use interceptor::BasicAuthInterceptor; 5 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/controller/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | pub use builder::ControllerInterceptorBuilder; 4 | pub use interceptor::ControllerInterceptor; 5 | -------------------------------------------------------------------------------- /dakia/src/shared/into.rs: -------------------------------------------------------------------------------- 1 | pub trait IntoRef: Sized { 2 | /// Converts this type into the (usually inferred) input type. 
3 | #[must_use] 4 | fn into_ref(&self) -> T; 5 | } 6 | -------------------------------------------------------------------------------- /dakia/src/shared/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod common; 2 | pub mod dakia_state; 3 | pub mod into; 4 | pub mod mutable_registry; 5 | pub mod pattern_matcher; 6 | pub mod pattern_registry; 7 | pub mod registry; 8 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/server_version/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod interceptor; 3 | 4 | pub use builder::ServerVersionInterceptorBuilder; 5 | pub use interceptor::ServerVersionInterceptor; 6 | -------------------------------------------------------------------------------- /dakia/src/config/mod.rs: -------------------------------------------------------------------------------- 1 | mod args; 2 | mod dakia_config; 3 | mod upstream; 4 | 5 | pub mod source_config; 6 | pub use args::DakiaArgs; 7 | pub use dakia_config::*; 8 | pub use source_config::InetAddress; 9 | -------------------------------------------------------------------------------- /dakia/src/proxy/http/mod.rs: -------------------------------------------------------------------------------- 1 | mod ctx; 2 | mod helpers; 3 | mod proxy; 4 | mod session; 5 | 6 | pub use ctx::DakiaHttpGatewayCtx; 7 | pub use proxy::Proxy; 8 | pub use session::{HeaderBuffer, Session}; 9 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/router_config.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct RouterConfig { 5 | pub filter: Option, 6 | pub upstream: String, 7 | } 8 | 
-------------------------------------------------------------------------------- /dakia/src/gateway/filter/mod.rs: -------------------------------------------------------------------------------- 1 | mod builder; 2 | mod executor; 3 | mod operator; 4 | mod query2filter; 5 | 6 | pub use builder::build_filter_registry; 7 | pub use executor::exec_filter; 8 | pub use operator::Filter; 9 | pub use query2filter::query2filter; 10 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod executor; 2 | mod hook; 3 | mod insertceptor_name; 4 | mod interceptor; 5 | mod phase; 6 | 7 | pub use hook::*; 8 | pub use insertceptor_name::InterceptorName; 9 | pub use interceptor::*; 10 | pub use phase::*; 11 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod basic_auth; 2 | pub mod controller; 3 | pub mod rate_limiter; 4 | pub mod request_id; 5 | pub mod request_rewrite; 6 | pub mod response_rewrite; 7 | pub mod server_version; 8 | pub mod short_circuit; 9 | pub mod use_file; 10 | -------------------------------------------------------------------------------- /dakia/src/config/upstream.rs: -------------------------------------------------------------------------------- 1 | use serde; 2 | 3 | use super::InetAddress; 4 | 5 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 6 | pub struct UpstreamNodeConfig { 7 | pub address: InetAddress, 8 | pub tls: bool, 9 | pub sni: Option, 10 | pub weight: Option, 11 | } 12 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/inet_address.rs: -------------------------------------------------------------------------------- 1 | use serde; 2 | 3 | 
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 4 | pub struct InetAddress { 5 | pub host: String, 6 | pub port: u16, 7 | } 8 | 9 | impl InetAddress { 10 | pub fn get_formatted_address(&self) -> String { 11 | format!("{}:{}", self.host, self.port) 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /dakia/src/shared/registry.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | 3 | use crate::error::DakiaResult; 4 | 5 | // TODO: create a generic struct for Registry to improve runtime performance, as many places traits are not strictly required 6 | #[async_trait] 7 | pub trait Registry { 8 | async fn register(&self, key: String, item: I) -> (); 9 | async fn get(&self, key: &str) -> DakiaResult>; 10 | } 11 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/interceptor_config.rs: -------------------------------------------------------------------------------- 1 | use crate::{gateway::interceptor::InterceptorName, qe::query::Query}; 2 | 3 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 4 | pub struct InterceptorConfig { 5 | pub name: InterceptorName, 6 | pub enabled: bool, 7 | pub filter: Option, 8 | pub config: Option, 9 | pub rewrite: Option, 10 | pub response: Option, 11 | } 12 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/downstream_config.rs: -------------------------------------------------------------------------------- 1 | use serde; 2 | 3 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 4 | pub struct DownstreamConfig { 5 | pub host: String, 6 | pub port: Option, 7 | } 8 | 9 | impl DownstreamConfig { 10 | pub fn get_formatted_address(&self) -> String { 11 | match self.port { 12 | Some(port) => format!("{}:{}", self.host, port), 13 | None => self.host.clone(), 14 | } 15 | 
} 16 | } 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | replay_pid* 25 | 26 | 27 | # Added by cargo 28 | 29 | */target 30 | /test 31 | /.vscode 32 | /local/* 33 | todo.txt 34 | /example* 35 | /local_gateway_config* 36 | /design/* 37 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/mod.rs: -------------------------------------------------------------------------------- 1 | mod downstream_config; 2 | mod gateway_config; 3 | mod inet_address; 4 | mod interceptor_config; 5 | mod router_config; 6 | mod upstream_config; 7 | 8 | pub use downstream_config::DownstreamConfig; 9 | pub use gateway_config::find_router_config_or_err; 10 | pub use gateway_config::GatewayConfig; 11 | pub use inet_address::InetAddress; 12 | pub use interceptor_config::*; 13 | pub use router_config::RouterConfig; 14 | pub use upstream_config::*; 15 | mod source_dakia_config; 16 | 17 | pub use source_dakia_config::SourceDakiaRawConfig; 18 | -------------------------------------------------------------------------------- /dakia/src/shared/mutable_registry.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | #[derive(Clone)] 4 | pub struct Registry { 5 | items: HashMap, 6 | } 7 | 8 | impl Registry { 9 | pub fn build() -> Self { 10 | Registry { 11 | items: HashMap::new(), 12 | } 13 | } 14 | } 15 | 16 | impl Registry { 17 | pub fn get(&self, key: &str) -> 
Option<&I> { 18 | self.items.get(key) 19 | } 20 | 21 | pub fn add(&mut self, key: String, item: I) { 22 | self.items.insert(key, item); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /dakia/src/proxy/http/ctx.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::gateway::state::GatewayState; 4 | 5 | use super::HeaderBuffer; 6 | 7 | pub struct DakiaHttpGatewayCtx { 8 | pub gateway_state: Arc, 9 | pub ds_res_header_buffer: HeaderBuffer, 10 | pub us_req_header_buffer: HeaderBuffer, 11 | } 12 | 13 | impl DakiaHttpGatewayCtx { 14 | pub fn new(gateway_state: Arc) -> DakiaHttpGatewayCtx { 15 | DakiaHttpGatewayCtx { 16 | gateway_state, 17 | ds_res_header_buffer: HeaderBuffer::new(), 18 | us_req_header_buffer: HeaderBuffer::new(), 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /dakia/src/shared/common.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{DakiaError, DakiaResult, ImmutStr}; 2 | 3 | include!(concat!(env!("OUT_DIR"), "/ascii_version.rs")); 4 | include!(concat!(env!("OUT_DIR"), "/dakia_ascii_art.rs")); 5 | 6 | pub fn exit() { 7 | std::process::exit(0); 8 | } 9 | 10 | pub fn get_dakia_ascii_art() -> String { 11 | DAKIA_ASCII_ART.to_string() + "\n\n" + get_ascii_version() 12 | } 13 | 14 | pub fn get_ascii_version() -> &'static str { 15 | ASCII_VERSION 16 | } 17 | 18 | pub fn get_dakia_version() -> &'static str { 19 | env!("CARGO_PKG_VERSION") 20 | } 21 | 22 | pub fn _assert(cond: bool, msg: String) -> DakiaResult<()> { 23 | Ok(if !cond { 24 | return Err(DakiaError::i_explain(ImmutStr::Owned(msg.into_boxed_str()))); 25 | }) 26 | } 27 | -------------------------------------------------------------------------------- /dakia/src/gateway/filter/builder.rs: 
-------------------------------------------------------------------------------- 1 | use crate::{ 2 | config::source_config::GatewayConfig, error::DakiaResult, gateway::filter::query2filter, 3 | qe::query::extract_key_str_or_err, shared::mutable_registry::Registry, 4 | }; 5 | 6 | use super::Filter; 7 | 8 | pub fn build_filter_registry(gateway_config: &mut GatewayConfig) -> DakiaResult> { 9 | let mut registry: Registry = Registry::build(); 10 | 11 | for filter_config in &mut gateway_config.filters { 12 | let filter_name = extract_key_str_or_err(&filter_config, "name")?.to_string(); 13 | filter_config.remove("name"); 14 | 15 | let filter = query2filter(filter_config)?; 16 | registry.add(filter_name, filter); 17 | } 18 | 19 | Ok(registry) 20 | } 21 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/controller/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::DakiaResult, 6 | gateway::{interceptor::Interceptor, interceptor_builder::InterceptorBuilder}, 7 | }; 8 | 9 | use super::ControllerInterceptor; 10 | 11 | pub struct ControllerInterceptorBuilder {} 12 | 13 | impl Default for ControllerInterceptorBuilder { 14 | fn default() -> Self { 15 | Self {} 16 | } 17 | } 18 | 19 | impl InterceptorBuilder for ControllerInterceptorBuilder { 20 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 21 | let interceptor = ControllerInterceptor::build(_interceptor_config.filter); 22 | Ok(Arc::new(interceptor)) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/server_version/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | 
error::DakiaResult, 6 | gateway::{ 7 | interceptor::Interceptor, interceptor_builder::InterceptorBuilder, 8 | interceptors::server_version::ServerVersionInterceptor, 9 | }, 10 | }; 11 | 12 | pub struct ServerVersionInterceptorBuilder {} 13 | 14 | impl Default for ServerVersionInterceptorBuilder { 15 | fn default() -> Self { 16 | Self {} 17 | } 18 | } 19 | 20 | impl InterceptorBuilder for ServerVersionInterceptorBuilder { 21 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 22 | let interceptor = ServerVersionInterceptor {}; 23 | Ok(Arc::new(interceptor)) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /dakia/src/shared/pattern_matcher.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | use pcre2::bytes::Regex; 4 | 5 | use crate::error::{BErrorStd, DakiaResult}; 6 | 7 | #[derive(Debug, Clone)] 8 | pub struct Pcre2PatternMatcher { 9 | regex: Regex, 10 | } 11 | 12 | impl Pcre2PatternMatcher { 13 | pub fn build(pattern: &str) -> DakiaResult { 14 | let pcre2regex = Regex::new(pattern)?; 15 | let matcher = Self { regex: pcre2regex }; 16 | Ok(matcher) 17 | } 18 | } 19 | 20 | impl PatternMatcher for Pcre2PatternMatcher { 21 | fn is_match(&self, text: &[u8]) -> Result { 22 | let is_matched = self.regex.is_match(text)?; 23 | Ok(is_matched) 24 | } 25 | } 26 | 27 | pub trait PatternMatcher: Send + Sync + Debug { 28 | fn is_match(&self, text: &[u8]) -> Result; 29 | } 30 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_id/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::DakiaResult, 6 | gateway::{ 7 | interceptor::Interceptor, interceptor_builder::InterceptorBuilder, 8 | 
interceptors::request_id::interceptor::RequestIdInterceptor, 9 | }, 10 | }; 11 | 12 | pub struct RequestIdInterceptorBuilder {} 13 | 14 | impl Default for RequestIdInterceptorBuilder { 15 | fn default() -> Self { 16 | Self {} 17 | } 18 | } 19 | 20 | impl InterceptorBuilder for RequestIdInterceptorBuilder { 21 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 22 | let interceptor = RequestIdInterceptor::build(_interceptor_config.filter); 23 | Ok(Arc::new(interceptor)) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor/hook.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | pub type HookMask = u8; 4 | 5 | #[derive(PartialEq, Clone, Debug, Eq)] 6 | pub enum Hook { 7 | PreDownstreamResponseHeaderFlush = 0x01, 8 | } 9 | 10 | impl fmt::Display for Hook { 11 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 12 | let phase_str = match self { 13 | Hook::PreDownstreamResponseHeaderFlush => "pre_downstream_request_header_flush", 14 | }; 15 | write!(f, "{}", phase_str) 16 | } 17 | } 18 | 19 | impl Hook { 20 | pub fn mask(&self) -> HookMask { 21 | self.clone() as HookMask 22 | } 23 | 24 | pub fn all_hook_mask() -> HookMask { 25 | Hook::PreDownstreamResponseHeaderFlush.mask() 26 | } 27 | } 28 | 29 | pub fn is_hook_enabled(hook_mask: HookMask, hook: &Hook) -> bool { 30 | (hook_mask & hook.mask()) == hook.mask() 31 | } 32 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/response_rewrite/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::DakiaResult, 6 | gateway::{interceptor::Interceptor, interceptor_builder::InterceptorBuilder}, 7 | }; 8 | 9 | use super::{interceptor::ResponseRewriteInterceptor, 
rewrite_parts::RewriteParts}; 10 | 11 | pub struct ResponseRewriteInterceptorBuilder {} 12 | 13 | impl Default for ResponseRewriteInterceptorBuilder { 14 | fn default() -> Self { 15 | Self {} 16 | } 17 | } 18 | 19 | impl InterceptorBuilder for ResponseRewriteInterceptorBuilder { 20 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 21 | let rewrite_parts = RewriteParts::build(&_interceptor_config)?; 22 | let interceptor = 23 | ResponseRewriteInterceptor::build(_interceptor_config.filter, rewrite_parts); 24 | Ok(Arc::new(interceptor)) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_rewrite/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::DakiaResult, 6 | gateway::{interceptor::Interceptor, interceptor_builder::InterceptorBuilder}, 7 | }; 8 | 9 | use super::{interceptor::RequestRewriteInterceptor, rewrite_parts::RewriteParts}; 10 | 11 | pub struct RequestRewriteInterceptorBuilder {} 12 | 13 | impl Default for RequestRewriteInterceptorBuilder { 14 | fn default() -> Self { 15 | Self {} 16 | } 17 | } 18 | 19 | impl InterceptorBuilder for RequestRewriteInterceptorBuilder { 20 | fn build( 21 | &self, 22 | _interceptor_config: InterceptorConfig, 23 | ) -> DakiaResult> { 24 | let rewrite_parts = RewriteParts::build(&_interceptor_config)?; 25 | let interceptor = 26 | RequestRewriteInterceptor::build(_interceptor_config.filter, rewrite_parts); 27 | Ok(Arc::new(interceptor)) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/short_circuit/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | 
error::DakiaResult, 6 | gateway::{ 7 | interceptor::Interceptor, 8 | interceptor_builder::InterceptorBuilder, 9 | interceptors::short_circuit::{ 10 | interceptor::ShortCircuitInterceptor, response_parts::ResponseParts, 11 | }, 12 | }, 13 | }; 14 | 15 | pub struct ShortCircuitInterceptorBuilder {} 16 | 17 | impl Default for ShortCircuitInterceptorBuilder { 18 | fn default() -> Self { 19 | Self {} 20 | } 21 | } 22 | 23 | impl InterceptorBuilder for ShortCircuitInterceptorBuilder { 24 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 25 | let response_parts = ResponseParts::build(&_interceptor_config)?; 26 | 27 | let interceptor = 28 | ShortCircuitInterceptor::build(_interceptor_config.filter, response_parts); 29 | 30 | Ok(Arc::new(interceptor)) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor/insertceptor_name.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] 2 | #[serde(rename_all = "snake_case")] 3 | pub enum InterceptorName { 4 | ServerVersion, 5 | UseFile, 6 | BasicAuth, 7 | Controller, 8 | RateLimiter, 9 | RequestRewrite, 10 | ResponseRewrite, 11 | ShortCircuit, 12 | RequestId, 13 | } 14 | 15 | impl InterceptorName { 16 | pub fn as_str(&self) -> &'static str { 17 | match self { 18 | InterceptorName::ServerVersion => "server_version", 19 | InterceptorName::UseFile => "use_file", 20 | InterceptorName::BasicAuth => "basic_auth", 21 | InterceptorName::Controller => "controller", 22 | InterceptorName::RateLimiter => "rate_limiter", 23 | InterceptorName::RequestRewrite => "request_rewrite", 24 | InterceptorName::ResponseRewrite => "response_rewrite", 25 | InterceptorName::ShortCircuit => "short_circuit", 26 | InterceptorName::RequestId => "request_id", 27 | } 28 | } 29 | } 30 | 
-------------------------------------------------------------------------------- /dakia/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dakia" 3 | version = "0.1.1" 4 | edition = "2021" 5 | authors = ["Rahul Kumar"] 6 | description = "Dakia is a powerful and flexible API Gateway designed for modern web applications" 7 | license = "MIT" 8 | repository = "https://github.com/ats1999/dakia" 9 | documentation = "https://github.com/ats1999/dakia" 10 | 11 | 12 | [dependencies] 13 | tokio = { version = "1", features = ["full"] } 14 | async-trait = "0.1" 15 | env_logger = "0.9" 16 | pingora-core = { version = "0.4.0" } 17 | pingora-proxy = { version = "0.4.0" } 18 | pingora-http = { version = "0.4.0" } 19 | pingora = { version = "0.4.0", features = ["lb"] } 20 | clap = { version = "3.2.25", features = ["derive"] } 21 | serde_yaml = "0.9.34" 22 | serde = "1.0.216" 23 | wildmatch = "2.4.0" 24 | log = "0.4" 25 | bytes = "1.0" 26 | http = "1.0.0" 27 | arc-swap = "1.7.1" 28 | once_cell = "1.20.2" 29 | dashmap = "6.1.0" 30 | crossbeam-epoch = "0.9.18" 31 | rand = "0.8.5" 32 | pcre2 = "0.2.9" 33 | base64 = "0.22.1" 34 | serde_json = "1.0.140" 35 | uuid = { version = "1.16.0", features = ["v4"] } 36 | [build-dependencies] 37 | figlet-rs = "0.1.5" 38 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_id/interceptor.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | 3 | use crate::{ 4 | error::DakiaResult, 5 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask}, 6 | proxy::http::Session, 7 | }; 8 | 9 | pub struct RequestIdInterceptor { 10 | filter: Option, 11 | } 12 | 13 | impl RequestIdInterceptor { 14 | pub fn build(filter: Option) -> Self { 15 | Self { filter } 16 | } 17 | } 18 | 19 | #[async_trait] 20 | impl Interceptor for RequestIdInterceptor { 21 
| fn name(&self) -> InterceptorName { 22 | InterceptorName::RequestId 23 | } 24 | 25 | fn phase_mask(&self) -> PhaseMask { 26 | Phase::Init.mask() 27 | } 28 | 29 | fn filter(&self) -> &Option { 30 | &self.filter 31 | } 32 | 33 | async fn init(&self, _session: &mut Session) -> DakiaResult<()> { 34 | let req_id = uuid::Uuid::new_v4(); 35 | let req_id = req_id.to_string().as_bytes().to_vec(); 36 | _session.set_ds_res_header("X-Request-Id".to_string(), req_id.clone()); 37 | _session.set_us_req_header("X-Request-Id".to_string(), req_id); 38 | Ok(()) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /dakia/src/error/immut_str.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | // A data struct that holds either immutable string or reference to static str. 4 | // Compared to String or `Box`, it avoids memory allocation on static str. 5 | #[derive(Debug, PartialEq, Eq, Clone)] 6 | pub enum ImmutStr { 7 | Static(&'static str), 8 | Owned(Box), 9 | } 10 | 11 | impl ImmutStr { 12 | #[inline] 13 | pub fn as_str(&self) -> &str { 14 | match self { 15 | ImmutStr::Static(s) => s, 16 | ImmutStr::Owned(s) => s.as_ref(), 17 | } 18 | } 19 | 20 | pub fn is_owned(&self) -> bool { 21 | match self { 22 | ImmutStr::Static(_) => false, 23 | ImmutStr::Owned(_) => true, 24 | } 25 | } 26 | } 27 | 28 | impl fmt::Display for ImmutStr { 29 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 30 | write!(f, "{}", self.as_str()) 31 | } 32 | } 33 | 34 | impl From<&'static str> for ImmutStr { 35 | fn from(s: &'static str) -> Self { 36 | ImmutStr::Static(s) 37 | } 38 | } 39 | 40 | impl From for ImmutStr { 41 | fn from(s: String) -> Self { 42 | ImmutStr::Owned(s.into_boxed_str()) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dakia/src/gateway/mod.rs: -------------------------------------------------------------------------------- 1 | 
pub mod filter; 2 | pub mod interceptor; 3 | pub mod interceptor_builder; 4 | pub mod interceptors; 5 | pub mod lb; 6 | pub mod registry_builder; 7 | pub mod state; 8 | 9 | use super::Proxy; 10 | use pingora::{server::configuration::ServerConf, services::listening::Service}; 11 | use pingora_proxy::{http_proxy_service_with_name, HttpProxy}; 12 | use state::GatewayStateStore; 13 | use std::sync::Arc; 14 | 15 | use crate::error::DakiaResult; 16 | 17 | pub type HttpGateway = Service>; 18 | 19 | pub async fn build_http( 20 | gateway_state_store: Arc, 21 | server_conf: Arc, 22 | ) -> DakiaResult { 23 | let proxy = Proxy::build(gateway_state_store.clone()).await?; 24 | let mut http_proxy_service = 25 | http_proxy_service_with_name(&server_conf, proxy, "Dakia HTTP Proxy"); 26 | 27 | let gateway_state = &gateway_state_store.get_state(); 28 | let bind_addresses = &gateway_state.gateway_config().bind_addresses; 29 | 30 | for inet_address in bind_addresses { 31 | let addr = inet_address.get_formatted_address(); 32 | http_proxy_service.add_tcp(&addr); 33 | } 34 | 35 | Ok(http_proxy_service) 36 | } 37 | -------------------------------------------------------------------------------- /dakia/src/shared/pattern_registry.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use std::{collections::HashMap, sync::Arc}; 3 | use tokio::sync::RwLock; 4 | 5 | use crate::error::DakiaResult; 6 | 7 | use super::{pattern_matcher::PatternMatcher, registry::Registry}; 8 | 9 | pub struct PatternRegistry { 10 | registry: RwLock>>, 11 | } 12 | 13 | impl PatternRegistry { 14 | pub fn build() -> Self { 15 | Self { 16 | registry: RwLock::new(HashMap::new()), 17 | } 18 | } 19 | } 20 | 21 | #[async_trait] 22 | impl Registry> for PatternRegistry { 23 | async fn register(&self, key: String, item: Arc) { 24 | let mut write_guard = self.registry.write().await; 25 | write_guard.insert(key, item); 26 | } 27 | 28 | async fn get(&self, key: 
&str) -> DakiaResult>> { 29 | let read_guard = self.registry.read().await; 30 | let matcher = read_guard.get(key); 31 | match matcher { 32 | None => Ok(None), 33 | Some(matcher) => Ok(Some(matcher.clone())), 34 | } 35 | } 36 | } 37 | 38 | pub type PatternRegistryType = Arc> + Send + Sync>; 39 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/response_rewrite/interceptor.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | 3 | use crate::{ 4 | error::DakiaResult, 5 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult}, 6 | proxy::http::Session, 7 | }; 8 | 9 | use super::rewrite_parts::RewriteParts; 10 | 11 | pub struct ResponseRewriteInterceptor { 12 | filter: Option, 13 | rewrite_parts: RewriteParts, 14 | } 15 | 16 | impl ResponseRewriteInterceptor { 17 | pub fn build(filter: Option, rewrite_parts: RewriteParts) -> Self { 18 | Self { 19 | filter, 20 | rewrite_parts, 21 | } 22 | } 23 | } 24 | 25 | #[async_trait] 26 | impl Interceptor for ResponseRewriteInterceptor { 27 | fn name(&self) -> InterceptorName { 28 | InterceptorName::ResponseRewrite 29 | } 30 | 31 | fn phase_mask(&self) -> PhaseMask { 32 | Phase::Init.mask() 33 | } 34 | 35 | fn filter(&self) -> &Option { 36 | &self.filter 37 | } 38 | 39 | async fn init(&self, _session: &mut Session) -> DakiaResult<()> { 40 | for (header_name, header_value) in &self.rewrite_parts.header_buffer { 41 | _session.set_ds_res_header(header_name.clone(), header_value.clone()); 42 | } 43 | 44 | Ok(()) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/short_circuit/interceptor.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | 3 | use crate::{ 4 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, 
PhaseResult}, 5 | proxy::http::Session, 6 | }; 7 | 8 | use super::response_parts::ResponseParts; 9 | 10 | pub struct ShortCircuitInterceptor { 11 | filter: Option, 12 | response_parts: ResponseParts, 13 | } 14 | 15 | impl ShortCircuitInterceptor { 16 | pub fn build(filter: Option, response_parts: ResponseParts) -> Self { 17 | Self { 18 | filter, 19 | response_parts, 20 | } 21 | } 22 | } 23 | 24 | #[async_trait] 25 | impl Interceptor for ShortCircuitInterceptor { 26 | fn name(&self) -> InterceptorName { 27 | InterceptorName::ShortCircuit 28 | } 29 | 30 | fn phase_mask(&self) -> PhaseMask { 31 | Phase::UpstreamProxyFilter.mask() 32 | } 33 | 34 | fn filter(&self) -> &Option { 35 | &self.filter 36 | } 37 | 38 | async fn upstream_proxy_filter(&self, _session: &mut Session) -> PhaseResult { 39 | for (header_name, header_value) in &self.response_parts.header_buffer { 40 | _session.set_ds_res_header(header_name.clone(), header_value.clone()); 41 | } 42 | 43 | if let Some(status_code) = self.response_parts.status_code { 44 | _session.set_res_status(status_code); 45 | } 46 | 47 | Ok(true) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/server_version/interceptor.rs: -------------------------------------------------------------------------------- 1 | use std::sync::OnceLock; 2 | 3 | use async_trait::async_trait; 4 | 5 | use crate::{ 6 | gateway::interceptor::{ 7 | Hook, HookMask, Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult, 8 | }, 9 | proxy::http::Session, 10 | shared::common::get_dakia_version, 11 | }; 12 | 13 | const SERVER_HEADER_NAME: &str = "Server"; 14 | static SERVER_HEADER_BYTES: OnceLock> = OnceLock::new(); 15 | 16 | pub struct ServerVersionInterceptor {} 17 | 18 | impl ServerVersionInterceptor { 19 | fn insert_header(&self, _session: &mut Session) -> PhaseResult { 20 | let header_value = SERVER_HEADER_BYTES.get_or_init(|| { 21 | let hval = format!("Dakia/{}", 
get_dakia_version()); 22 | hval.as_bytes().to_vec() 23 | }); 24 | 25 | _session.set_ds_res_header(SERVER_HEADER_NAME.to_owned(), header_value.clone()); 26 | Ok(false) 27 | } 28 | } 29 | 30 | #[async_trait] 31 | impl Interceptor for ServerVersionInterceptor { 32 | fn name(&self) -> InterceptorName { 33 | InterceptorName::ServerVersion 34 | } 35 | 36 | fn phase_mask(&self) -> PhaseMask { 37 | Phase::all_phase_mask() 38 | } 39 | 40 | fn hook_mask(&self) -> HookMask { 41 | Hook::PreDownstreamResponseHeaderFlush.mask() 42 | } 43 | 44 | async fn pre_downstream_response_hook(&self, _session: &mut Session) -> PhaseResult { 45 | self.insert_header(_session) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_rewrite/interceptor.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use http::Uri; 3 | 4 | use crate::{ 5 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult}, 6 | proxy::http::Session, 7 | }; 8 | 9 | use super::rewrite_parts::RewriteParts; 10 | 11 | pub struct RequestRewriteInterceptor { 12 | filter: Option, 13 | rewrite_parts: RewriteParts, 14 | } 15 | 16 | impl RequestRewriteInterceptor { 17 | pub fn build(filter: Option, rewrite_parts: RewriteParts) -> Self { 18 | Self { 19 | filter, 20 | rewrite_parts, 21 | } 22 | } 23 | } 24 | 25 | #[async_trait] 26 | impl Interceptor for RequestRewriteInterceptor { 27 | fn name(&self) -> InterceptorName { 28 | InterceptorName::RequestRewrite 29 | } 30 | 31 | fn phase_mask(&self) -> PhaseMask { 32 | Phase::PreUpstreamRequest.mask() 33 | } 34 | 35 | fn filter(&self) -> &Option { 36 | &self.filter 37 | } 38 | 39 | async fn pre_upstream_request(&self, _session: &mut Session) -> PhaseResult { 40 | for (header_name, header_value) in &self.rewrite_parts.header_buffer { 41 | _session.set_us_req_header(header_name.clone(), header_value.clone()); 
42 | } 43 | 44 | if let Some(path) = &self.rewrite_parts.path { 45 | let builder = Uri::builder().path_and_query(path.as_slice()); 46 | 47 | _session.set_us_req_uri(builder.build()?)?; 48 | } 49 | 50 | Ok(false) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /dakia/src/gateway/registry_builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::GatewayConfig, 5 | error::DakiaResult, 6 | shared::{ 7 | pattern_matcher::Pcre2PatternMatcher, 8 | pattern_registry::{PatternRegistry, PatternRegistryType}, 9 | registry::Registry, 10 | }, 11 | }; 12 | 13 | use super::lb::{build_lb, LbRegistryType, LoadBalancerRegistry}; 14 | 15 | pub async fn build_ds_host_pattern_registry( 16 | gateway_config: &GatewayConfig, 17 | ) -> DakiaResult { 18 | let pattern_registry = PatternRegistry::build(); 19 | for ds in &gateway_config.downstreams { 20 | let ds_addr = ds.get_formatted_address(); 21 | let pcre2pattern_matcher = Pcre2PatternMatcher::build(&ds_addr)?; 22 | let _ = pattern_registry 23 | .register(ds_addr, Arc::new(pcre2pattern_matcher)) 24 | .await; 25 | } 26 | 27 | Ok(Arc::new(pattern_registry)) 28 | } 29 | 30 | pub async fn build_lb_registry(gateway_config: &GatewayConfig) -> DakiaResult { 31 | let lb_registry = LoadBalancerRegistry::build(); 32 | for upstream_config in &gateway_config.upstreams { 33 | let lb = build_lb(upstream_config)?; 34 | let arc_lb = Arc::new(lb); 35 | 36 | let _ = lb_registry 37 | .register(upstream_config.name.clone(), arc_lb.clone()) 38 | .await; 39 | 40 | if upstream_config.default { 41 | let _ = lb_registry.register("default".to_string(), arc_lb).await; 42 | } 43 | } 44 | 45 | Ok(Arc::new(lb_registry)) 46 | } 47 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/rate_limiter/builder.rs: 
-------------------------------------------------------------------------------- 1 | use std::{sync::Arc, time::Duration}; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::DakiaResult, 6 | gateway::{ 7 | interceptor::Interceptor, 8 | interceptor_builder::InterceptorBuilder, 9 | interceptors::rate_limiter::interceptor::{RateLimit, RateLimiterInterceptor}, 10 | }, 11 | qe::query::extract_key_i64_or_err, 12 | }; 13 | 14 | pub struct RateLimiterInterceptorBuilder {} 15 | 16 | impl Default for RateLimiterInterceptorBuilder { 17 | fn default() -> Self { 18 | Self {} 19 | } 20 | } 21 | 22 | impl InterceptorBuilder for RateLimiterInterceptorBuilder { 23 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 24 | let config = &_interceptor_config.config.expect( 25 | format!( 26 | "{:?} interceptor config not found.", 27 | _interceptor_config.name 28 | ) 29 | .as_str(), 30 | ); 31 | 32 | let capacity = extract_key_i64_or_err(config, "capacity")?; 33 | let refill_rate = extract_key_i64_or_err(config, "refill_rate")?; 34 | let refill_interval = extract_key_i64_or_err(config, "refill_interval")?; 35 | 36 | let rate_limit = RateLimit { 37 | capacity: capacity as u32, 38 | refill_rate: refill_rate as u32, 39 | refill_interval: Duration::from_millis(refill_interval as u64), 40 | }; 41 | 42 | let interceptor = RateLimiterInterceptor::build(rate_limit); 43 | Ok(Arc::new(interceptor)) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/basic_auth/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::{DakiaError, DakiaResult}, 6 | gateway::{ 7 | interceptor::Interceptor, interceptor_builder::InterceptorBuilder, 8 | interceptors::basic_auth::BasicAuthInterceptor, 9 | }, 10 | qe::query::{extract_key_str_or_err, Query}, 
11 | }; 12 | 13 | pub struct BasicAuthInterceptorBuilder {} 14 | 15 | impl BasicAuthInterceptorBuilder { 16 | fn get_user_pass(config: &Query) -> DakiaResult<(String, String)> { 17 | let username = extract_key_str_or_err(config, "username")?; 18 | let password = extract_key_str_or_err(config, "password")?; 19 | Ok((username.to_string(), password.to_string())) 20 | } 21 | } 22 | impl Default for BasicAuthInterceptorBuilder { 23 | fn default() -> Self { 24 | Self {} 25 | } 26 | } 27 | 28 | impl InterceptorBuilder for BasicAuthInterceptorBuilder { 29 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult> { 30 | match &_interceptor_config.config { 31 | Some(config) => { 32 | let (username, password) = BasicAuthInterceptorBuilder::get_user_pass(config)?; 33 | let interceptor = 34 | BasicAuthInterceptor::build(_interceptor_config.filter, username, password); 35 | Ok(Arc::new(interceptor)) 36 | } 37 | None => Err(DakiaError::i_explain(format!( 38 | "config required for interceptor {:?}", 39 | _interceptor_config.name 40 | ))), 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /dakia/src/config/args.rs: -------------------------------------------------------------------------------- 1 | use clap::AppSettings; 2 | use clap::Parser; 3 | 4 | /// A programmable, configurable, and extensible API Gateway! 5 | #[derive(Parser, Debug, Clone)] 6 | #[clap(about = "A programmable, configurable, and extensible API Gateway!", long_about = None)] 7 | #[clap(global_setting(AppSettings::DisableVersionFlag))] 8 | pub struct DakiaArgs { 9 | /// Path to Dakia's local directory for storing configuration, interceptors, filters, extensions and runtime data. 10 | #[clap(long)] 11 | pub dp: Option, 12 | 13 | /// Watch for changes in configuration files, interceptors, filters and extensions and automatically apply updates. 
use clap::AppSettings;
use clap::Parser;

// NOTE: the `///` doc comments below double as `--help` text via clap's
// derive macro, so their wording is user-facing — do not edit casually.
// `DisableVersionFlag` suppresses clap's auto `--version` because dakia
// renders its own ASCII-art version banner (see build.rs / --version).

/// A programmable, configurable, and extensible API Gateway!
#[derive(Parser, Debug, Clone)]
#[clap(about = "A programmable, configurable, and extensible API Gateway!", long_about = None)]
#[clap(global_setting(AppSettings::DisableVersionFlag))]
pub struct DakiaArgs {
    /// Path to Dakia's local directory for storing configuration, interceptors, filters, extensions and runtime data.
    #[clap(long)]
    pub dp: Option<String>,

    /// Watch for changes in configuration files, interceptors, filters and extensions and automatically apply updates.
    #[clap(short, long)]
    pub watch: bool,

    /// Reload configuration files and update runtime settings.
    /// May trigger a graceful restart if required.
    #[clap(long)]
    pub reload: bool,

    /// Test the server configuration without starting the application.
    #[clap(short, long)]
    pub test: bool,

    /// Display the current version of the API Gateway and exit.
    #[clap(short, long)]
    pub version: bool,

    /// Enable verbose logging for more detailed output.
    /// This is useful for debugging and monitoring.
    #[clap(long)]
    pub verbose: bool,

    /// Enable debug mode to output additional debugging information.
    /// Use this to troubleshoot issues during development or runtime.
    #[clap(long)]
    pub debug: bool,

    /// Whether this server should try to upgrade from a running old server
    /// It'll work only on linux platforms
    #[clap(short, long)]
    pub upgrade: bool,
}
use async_trait::async_trait;

use crate::{config::source_config::InterceptorConfig, error::DakiaResult, proxy::http::Session};

use super::{HookMask, InterceptorName, PhaseMask};

/// Result of a phase handler. The `bool` appears to mean "short-circuit":
/// interceptors that fully answer the request (short_circuit, use_file)
/// return `Ok(true)`, pass-through ones return `Ok(false)` —
/// NOTE(review): confirm the exact semantics against the phase executor.
pub type PhaseResult = DakiaResult<bool>;

/// A pluggable unit of request/response processing. Implementors opt into
/// phases via `phase_mask()` and hooks via `hook_mask()`; unimplemented
/// handlers fall back to the no-op defaults below.
#[async_trait]
pub trait Interceptor: Send + Sync {
    /// Stable identifier used for registration and error messages.
    fn name(&self) -> InterceptorName;

    /// Bitmask of phases this interceptor participates in.
    fn phase_mask(&self) -> PhaseMask {
        0 // no phase will be executed
    }

    /// Bitmask of hooks this interceptor participates in.
    fn hook_mask(&self) -> HookMask {
        0 // no hook will be executed
    }

    /// One-time configuration hook called at build time.
    fn _init(&mut self, _interceptor_config: &InterceptorConfig) -> DakiaResult<()> {
        Ok(())
    }

    // if there is no filter, it'll be considered as match
    // NOTE(review): filter type reconstructed as `Query` — confirm against
    // the declared type of `InterceptorConfig::filter`.
    fn filter(&self) -> &Option<crate::qe::query::Query> {
        &None
    }

    /// Per-request initialization phase.
    async fn init(&self, _session: &mut Session) -> DakiaResult<()> {
        Ok(())
    }

    /// Early request screening (e.g. auth, rate limiting).
    async fn request_filter(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }

    /// Last chance to answer the request without contacting an upstream.
    async fn upstream_proxy_filter(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }

    /// Mutate the request just before it is sent upstream.
    async fn pre_upstream_request(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }

    /// Inspect/mutate the upstream response when it arrives.
    async fn post_upstream_response(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }

    /// Mutate the response before it is returned downstream.
    async fn pre_downstream_response(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }

    /// Hook variant that runs before the downstream response header flush.
    async fn pre_downstream_response_hook(&self, _session: &mut Session) -> PhaseResult {
        Ok(false)
    }
}
use std::{collections::HashMap, sync::Arc};

use async_trait::async_trait;
use pingora::lb::{
    selection::{algorithms::RoundRobin, weighted::Weighted},
    LoadBalancer,
};

use tokio::sync::RwLock;

use crate::{
    config::source_config::UpstreamConfig,
    error::{DakiaError, DakiaResult},
    shared::registry::Registry,
};

/// The one balancer shape currently supported: weighted round-robin.
type LB = LoadBalancer<Weighted<RoundRobin>>;

/// Async, read-mostly registry mapping upstream names to shared balancers.
pub struct LoadBalancerRegistry {
    registry: RwLock<HashMap<String, Arc<LB>>>,
}

#[async_trait]
impl Registry<Arc<LB>> for LoadBalancerRegistry {
    /// Inserts (or replaces) the balancer registered under `key`.
    async fn register(&self, key: String, lb: Arc<LB>) {
        let mut write_guard = self.registry.write().await;
        write_guard.insert(key, lb);
    }

    /// Looks up the balancer for `key`.
    ///
    /// NOTE(review): a missing key is reported as `Err`, never `Ok(None)` —
    /// the opposite of `PatternRegistry::get`. Confirm this asymmetry is
    /// intentional before relying on the `Option` in the return type.
    async fn get(&self, key: &str) -> DakiaResult<Option<Arc<LB>>> {
        let read_guard = self.registry.read().await;
        let arc_lb = read_guard.get(key).ok_or(DakiaError::i_explain(format!(
            "Load balancer {key:?} not found."
        )))?;
        Ok(Some(arc_lb.clone()))
    }
}

impl LoadBalancerRegistry {
    /// Creates an empty registry.
    pub fn build() -> Self {
        Self {
            registry: RwLock::new(HashMap::new()),
        }
    }
}

/// Builds a balancer over all nodes of `upstream_config`.
///
/// NOTE(review): `traffic_distribution_policy` and per-node `weight` are not
/// consulted here — every upstream currently gets weighted round-robin with
/// default weights. Confirm whether that is still intended.
pub fn build_lb(upstream_config: &UpstreamConfig) -> DakiaResult<LB> {
    let addrs: Vec<String> = upstream_config
        .upstream_nodes
        .iter()
        .map(|node| node.address.get_formatted_address())
        .collect();

    let lb: LoadBalancer<Weighted<RoundRobin>> = LoadBalancer::try_from_iter(addrs)?;
    Ok(lb)
}

/// Shared trait-object handle used by gateway state.
pub type LbRegistryType = Arc<dyn Registry<Arc<LB>> + Send + Sync>;
40 | let header_buffer = extract_headers(rewrite)?; 41 | 42 | Ok(Self { header_buffer }) 43 | } 44 | None => Err(DakiaError::i_explain(format!( 45 | "rewrite config is missing for {:?} interceptor", 46 | interceptor_config.name 47 | ))), 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/upstream_config.rs: -------------------------------------------------------------------------------- 1 | use serde; 2 | 3 | use crate::error::{DakiaError, DakiaResult}; 4 | 5 | use super::inet_address::InetAddress; 6 | 7 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 8 | #[serde(rename_all = "snake_case")] 9 | pub enum NodeSelectionAlgorithm { 10 | RoundRobin, 11 | Weighted, 12 | LeastConnection, 13 | IpHash, 14 | UrlHash, 15 | Random, 16 | } 17 | 18 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 19 | pub struct TrafficDistributionPolicy { 20 | node_selection_algorithm: NodeSelectionAlgorithm, 21 | } 22 | 23 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 24 | pub struct UpstreamNodeConfig { 25 | pub address: InetAddress, 26 | pub tls: bool, 27 | pub sni: Option, 28 | pub weight: Option, 29 | } 30 | 31 | #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] 32 | pub struct UpstreamConfig { 33 | pub name: String, 34 | pub default: bool, 35 | pub upstream_nodes: Vec, 36 | pub traffic_distribution_policy: Option, 37 | } 38 | impl UpstreamConfig { 39 | pub fn find_upstream_node_config(&self, address: InetAddress) -> Option<&UpstreamNodeConfig> { 40 | self.upstream_nodes.iter().find(|node_config| { 41 | node_config.address.get_formatted_address() == address.get_formatted_address() 42 | }) 43 | } 44 | 45 | pub fn find_upstream_node_config_or_err( 46 | &self, 47 | address: InetAddress, 48 | ) -> DakiaResult<&UpstreamNodeConfig> { 49 | let node_config = self.find_upstream_node_config(address); 50 | 
node_config.ok_or(DakiaError::create_unknown_context( 51 | crate::error::ImmutStr::Static("upstream node config not found".into()), 52 | )) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/use_file/interceptor.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use bytes::Bytes; 3 | use http::StatusCode; 4 | use log::debug; 5 | 6 | use crate::{ 7 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult}, 8 | proxy::http::Session, 9 | }; 10 | 11 | pub struct UseFileInterceptor { 12 | root: String, 13 | filter: Option, 14 | } 15 | 16 | impl UseFileInterceptor { 17 | pub fn build(root: String, filter: Option) -> Self { 18 | UseFileInterceptor { root, filter } 19 | } 20 | } 21 | 22 | #[async_trait] 23 | impl Interceptor for UseFileInterceptor { 24 | fn name(&self) -> InterceptorName { 25 | InterceptorName::UseFile 26 | } 27 | 28 | fn phase_mask(&self) -> PhaseMask { 29 | Phase::UpstreamProxyFilter.mask() 30 | } 31 | 32 | fn filter(&self) -> &Option { 33 | &self.filter 34 | } 35 | 36 | async fn upstream_proxy_filter(&self, _session: &mut Session) -> PhaseResult { 37 | let path = _session.ds_req_path(); 38 | let aboslute_path = format!("{}{}", self.root, path); 39 | 40 | match tokio::fs::read(aboslute_path.clone()).await { 41 | Ok(file_content) => { 42 | _session.set_ds_res_header( 43 | "Content-Length".to_string(), 44 | file_content.len().to_string().as_bytes().to_vec(), 45 | ); 46 | 47 | _session 48 | .write_ds_res_body(Some(Bytes::from(file_content)), true) 49 | .await?; 50 | } 51 | Err(err) => { 52 | debug!("can not read file {aboslute_path} - {err}"); 53 | _session.set_res_status(StatusCode::NOT_FOUND); 54 | } 55 | }; 56 | 57 | Ok(true) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /dakia/src/proxy/http/helpers.rs: 
-------------------------------------------------------------------------------- 1 | use pingora::lb::Backend; 2 | 3 | use crate::{ 4 | config::{source_config::GatewayConfig, InetAddress}, 5 | error::{DakiaError, DakiaResult}, 6 | shared::pattern_registry::PatternRegistryType, 7 | }; 8 | 9 | fn get_ds_addrs(gateway_config: &GatewayConfig) -> Vec { 10 | // safe to unwrap 11 | gateway_config 12 | .downstreams 13 | .iter() 14 | .map(|d| d.get_formatted_address()) 15 | .collect() 16 | } 17 | 18 | pub async fn is_valid_ds_host( 19 | dakia_config: &GatewayConfig, 20 | ds_host_pattern_registry: &PatternRegistryType, 21 | ds_host: &[u8], 22 | ) -> DakiaResult { 23 | let ds_addrs = get_ds_addrs(dakia_config); 24 | 25 | for ds_addr in ds_addrs { 26 | let pattern = ds_host_pattern_registry 27 | .get(&ds_addr) 28 | .await? 29 | .ok_or(DakiaError::create( 30 | crate::error::ErrorType::InternalError, 31 | crate::error::ErrorSource::Internal, 32 | Some(crate::error::ImmutStr::Owned( 33 | "compiled pattern for downstream not found" 34 | .to_string() 35 | .into_boxed_str(), 36 | )), 37 | None, 38 | ))?; 39 | 40 | let is_matched: bool = pattern.is_match(ds_host).map_err(|e| { 41 | println!("{}", e); 42 | DakiaError::create_internal() 43 | })?; 44 | 45 | if is_matched { 46 | return Ok(true); 47 | } 48 | } 49 | 50 | Ok(false) 51 | } 52 | 53 | pub fn get_inet_addr_from_backend(backend: &Backend) -> InetAddress { 54 | let addr = backend.addr.clone().to_string(); 55 | let parts: Vec<&str> = addr.split(":").collect(); 56 | 57 | InetAddress { 58 | host: parts[0].to_owned(), 59 | // TODO: handle unwrap 60 | port: parts[1].parse().unwrap(), 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/request_rewrite/rewrite_parts.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | 
error::{DakiaError, DakiaResult}, 6 | proxy::http::HeaderBuffer, 7 | qe::query::{extract_key_vec_bytes, Query}, 8 | }; 9 | 10 | pub struct RewriteParts { 11 | pub path: Option>, 12 | pub header_buffer: HeaderBuffer, 13 | } 14 | 15 | pub fn extract_headers(rewrite_config: &Query) -> DakiaResult { 16 | let mut header_buf: HeaderBuffer = HashMap::new(); 17 | 18 | for (header_key, _) in rewrite_config { 19 | if header_key.starts_with("header.") 20 | || header_key.starts_with("req.header.") 21 | || header_key.starts_with("us.req.header.") 22 | { 23 | // TODO: optimise this to only replace parts which is present 24 | let header_name = header_key 25 | .replace("us.req.header.", "") 26 | .replace("req.header.", "") 27 | .replace("header.", ""); 28 | 29 | let header_value = extract_key_vec_bytes(rewrite_config, &header_key)?; 30 | header_buf.insert(header_name, header_value.unwrap_or(vec![])); 31 | } 32 | } 33 | 34 | Ok(header_buf) 35 | } 36 | 37 | impl RewriteParts { 38 | pub fn build(interceptor_config: &InterceptorConfig) -> DakiaResult { 39 | match &interceptor_config.rewrite { 40 | Some(rewrite) => { 41 | let header_buffer = extract_headers(rewrite)?; 42 | let path = extract_key_vec_bytes(rewrite, "path")?; 43 | 44 | Ok(Self { 45 | path, 46 | header_buffer, 47 | }) 48 | } 49 | None => Err(DakiaError::i_explain(format!( 50 | "rewrite config is missing for {:?} interceptor", 51 | interceptor_config.name 52 | ))), 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor/phase.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | pub type PhaseMask = u8; 4 | 5 | #[derive(PartialEq, Clone, Debug, Eq)] 6 | pub enum Phase { 7 | Init = 0x01, 8 | RequestFilter = 0x02, 9 | UpstreamProxyFilter = 0x04, 10 | UpstreamPeerSelection = 0x08, 11 | PreUpstreamRequest = 0x10, 12 | PostUpstreamResponse = 0x20, 13 | PreDownstreamResponse = 0x40, 14 | } 15 
use std::fmt;

/// Bit mask over `Phase` discriminants; each phase occupies one bit.
pub type PhaseMask = u8;

/// Request-processing phases, in execution order. Discriminants are
/// power-of-two so phases compose into a `PhaseMask`.
///
/// Deriving `Copy`/`Ord` replaces the old manual `Ord`/`PartialOrd` impls
/// (derived ordering compares discriminants, same result) and lets `mask`
/// drop its per-call `clone()`.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]
pub enum Phase {
    Init = 0x01,
    RequestFilter = 0x02,
    UpstreamProxyFilter = 0x04,
    UpstreamPeerSelection = 0x08,
    PreUpstreamRequest = 0x10,
    PostUpstreamResponse = 0x20,
    PreDownstreamResponse = 0x40,
}

impl Phase {
    /// The single bit representing this phase.
    pub fn mask(&self) -> PhaseMask {
        *self as PhaseMask
    }

    /// Combined mask of the standard proxying phases (0x3E).
    ///
    /// NOTE(review): `Init` and `PreDownstreamResponse` are excluded despite
    /// the name — preserved as-is since callers (e.g. server_version) rely on
    /// the current value; confirm whether the omission is intentional.
    pub fn all_phase_mask() -> PhaseMask {
        Phase::RequestFilter.mask()
            | Phase::UpstreamProxyFilter.mask()
            | Phase::UpstreamPeerSelection.mask()
            | Phase::PreUpstreamRequest.mask()
            | Phase::PostUpstreamResponse.mask()
    }
}

impl fmt::Display for Phase {
    /// snake_case names used in logs and diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let phase_str = match self {
            Phase::Init => "init",
            Phase::RequestFilter => "request_filter",
            Phase::UpstreamProxyFilter => "upstream_proxy_filter",
            Phase::UpstreamPeerSelection => "upstream_peer_selection",
            Phase::PreUpstreamRequest => "pre_upstream_request",
            Phase::PostUpstreamResponse => "post_upstream_response",
            Phase::PreDownstreamResponse => "pre_downstream_response",
        };
        write!(f, "{}", phase_str)
    }
}

/// True when `phase`'s bit is set in `phase_mask`.
pub fn is_phase_enabled(phase_mask: PhaseMask, phase: &Phase) -> bool {
    (phase_mask & phase.mask()) == phase.mask()
}
// Build script: bakes two ASCII-art constants into OUT_DIR so the binary can
// print them for `--version` (see DakiaArgs). `unwrap()` is acceptable here:
// a build-script failure should abort the build loudly.
use figlet_rs::FIGfont;
use std::env;
use std::fs::File;
use std::io::Write;

fn main() {
    // Get the package version at build time
    let version = env!("CARGO_PKG_VERSION");

    // Generate the ASCII art using figlet
    let font = FIGfont::standard().unwrap();

    let ascii_version = font.convert(version).unwrap();

    // Re-run only when this script changes (version changes rebuild anyway).
    println!("cargo:rerun-if-changed=build.rs");

    // Output the ASCII art to a file (included from the main code via OUT_DIR)
    let out_dir = env::var("OUT_DIR").unwrap();
    let ascii_version_path = format!("{}/ascii_version.rs", out_dir);

    let mut file = File::create(ascii_version_path).unwrap();
    file.write_all(format!("pub const ASCII_VERSION: &str = r#\"{}\"#;", ascii_version).as_bytes())
        .unwrap();

    // write dakia ascii art
    // NOTE(review): art spacing reconstructed from a collapsed dump — purely
    // cosmetic; verify the banner renders as intended.
    const DAKIA_ASCII_ART: &str = "
 _______
 \\ ___ `'. . .--.
 ' |--.\\ \\ .'| |__|
 | | \\ ' .' | .--.
 | | | ' __ < | | | __
 | | | | .:--.'. | | ____ | | .:--.'.
 | | ' .' / | \\ | | | \\ .' | | / | \\ |
 | |___.' /' `\" __ | | | |/ . | | `\" __ | |
 /_______.'/ .'.''| | | /\\ \\ |__| .'.''| |
 \\_______|/ / / | |_ | | \\ \\ / / | |_
 \\ \\._,\\ '/ ' \\ \\ \\ \\ \\._,\\ '/
 `--' `\" '------' '---' `--' `\"";

    let dakia_ascii_art_path = format!("{}/dakia_ascii_art.rs", out_dir);

    let mut file = File::create(dakia_ascii_art_path).unwrap();
    file.write_all(
        format!(
            "pub const DAKIA_ASCII_ART: &str = r#\"{}\"#;",
            DAKIA_ASCII_ART
        )
        .as_bytes(),
    )
    .unwrap();
}
header_buf: HeaderBuffer = HashMap::new(); 19 | 20 | for (header_key, _) in response_config { 21 | if header_key.starts_with("header.") 22 | || header_key.starts_with("res.header.") 23 | || header_key.starts_with("ds.res.header.") 24 | { 25 | // TODO: optimise this to only replace parts which is present 26 | let header_name = header_key 27 | .replace("ds.res.header.", "") 28 | .replace("res.header.", "") 29 | .replace("header.", ""); 30 | 31 | let header_value = extract_key_vec_bytes(response_config, &header_key)?; 32 | header_buf.insert(header_name, header_value.unwrap_or(vec![])); 33 | } 34 | } 35 | 36 | Ok(header_buf) 37 | } 38 | 39 | impl ResponseParts { 40 | pub fn build(interceptor_config: &InterceptorConfig) -> DakiaResult { 41 | match &interceptor_config.response { 42 | Some(response) => { 43 | let header_buffer = extract_headers(response)?; 44 | let status_code = extract_key_i64_or_err(response, "status").unwrap_or(200); 45 | let status_code = StatusCode::from_u16(status_code as u16)?; 46 | 47 | Ok(Self { 48 | header_buffer, 49 | status_code: Some(status_code), 50 | }) 51 | } 52 | None => Err(DakiaError::i_explain(format!( 53 | "response config is missing for {:?} interceptor", 54 | interceptor_config.name 55 | ))), 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/use_file/builder.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::InterceptorConfig, 5 | error::{DakiaError, DakiaResult}, 6 | gateway::{ 7 | interceptor::{Interceptor, InterceptorName}, 8 | interceptor_builder::InterceptorBuilder, 9 | }, 10 | qe::query, 11 | }; 12 | 13 | use super::UseFileInterceptor; 14 | 15 | pub struct UseFileInterceptorBuilder {} 16 | 17 | impl Default for UseFileInterceptorBuilder { 18 | fn default() -> Self { 19 | Self {} 20 | } 21 | } 22 | 23 | impl UseFileInterceptorBuilder { 
24 | fn extract_root(&self, interceptor_config: &InterceptorConfig) -> DakiaResult { 25 | let config = interceptor_config.config.as_ref().expect( 26 | format!( 27 | "config requried for {:?} interceptor", 28 | InterceptorName::UseFile 29 | ) 30 | .as_str(), 31 | ); 32 | let root_val = config.get("root").expect( 33 | format!( 34 | "root value is undefined in config of {:?} interceptor", 35 | InterceptorName::UseFile, 36 | ) 37 | .as_str(), 38 | ); 39 | 40 | let root = match root_val { 41 | query::Value::Scaler(scaler) => match scaler { 42 | query::Scaler::String(root) => Ok(root), 43 | _ => Err(DakiaError::i_explain(format!( 44 | "root value must be an string config of {:?} interceptor", 45 | InterceptorName::UseFile, 46 | ))), 47 | }, 48 | query::Value::Composite(_) => Err(DakiaError::i_explain(format!( 49 | "root value must be an string config of {:?} interceptor", 50 | InterceptorName::UseFile, 51 | ))), 52 | }?; 53 | 54 | Ok(root.clone()) 55 | } 56 | } 57 | 58 | impl InterceptorBuilder for UseFileInterceptorBuilder { 59 | fn build(&self, interceptor_config: InterceptorConfig) -> DakiaResult> { 60 | let root = self.extract_root(&interceptor_config)?; 61 | let interceptor = UseFileInterceptor::build(root, interceptor_config.filter); 62 | Ok(Arc::new(interceptor)) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /dakia/src/error/result.rs: -------------------------------------------------------------------------------- 1 | use std::sync::PoisonError; 2 | use tokio::sync::RwLockReadGuard; 3 | 4 | use super::{DakiaError, ImmutStr}; 5 | 6 | pub type DakiaResult = Result>; 7 | 8 | #[derive(Debug)] 9 | pub enum Error { 10 | DakiaError(DakiaError), 11 | PoisonError(String), 12 | PingoraError(pingora_core::Error), 13 | } 14 | 15 | impl From>> for Error { 16 | fn from(err: PoisonError>) -> Self { 17 | Error::PoisonError(err.to_string()) 18 | } 19 | } 20 | 21 | impl From for Error { 22 | fn from(err: pingora_core::Error) -> 
Self { 23 | Error::PingoraError(err) 24 | } 25 | } 26 | 27 | impl From> for Box { 28 | fn from(err: Box) -> Self { 29 | // TODO: format this error 30 | err.into() 31 | } 32 | } 33 | 34 | impl From for Box { 35 | fn from(err: http::status::InvalidStatusCode) -> Self { 36 | // TODO: format this error 37 | err.into() 38 | } 39 | } 40 | 41 | impl From for Error { 42 | fn from(err: DakiaError) -> Self { 43 | Error::DakiaError(err) 44 | } 45 | } 46 | 47 | impl From> for Box { 48 | fn from(value: Box) -> Box { 49 | match *value { 50 | Error::PingoraError(pe) => Box::new(pe), 51 | Error::DakiaError(de) => { 52 | // TODO: handle translation between pingora and dakia error 53 | // pass status code correctly 54 | // currently, it'll just print error message. Which is enough for debugging for now... 55 | let error_msg = de.to_string(); 56 | let pe = pingora_core::Error::explain(pingora::ErrorType::InternalError, error_msg); 57 | pe 58 | } 59 | // TODO: implement conversion for other errors 60 | _ => pingora_core::Error::new(pingora::ErrorType::InternalError), 61 | } 62 | } 63 | } 64 | 65 | impl From for Box { 66 | fn from(value: std::io::Error) -> Self { 67 | let message = value.to_string().into_boxed_str(); 68 | DakiaError::create_unknown_context(ImmutStr::Owned(message)) 69 | } 70 | } 71 | 72 | impl From for Box { 73 | fn from(value: pcre2::Error) -> Self { 74 | let message = value.to_string().into_boxed_str(); 75 | DakiaError::create_unknown_context(ImmutStr::Owned(message)) 76 | } 77 | } 78 | 79 | impl From for Box { 80 | fn from(value: http::Error) -> Self { 81 | let message = value.to_string().into_boxed_str(); 82 | DakiaError::create_unknown_context(ImmutStr::Owned(message)) 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /docs/quick_start.md: -------------------------------------------------------------------------------- 1 | # Quick Start 2 | 3 | > Note: Binary releases are not yet available. 
git clone https://github.com/ats1999/dakia.git
We support a MongoDB-like query syntax for filtering routes; an example is shown in the sample config below.
48 | - **Type**: `bool` 49 | - **Example**: `--watch` 50 | 51 | --- 52 | 53 | ### `--reload` 54 | 55 | - **Description**: Reload configuration files and update runtime settings. This may trigger a graceful restart if required. 56 | - **Type**: `bool` 57 | - **Example**: `--reload` 58 | 59 | --- 60 | 61 | ### `--test` / `-t` 62 | 63 | - **Description**: Test the server configuration without starting the application. 64 | - **Type**: `bool` 65 | - **Example**: `--test` 66 | 67 | --- 68 | 69 | ### `--version` / `-v` 70 | 71 | - **Description**: Display the current version of the API Gateway and exit. 72 | - **Type**: `bool` 73 | - **Example**: `--version` 74 | 75 | --- 76 | 77 | ### `--verbose` 78 | 79 | - **Description**: Enable verbose logging for more detailed output. Useful for debugging and monitoring. 80 | - **Type**: `bool` 81 | - **Example**: `--verbose` 82 | 83 | --- 84 | 85 | ### `--debug` 86 | 87 | - **Description**: Enable debug mode to output additional debugging information. Use this to troubleshoot issues during development or runtime. 88 | - **Type**: `bool` 89 | - **Example**: `--debug` 90 | 91 | --- 92 | 93 | ### `--upgrade` / `-u` 94 | 95 | - **Description**: Enable the server to attempt an upgrade from a running older server. This feature is supported only on Linux platforms. 
96 | - **Type**: `bool` 97 | - **Example**: `--upgrade` 98 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/basic_auth/interceptor.rs: -------------------------------------------------------------------------------- 1 | use std::str::from_utf8; 2 | 3 | use async_trait::async_trait; 4 | use base64::{engine::general_purpose, Engine}; 5 | use http::StatusCode; 6 | 7 | use crate::{ 8 | gateway::interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult}, 9 | proxy::http::Session, 10 | }; 11 | 12 | pub struct BasicAuthInterceptor { 13 | credentials: String, 14 | filter: Option, 15 | } 16 | 17 | impl BasicAuthInterceptor { 18 | pub fn build(filter: Option, username: String, password: String) -> Self { 19 | let user_pass = format!("{}:{}", username, password); 20 | let credentials = general_purpose::STANDARD.encode(user_pass.clone()); 21 | 22 | BasicAuthInterceptor { 23 | filter, 24 | credentials, 25 | } 26 | } 27 | 28 | fn authorize(&self, auth_header_bytes: &[u8]) -> bool { 29 | match from_utf8(auth_header_bytes) { 30 | Ok(user_credentials) => match user_credentials.strip_prefix("Basic ") { 31 | Some(stripped) => self.credentials == stripped, 32 | None => false, 33 | }, 34 | Err(_) => false, // Authorization header must be an valid UTF-8 string 35 | } 36 | } 37 | } 38 | 39 | #[async_trait] 40 | impl Interceptor for BasicAuthInterceptor { 41 | fn name(&self) -> InterceptorName { 42 | InterceptorName::BasicAuth 43 | } 44 | 45 | fn phase_mask(&self) -> PhaseMask { 46 | Phase::RequestFilter.mask() 47 | } 48 | 49 | fn filter(&self) -> &Option { 50 | &self.filter 51 | } 52 | 53 | async fn request_filter(&self, _session: &mut Session) -> PhaseResult { 54 | let auth_header_option = _session.ds_req_header("Authorization")?; 55 | /* 56 | TODO: Move code to write unauthorized response to a common method 57 | 58 | Can not right now because of the following error 59 | - implicit elided lifetime not 
allowed here expected lifetime parameter 60 | */ 61 | 62 | match auth_header_option { 63 | Some(auth_header_bytes) => { 64 | let is_authorized = self.authorize(auth_header_bytes); 65 | 66 | if is_authorized { 67 | Ok(false) 68 | } else { 69 | _session.set_res_status(StatusCode::UNAUTHORIZED); 70 | _session.set_ds_res_header( 71 | "WWW-Authenticate".to_string(), 72 | "Basic realm=\"Protected Area\"".as_bytes().to_vec(), 73 | ); 74 | Ok(true) 75 | } 76 | } 77 | None => { 78 | _session.set_res_status(StatusCode::UNAUTHORIZED); 79 | _session.set_ds_res_header( 80 | "WWW-Authenticate".to_string(), 81 | "Basic realm=\"Protected Area\"".as_bytes().to_vec(), 82 | ); 83 | Ok(true) 84 | } 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor_builder/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod utils; 2 | use std::{collections::HashMap, sync::Arc}; 3 | 4 | use crate::{ 5 | config::source_config::InterceptorConfig, 6 | error::DakiaResult, 7 | gateway::interceptor::{Interceptor, InterceptorName}, 8 | }; 9 | 10 | use super::interceptors::{ 11 | basic_auth::BasicAuthInterceptorBuilder, controller::ControllerInterceptorBuilder, 12 | rate_limiter::RateLimiterInterceptorBuilder, request_id::RequestIdInterceptorBuilder, 13 | request_rewrite::RequestRewriteInterceptorBuilder, 14 | response_rewrite::ResponseRewriteInterceptorBuilder, server_version, 15 | short_circuit::ShortCircuitInterceptorBuilder, use_file, 16 | }; 17 | 18 | pub trait InterceptorBuilder: Sync + Send { 19 | fn build(&self, _interceptor_config: InterceptorConfig) -> DakiaResult>; 20 | } 21 | 22 | #[derive(Clone)] 23 | pub struct InterceptorBuilderRegistry { 24 | /* 25 | Arc used instead of Box because of the error the trait `InterceptorBuilder` cannot be made into an object `InterceptorBuilder` cannot be made into an objec 26 | - 
https://stackoverflow.com/questions/30353462/how-to-clone-a-struct-storing-a-boxed-trait-object 27 | - https://www.reddit.com/r/rust/comments/7q3bz8/trait_object_with_clone/ 28 | - https://stackoverflow.com/questions/64725210/how-to-make-a-trait-and-a-struct-implementing-it-clonable 29 | */ 30 | // Mutex does not support Clone so wrapped in Arc 31 | pub registry: HashMap>, 32 | } 33 | 34 | impl InterceptorBuilderRegistry { 35 | pub fn build() -> Self { 36 | let mut registry: HashMap> = HashMap::new(); 37 | 38 | registry.insert( 39 | InterceptorName::ServerVersion, 40 | Arc::new(server_version::ServerVersionInterceptorBuilder::default()), 41 | ); 42 | registry.insert( 43 | InterceptorName::UseFile, 44 | Arc::new(use_file::UseFileInterceptorBuilder::default()), 45 | ); 46 | registry.insert( 47 | InterceptorName::BasicAuth, 48 | Arc::new(BasicAuthInterceptorBuilder::default()), 49 | ); 50 | registry.insert( 51 | InterceptorName::Controller, 52 | Arc::new(ControllerInterceptorBuilder::default()), 53 | ); 54 | 55 | registry.insert( 56 | InterceptorName::RateLimiter, 57 | Arc::new(RateLimiterInterceptorBuilder::default()), 58 | ); 59 | 60 | registry.insert( 61 | InterceptorName::RequestRewrite, 62 | Arc::new(RequestRewriteInterceptorBuilder::default()), 63 | ); 64 | 65 | registry.insert( 66 | InterceptorName::ResponseRewrite, 67 | Arc::new(ResponseRewriteInterceptorBuilder::default()), 68 | ); 69 | 70 | registry.insert( 71 | InterceptorName::ShortCircuit, 72 | Arc::new(ShortCircuitInterceptorBuilder::default()), 73 | ); 74 | 75 | registry.insert( 76 | InterceptorName::RequestId, 77 | Arc::new(RequestIdInterceptorBuilder::default()), 78 | ); 79 | 80 | Self { registry } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor_builder/utils.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, sync::Arc}; 2 | 3 | use log::debug; 4 | 5 
| use crate::{ 6 | config::source_config::{GatewayConfig, InterceptorConfig}, 7 | error::{DakiaError, DakiaResult}, 8 | gateway::interceptor::Interceptor, 9 | proxy::http::HeaderBuffer, 10 | qe::query::{self, Query, Value}, 11 | }; 12 | 13 | use super::InterceptorBuilderRegistry; 14 | 15 | fn pull_str_or_err<'a>(qkey: &'a str, qval: &'a Value) -> DakiaResult<&'a str> { 16 | let dakia_error = DakiaError::i_explain(format!("expected string value for header {}", qkey)); 17 | let err = Err(dakia_error); 18 | match qval { 19 | Value::Scaler(scaler) => match scaler { 20 | query::Scaler::String(hval) => Ok(hval), 21 | _ => err, 22 | }, 23 | _ => err, 24 | } 25 | } 26 | 27 | fn pull_header_bytes_or_err<'a>(qkey: &'a str, qval: &'a Value) -> DakiaResult> { 28 | let hval = pull_str_or_err(&qkey, &qval)?; 29 | let hval_bytes = hval.as_bytes(); 30 | Ok(hval_bytes.to_owned()) 31 | } 32 | 33 | pub fn extract_headers(intercept_query: &Query) -> DakiaResult<(HeaderBuffer, HeaderBuffer)> { 34 | let mut ds_res_header_buf: HeaderBuffer = HashMap::new(); 35 | let mut us_req_header_buf: HeaderBuffer = HashMap::new(); 36 | 37 | for (qkey, qval) in intercept_query { 38 | if qkey.starts_with("ds.res.header") { 39 | let hkey = qkey.replace("ds.res.header.", ""); 40 | let hval_bytes = pull_header_bytes_or_err(qkey, qval)?; 41 | ds_res_header_buf.insert(hkey, hval_bytes); 42 | } 43 | 44 | if qkey.starts_with("us.req.header") { 45 | let hkey = qkey.replace("ds.req.header.", ""); 46 | let hval_bytes = pull_header_bytes_or_err(qkey, qval)?; 47 | us_req_header_buf.insert(hkey, hval_bytes); 48 | } 49 | } 50 | 51 | Ok((ds_res_header_buf, us_req_header_buf)) 52 | } 53 | 54 | pub fn build_interceptor( 55 | interceptor_config: &InterceptorConfig, 56 | interceptor_builder_registry: &InterceptorBuilderRegistry, 57 | ) -> DakiaResult> { 58 | let interceptor_name = &interceptor_config.name; 59 | let builder = interceptor_builder_registry.registry.get(interceptor_name); 60 | 61 | let interceptor = match 
use std::{
    cmp::min,
    time::{Duration, Instant},
};

/// A single client's token bucket.
///
/// Tokens are consumed one per request and replenished lazily on access,
/// based on how much time has elapsed since the bucket was last refilled.
struct Bucket {
    /// Logical time up to which refills have already been credited.
    last_refilled_time: Instant,
    /// Tokens currently available for consumption.
    tokens: u32,
}

impl Bucket {
    /// Lazily refills the bucket: credits `refill_rate` tokens for every full
    /// `refill_interval` elapsed since `last_refilled_time`, capping the
    /// balance at `capacity`.
    fn refill(&mut self, capacity: u32, rate_limit: &RateLimit) {
        let now = Instant::now();
        let elapsed_ms = now.duration_since(self.last_refilled_time).as_millis();
        let interval_ms = rate_limit.refill_interval.as_millis();

        if interval_ms == 0 {
            // Degenerate config: a zero interval means "refill continuously";
            // previously this divided by zero and panicked.
            self.tokens = capacity;
            self.last_refilled_time = now;
            return;
        }

        let refill_cycles = elapsed_ms / interval_ms;
        let refilled_tokens = refill_cycles * rate_limit.refill_rate as u128;

        // Cap in u128 *before* narrowing: the old code truncated the total to
        // u32 first, which could wrap to a small value after a long idle.
        self.tokens = min(capacity as u128, refilled_tokens + self.tokens as u128) as u32;

        // BUG FIX: advance the refill clock only by the whole cycles actually
        // credited. Resetting it to `now` discarded the fractional remainder,
        // so steady traffic arriving slightly faster than one interval would
        // never accumulate enough elapsed time to earn a refill.
        let credited_ms = refill_cycles * interval_ms;
        // `credited_ms <= elapsed_ms`, which fits u64 for any realistic uptime.
        self.last_refilled_time +=
            Duration::from_millis(u64::try_from(credited_ms).unwrap_or(u64::MAX));
    }

    /// Consumes one token if available; returns whether consumption succeeded.
    fn try_consume(&mut self) -> bool {
        if self.tokens > 0 {
            self.tokens -= 1;
            true
        } else {
            false
        }
    }
}

/// Token-bucket rate-limit parameters.
pub struct RateLimit {
    /// Maximum number of tokens a bucket can hold.
    pub capacity: u32,
    /// Tokens added per refill cycle.
    pub refill_rate: u32,
    /// Length of one refill cycle.
    pub refill_interval: Duration,
}
_session.set_res_status(StatusCode::TOO_MANY_REQUESTS); 101 | Ok(true) 102 | } else { 103 | Ok(false) 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /docs/config.sample.yaml: -------------------------------------------------------------------------------- 1 | daemon: false 2 | error_log: /var/log/dakia/error.log 3 | pid_file: /var/run/dakia.pid 4 | upgrade_sock: /var/run/dakia.sock 5 | user: dakia 6 | group: dakia 7 | threads: 8 8 | work_stealing: true 9 | grace_period_seconds: 60 10 | graceful_shutdown_timeout_seconds: 30 11 | upstream_keepalive_pool_size: 10 12 | upstream_connect_offload_threadpools: 2 13 | upstream_connect_offload_thread_per_pool: 5 14 | upstream_debug_ssl_keylog: false 15 | gateways: 16 | - name: root 17 | bind_addresses: 18 | - host: 0.0.0.0 19 | port: 8080 20 | - host: 0.0.0.0 21 | port: 8090 22 | - host: 0.0.0.0 23 | port: 80 24 | downstreams: 25 | - host: example.com 26 | - host: localhost 27 | - host: example.net 28 | upstreams: 29 | - name: payment 30 | default: false 31 | traffic_distribution_policy: 32 | node_selection_algorithm: round_robin 33 | upstream_nodes: 34 | - address: 35 | host: 0.0.0.0 36 | port: 3000 37 | tls: false 38 | sni: null 39 | weight: 1 40 | - address: 41 | host: 0.0.0.0 42 | port: 3001 43 | tls: false 44 | sni: null 45 | weight: 2 46 | - address: 47 | host: 0.0.0.0 48 | port: 3002 49 | tls: false 50 | sni: null 51 | weight: 2 52 | - name: search 53 | default: false 54 | upstream_nodes: 55 | - address: 56 | host: 0.0.0.0 57 | port: 3002 58 | tls: false 59 | sni: null 60 | - name: default 61 | default: true 62 | upstream_nodes: 63 | - address: 64 | host: 0.0.0.0 65 | port: 3001 66 | tls: false 67 | sni: null 68 | routers: 69 | - upstream: payment 70 | filter: payment_router_filter 71 | - upstream: search 72 | filter: search_router_filter 73 | - upstream: default 74 | interceptors: 75 | - name: request_id 76 | enabled: true 77 | - name: response_rewrite 
# token bucket: capacity 5, refilled 2 tokens every 12s (~10 req/min sustained)
Mutex::new(DakiaState::default())); 27 | 28 | pub struct DakiaStateStore {} 29 | 30 | impl DakiaStateStore { 31 | pub fn get_dakia_config(&self) -> DakiaResult { 32 | match DAKIA_STATE.lock() { 33 | Ok(dakia_state) => Ok(dakia_state.dakia_config.clone()), 34 | Err(err) => Err(DakiaError::i_explain(format!( 35 | "Failed to acquire lock while retrieving Dakia config: {err}" 36 | ))), 37 | } 38 | } 39 | 40 | pub fn store_dakia_config(&self, dakia_config: DakiaConfig) -> DakiaResult<()> { 41 | match DAKIA_STATE.lock() { 42 | Ok(mut dakia_state) => { 43 | dakia_state.dakia_config = dakia_config; 44 | Ok(()) 45 | } 46 | Err(err) => Err(DakiaError::i_explain(format!( 47 | "Failed to acquire lock while updating Dakia config: {err}" 48 | ))), 49 | } 50 | } 51 | 52 | pub fn get_gateway_stores(&self) -> DakiaResult>> { 53 | match DAKIA_STATE.lock() { 54 | Ok(dakia_state) => Ok(dakia_state.gateway_state_stores.clone()), 55 | Err(err) => Err(DakiaError::i_explain(format!( 56 | "Failed to acquire lock while retrieving gateway state stores: {err}" 57 | ))), 58 | } 59 | } 60 | 61 | pub fn store_gateway_state_stores( 62 | &self, 63 | gateway_stores: Vec>, 64 | ) -> DakiaResult<()> { 65 | match DAKIA_STATE.lock() { 66 | Ok(mut dakia_state) => { 67 | dakia_state.gateway_state_stores = gateway_stores; 68 | Ok(()) 69 | } 70 | Err(err) => Err(DakiaError::i_explain(format!( 71 | "Failed to acquire lock while updating gateway state stores: {err}" 72 | ))), 73 | } 74 | } 75 | 76 | pub fn update_gateway_state(&self, gateway_state: GatewayState) -> DakiaResult { 77 | match DAKIA_STATE.lock() { 78 | Ok(dakia_state) => { 79 | let cloned_gateway_state = gateway_state.clone(); 80 | let gateway_name = &cloned_gateway_state.gateway_config().name; 81 | 82 | for cur_gateway_state_store in &dakia_state.gateway_state_stores { 83 | let cur_gateway_state = &cur_gateway_state_store.get_state(); 84 | let cur_gateway_name = &cur_gateway_state.gateway_config().name; 85 | if cur_gateway_name == gateway_name 
{ 86 | cur_gateway_state_store.update_state(gateway_state.clone()); 87 | return Ok(true); 88 | } 89 | } 90 | 91 | Ok(false) 92 | } 93 | Err(err) => Err(DakiaError::i_explain(format!( 94 | "Failed to acquire lock while updating gateway state store: {err}" 95 | ))), 96 | } 97 | } 98 | } 99 | 100 | pub static DAKIA_STATE_STORE: Lazy = Lazy::new(|| DakiaStateStore {}); 101 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/gateway_config.rs: -------------------------------------------------------------------------------- 1 | use crate::error::BError; 2 | use crate::error::DakiaError; 3 | use crate::error::DakiaResult; 4 | use crate::gateway::filter::exec_filter; 5 | use crate::proxy::http::Session; 6 | use crate::qe::query::Query; 7 | 8 | use super::interceptor_config::InterceptorConfig; 9 | use super::DownstreamConfig; 10 | use super::InetAddress; 11 | use super::RouterConfig; 12 | use super::UpstreamConfig; 13 | 14 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 15 | pub struct GatewayConfig { 16 | pub name: String, // TODO: use auto generated name 17 | // TODO: add type = HTTP, TCP, SMTP, etc 18 | pub bind_addresses: Vec, 19 | pub downstreams: Vec, 20 | pub upstreams: Vec, 21 | 22 | #[serde(default)] 23 | pub routers: Vec, 24 | 25 | #[serde(default)] 26 | pub interceptors: Vec, 27 | 28 | #[serde(default)] 29 | pub filters: Vec, 30 | } 31 | 32 | pub fn find_router_config<'a>(session: &'a Session<'a>) -> DakiaResult> { 33 | for router_config in session.ctx().gateway_state.gateway_config().routers.iter() { 34 | match &router_config.filter { 35 | None => return Ok(Some(router_config)), // if no filter present for any router then it'll be considered a match when encountered 36 | Some(filter_name) => { 37 | let filter = session.ctx().gateway_state.filter_or_err(&filter_name)?; 38 | let is_matched = exec_filter(filter, session)?; 39 | if is_matched { 40 | return Ok(Some(router_config)); 41 | } 
42 | } 43 | } 44 | } 45 | Ok(None) 46 | } 47 | 48 | pub fn find_router_config_or_err<'a>(session: &'a Session<'a>) -> DakiaResult<&'a RouterConfig> { 49 | let router_config = find_router_config(session)?; 50 | router_config.ok_or(DakiaError::create_unknown_context( 51 | crate::error::ImmutStr::Static("router config not found".into()), 52 | )) 53 | } 54 | 55 | impl GatewayConfig { 56 | pub fn find_default_upstream(&self) -> Option<&UpstreamConfig> { 57 | self.upstreams 58 | .iter() 59 | .find(|upstream_config| upstream_config.default) 60 | } 61 | 62 | pub fn find_upstream_config( 63 | &self, 64 | upstream_name: &str, 65 | fallback: bool, 66 | ) -> Option<&UpstreamConfig> { 67 | let upstream_config = self 68 | .upstreams 69 | .iter() 70 | .find(|upstream_config| upstream_config.name == upstream_name); 71 | match upstream_config { 72 | Some(upstream_config) => Some(upstream_config), 73 | None => { 74 | if fallback { 75 | self.find_default_upstream() 76 | } else { 77 | None 78 | } 79 | } 80 | } 81 | } 82 | 83 | pub fn find_upstream_config_or_err( 84 | &self, 85 | upstream_name: &str, 86 | fallback: bool, 87 | ) -> Result<&UpstreamConfig, BError> { 88 | self.find_upstream_config(upstream_name, fallback).ok_or( 89 | DakiaError::create_unknown_context(crate::error::ImmutStr::Static( 90 | "upstream config not found".into(), 91 | )), 92 | ) 93 | } 94 | } 95 | 96 | impl Default for GatewayConfig { 97 | fn default() -> Self { 98 | Self { 99 | name: Default::default(), 100 | bind_addresses: Default::default(), 101 | downstreams: Default::default(), 102 | upstreams: Default::default(), 103 | routers: Default::default(), 104 | interceptors: Default::default(), 105 | filters: Default::default(), 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptor/executor.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use log::trace; 4 | 5 
| use crate::{error::DakiaResult, gateway::filter::exec_filter, proxy::http::Session}; 6 | 7 | use super::{is_hook_enabled, is_phase_enabled, Hook, Interceptor, Phase, PhaseResult}; 8 | 9 | fn match_filter<'a>(filter_name: &Option, session: &Session<'a>) -> DakiaResult { 10 | match filter_name { 11 | Some(filter_name) => { 12 | let filter = 13 | session.ctx().gateway_state.filter(filter_name).expect( 14 | format!("Unexpected error, {filter_name} filter not found...").as_str(), 15 | ); 16 | 17 | Ok(exec_filter(filter, session)?) 18 | } 19 | None => { 20 | trace!("No filter specified, defaulting to match as true."); 21 | Ok(true) 22 | } 23 | } 24 | } 25 | 26 | pub async fn exec_hook<'a>(cur_hook: Hook, session: &mut Session<'a>) -> PhaseResult { 27 | let gateway_state = session.ctx().gateway_state.clone(); 28 | let interceptors = gateway_state.interceptors(); 29 | 30 | for interceptor in interceptors { 31 | let is_hook_enabled = is_hook_enabled(interceptor.hook_mask(), &cur_hook); 32 | // TODO: store filter matching status inside RwLock> inside session.ctx() to avoid doing heavy computation while filtering the same logic 33 | let is_filter_matched = match_filter(interceptor.filter(), session)?; 34 | if !is_hook_enabled || !is_filter_matched { 35 | continue; 36 | } 37 | 38 | match cur_hook { 39 | Hook::PreDownstreamResponseHeaderFlush => { 40 | interceptor.pre_downstream_response_hook(session).await 41 | } 42 | }?; 43 | } 44 | 45 | Ok(false) 46 | } 47 | 48 | async fn execute_interceptor_phase<'a>( 49 | interceptor: &Arc, 50 | session: &mut Session<'a>, 51 | ) -> PhaseResult { 52 | let phase = session.phase(); 53 | let is_phase_enabled = is_phase_enabled(interceptor.phase_mask(), phase); 54 | 55 | trace!( 56 | "Executing interceptor {:?} phase: {:?}, enabled: {}", 57 | interceptor.name(), 58 | phase, 59 | is_phase_enabled, 60 | ); 61 | 62 | // TODO: store filter matching status inside RwLock> inside session.ctx() to avoid doing heavy computation while filtering the 
same logic 63 | let is_filter_matched = match_filter(interceptor.filter(), session)?; 64 | 65 | trace!( 66 | "Filter result for interceptor {:?} filter matched: {}", 67 | interceptor.name(), 68 | is_filter_matched 69 | ); 70 | 71 | if !is_phase_enabled || !is_filter_matched { 72 | return Ok(false); // false - continue to other phase or interceptor 73 | } 74 | 75 | match phase { 76 | Phase::Init => { 77 | interceptor.init(session).await?; 78 | Ok(false) 79 | } 80 | Phase::RequestFilter => interceptor.request_filter(session).await, 81 | Phase::UpstreamProxyFilter => interceptor.upstream_proxy_filter(session).await, 82 | Phase::UpstreamPeerSelection => todo!(), // no such requirement as of now 83 | Phase::PreUpstreamRequest => interceptor.pre_upstream_request(session).await, 84 | Phase::PostUpstreamResponse => interceptor.post_upstream_response(session).await, 85 | Phase::PreDownstreamResponse => interceptor.pre_downstream_response(session).await, 86 | } 87 | } 88 | 89 | pub async fn exec_phase<'a>(session: &mut Session<'a>) -> PhaseResult { 90 | let gateway_state = session.ctx().gateway_state.clone(); 91 | let interceptors = gateway_state.interceptors(); 92 | 93 | for interceptor in interceptors { 94 | let phase_result = execute_interceptor_phase(interceptor, session).await?; 95 | if phase_result { 96 | return Ok(true); 97 | } 98 | } 99 | 100 | Ok(false) 101 | } 102 | -------------------------------------------------------------------------------- /dakia/src/config/source_config/source_dakia_config.rs: -------------------------------------------------------------------------------- 1 | use std::{fs, path::Path}; 2 | 3 | use crate::{ 4 | config::DakiaConfig, 5 | error::{DakiaError, DakiaResult, ImmutStr}, 6 | }; 7 | 8 | use super::GatewayConfig; 9 | 10 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 11 | pub struct SourceDakiaRawConfig { 12 | pub dp: Option, 13 | pub error_log: Option, 14 | pub pid_file: Option, 15 | pub upgrade_sock: Option, 16 | pub daemon: 
Option, 17 | pub user: Option, 18 | pub group: Option, 19 | pub threads: Option, 20 | pub work_stealing: Option, 21 | pub grace_period_seconds: Option, 22 | pub graceful_shutdown_timeout_seconds: Option, 23 | pub upstream_keepalive_pool_size: Option, 24 | pub upstream_connect_offload_threadpools: Option, 25 | pub upstream_connect_offload_thread_per_pool: Option, 26 | pub upstream_debug_ssl_keylog: Option, 27 | pub gateways: Vec, 28 | } 29 | 30 | impl Default for SourceDakiaRawConfig { 31 | // TODO: keep a yaml embeded string for default config with router and interceptors 32 | fn default() -> Self { 33 | SourceDakiaRawConfig { 34 | dp: None, 35 | daemon: None, 36 | error_log: None, 37 | pid_file: None, 38 | upgrade_sock: None, 39 | user: None, 40 | group: None, 41 | threads: None, 42 | work_stealing: None, 43 | grace_period_seconds: None, 44 | graceful_shutdown_timeout_seconds: None, 45 | upstream_connect_offload_thread_per_pool: None, 46 | upstream_connect_offload_threadpools: None, 47 | upstream_debug_ssl_keylog: None, 48 | upstream_keepalive_pool_size: None, 49 | gateways: vec![], 50 | } 51 | } 52 | } 53 | 54 | impl From for SourceDakiaRawConfig { 55 | fn from(dakia_config: DakiaConfig) -> Self { 56 | Self { 57 | dp: Some(dakia_config.dp), 58 | daemon: Some(dakia_config.daemon), 59 | error_log: Some(dakia_config.error_log), 60 | pid_file: Some(dakia_config.pid_file), 61 | upgrade_sock: Some(dakia_config.upgrade_sock), 62 | user: dakia_config.user, 63 | group: dakia_config.group, 64 | threads: Some(dakia_config.threads), 65 | work_stealing: Some(dakia_config.work_stealing), 66 | grace_period_seconds: dakia_config.grace_period_seconds, 67 | graceful_shutdown_timeout_seconds: dakia_config.graceful_shutdown_timeout_seconds, 68 | upstream_connect_offload_thread_per_pool: dakia_config 69 | .upstream_connect_offload_thread_per_pool, 70 | upstream_connect_offload_threadpools: dakia_config.upstream_connect_offload_threadpools, 71 | upstream_debug_ssl_keylog: 
Some(dakia_config.upstream_debug_ssl_keylog), 72 | upstream_keepalive_pool_size: Some(dakia_config.upstream_keepalive_pool_size), 73 | gateways: dakia_config.gateways, 74 | } 75 | } 76 | } 77 | 78 | impl SourceDakiaRawConfig { 79 | pub fn flush(&self) -> DakiaResult<()> { 80 | let string_config = serde_yaml::to_string(self).map_err(|e| { 81 | DakiaError::create( 82 | crate::error::ErrorType::InternalError, 83 | crate::error::ErrorSource::Internal, 84 | Some(ImmutStr::from("Faild to flush dakia config to file")), 85 | Some(Box::new(e)), 86 | ) 87 | })?; 88 | 89 | let dp = self.dp.as_deref().unwrap_or("/etc/dakia"); // dakia path 90 | let cp = Path::new(dp).join("config/dakia.yaml"); // configs path 91 | 92 | fs::write(cp, string_config).map_err(|e| { 93 | DakiaError::create( 94 | crate::error::ErrorType::InternalError, 95 | crate::error::ErrorSource::Internal, 96 | Some(ImmutStr::from("Faild to flush dakia config to file")), 97 | Some(Box::new(e)), 98 | ) 99 | })?; 100 | 101 | Ok(()) 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /dakia/src/main.rs: -------------------------------------------------------------------------------- 1 | mod config; 2 | mod error; 3 | mod gateway; 4 | mod proxy; 5 | mod qe; 6 | mod shared; 7 | 8 | use std::{ 9 | mem::take, 10 | sync::{Arc, Mutex}, 11 | }; 12 | 13 | use clap::Parser; 14 | use config::{DakiaArgs, DakiaConfig}; 15 | use error::DakiaError; 16 | use gateway::state::build_gateway_state; 17 | use gateway::state::GatewayStateStore; 18 | use gateway::HttpGateway; 19 | 20 | use pingora::server::{configuration::ServerConf, Server}; 21 | use shared::{common::get_dakia_ascii_art, dakia_state::DAKIA_STATE_STORE}; 22 | 23 | use proxy::http::Proxy; 24 | use shared::into::IntoRef; 25 | use tokio::runtime::Builder; 26 | 27 | fn main() { 28 | println!("{}", get_dakia_ascii_art()); 29 | 30 | let dakia_args = DakiaArgs::parse(); 31 | 32 | let dakia_config = 
DakiaConfig::from_args(dakia_args.clone()).unwrap(); 33 | 34 | DAKIA_STATE_STORE 35 | .store_dakia_config(dakia_config.clone()) 36 | .unwrap(); 37 | 38 | process_args(&dakia_args, &dakia_config).unwrap(); 39 | 40 | // perform init steps 41 | init(); 42 | 43 | let runtime = Builder::new_current_thread() 44 | .build() 45 | // if there is any error, just panic 46 | .unwrap(); 47 | 48 | // TODO: add support for TCP, WebSocket and gRPC gateway 49 | let gateways: Arc>> = Arc::new(Mutex::new(vec![])); 50 | 51 | // clone data for passing to the tokio runtime 52 | let gateways_cloned = gateways.clone(); 53 | let dakia_config_cloned = dakia_config.clone(); 54 | 55 | let handle = runtime.spawn(async move { 56 | let mut gateway_state_stores: Vec> = vec![]; 57 | 58 | for gateway_config in &dakia_config_cloned.gateways { 59 | let cloned_gateway_config = gateway_config.clone(); 60 | 61 | // dakia can not work without state, so unwrap is not a problem 62 | let gateway_state = build_gateway_state(cloned_gateway_config, dakia_config.version) 63 | .await 64 | .unwrap(); 65 | let gateway_state_store = Arc::new(GatewayStateStore::new(gateway_state)); 66 | let server_conf: ServerConf = dakia_config_cloned.into_ref(); 67 | 68 | let gateway = gateway::build_http(gateway_state_store.clone(), Arc::new(server_conf)) 69 | .await 70 | .unwrap(); 71 | 72 | // rust mutex guard does not work properly across tokio await, so creating lock guard after await in each loop 73 | let mut gateway_vector_guard = gateways_cloned.lock().unwrap(); 74 | gateway_vector_guard.push(gateway); 75 | gateway_state_stores.push(gateway_state_store); 76 | } 77 | 78 | DAKIA_STATE_STORE 79 | .store_gateway_state_stores(gateway_state_stores) 80 | .unwrap(); 81 | }); 82 | 83 | runtime.block_on(handle).unwrap(); 84 | 85 | // we no longer this runtime, pingora runtime will be used instead 86 | runtime.shutdown_background(); 87 | 88 | let mut server = Server::new_with_opt_and_conf( 89 | 
dakia_config.to_pingore_opt(&dakia_args), 90 | dakia_config.into_ref(), 91 | ); 92 | server.bootstrap(); 93 | 94 | let mut gateway_vector_guard = gateways.lock().unwrap(); 95 | 96 | // take ownership of vector to pass owned value inside add_service 97 | let proxy_vector = take(&mut *gateway_vector_guard); 98 | 99 | for gateway in proxy_vector.into_iter() { 100 | server.add_service(gateway); 101 | } 102 | 103 | server.run_forever(); 104 | } 105 | 106 | fn init() { 107 | env_logger::init(); 108 | } 109 | 110 | fn process_args(args: &DakiaArgs, dakia_config: &DakiaConfig) -> Result<(), Box> { 111 | if args.version { 112 | // version will be printed along with dakia art in the very beginning, so just exist from here 113 | shared::common::exit(); 114 | } 115 | 116 | if args.reload { 117 | todo!(); 118 | } 119 | 120 | if args.debug { 121 | println!("{:?}", dakia_config); 122 | shared::common::exit(); 123 | } 124 | 125 | if args.test { 126 | todo!(); 127 | } 128 | // TODO: use kill -HUP pid 129 | Ok(()) 130 | } 131 | -------------------------------------------------------------------------------- /dakia/src/gateway/state.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | config::{source_config::GatewayConfig, ConfigVersion}, 3 | error::{DakiaError, DakiaResult}, 4 | shared::{mutable_registry::Registry, pattern_registry::PatternRegistryType}, 5 | }; 6 | use arc_swap::ArcSwap; 7 | use std::sync::Arc; 8 | 9 | use super::{ 10 | filter::{build_filter_registry, Filter}, 11 | interceptor::Interceptor, 12 | interceptor_builder::{utils::build_interceptors, InterceptorBuilderRegistry}, 13 | lb, registry_builder, 14 | }; 15 | 16 | #[derive(Clone)] 17 | pub struct GatewayState { 18 | version: ConfigVersion, 19 | gateway_config: GatewayConfig, 20 | ds_host_pattern_registry: PatternRegistryType, 21 | lb_registry: lb::LbRegistryType, 22 | _interceptor_builder_registry: InterceptorBuilderRegistry, 23 | interceptors: Vec>, 24 | 
filter_registry: Registry, 25 | } 26 | 27 | impl GatewayState { 28 | pub fn build( 29 | version: ConfigVersion, 30 | gateway_config: GatewayConfig, 31 | ds_host_pattern_registry: PatternRegistryType, 32 | lb_registry: lb::LbRegistryType, 33 | interceptor_builder_registry: InterceptorBuilderRegistry, 34 | interceptors: Vec>, 35 | filter_registry: Registry, 36 | ) -> Self { 37 | Self { 38 | version, 39 | gateway_config, 40 | ds_host_pattern_registry, 41 | lb_registry, 42 | _interceptor_builder_registry: interceptor_builder_registry, 43 | interceptors, 44 | filter_registry, 45 | } 46 | } 47 | 48 | pub fn gateway_config(&self) -> &GatewayConfig { 49 | &self.gateway_config 50 | } 51 | 52 | pub fn pattern_registry(&self) -> &PatternRegistryType { 53 | &self.ds_host_pattern_registry 54 | } 55 | 56 | pub fn lb_registry(&self) -> &lb::LbRegistryType { 57 | &self.lb_registry 58 | } 59 | 60 | pub fn interceptors(&self) -> &Vec> { 61 | &self.interceptors 62 | } 63 | 64 | pub fn filter(&self, filter_name: &str) -> Option<&Filter> { 65 | self.filter_registry.get(filter_name) 66 | } 67 | 68 | pub fn filter_or_err(&self, filter_name: &str) -> DakiaResult<&Filter> { 69 | self.filter_registry 70 | .get(filter_name) 71 | .ok_or(DakiaError::i_explain(format!( 72 | "expected filter {filter_name} not found in filter registry" 73 | ))) 74 | } 75 | 76 | pub fn version(&self) -> ConfigVersion { 77 | self.version 78 | } 79 | } 80 | 81 | pub struct GatewayStateStore { 82 | state: ArcSwap, 83 | } 84 | 85 | impl GatewayStateStore { 86 | pub fn new(state: GatewayState) -> Self { 87 | Self { 88 | state: ArcSwap::new(Arc::new(state)), 89 | } 90 | } 91 | } 92 | 93 | impl GatewayStateStore { 94 | pub fn update_state(&self, new_state: GatewayState) -> () { 95 | self.state.swap(Arc::new(new_state)); 96 | } 97 | 98 | pub fn get_state(&self) -> Arc { 99 | self.state.load_full() 100 | } 101 | 102 | pub fn get_inner(&self) -> GatewayState { 103 | let arc_config = self.get_state().clone(); 104 | 
(*arc_config).clone() 105 | } 106 | } 107 | 108 | pub async fn build_gateway_state( 109 | mut gateway_config: GatewayConfig, 110 | version: ConfigVersion, 111 | ) -> DakiaResult { 112 | let ds_host_pattern_registry = 113 | registry_builder::build_ds_host_pattern_registry(&gateway_config).await?; 114 | let lb_registry = registry_builder::build_lb_registry(&gateway_config).await?; 115 | 116 | let interceptor_builder_registry = InterceptorBuilderRegistry::build(); 117 | let filter_registry = build_filter_registry(&mut gateway_config)?; 118 | let interceptors = build_interceptors(&gateway_config, &interceptor_builder_registry)?; 119 | let gateway_state = GatewayState::build( 120 | version, 121 | gateway_config, 122 | ds_host_pattern_registry, 123 | lb_registry, 124 | interceptor_builder_registry, 125 | interceptors, 126 | filter_registry, 127 | ); 128 | 129 | Ok(gateway_state) 130 | } 131 | -------------------------------------------------------------------------------- /dakia/src/gateway/filter/operator.rs: -------------------------------------------------------------------------------- 1 | use crate::{error::Error, qe::query::Query, shared::pattern_matcher::Pcre2PatternMatcher}; 2 | 3 | use super::query2filter::query2filter; 4 | 5 | #[derive(Debug, Clone)] 6 | pub enum RelationalOperator { 7 | Eq(Vec), 8 | Ne(Vec), 9 | } 10 | 11 | #[derive(Debug, Clone)] 12 | pub enum SetOperator { 13 | In(Vec>), 14 | Nin(Vec>), 15 | } 16 | 17 | #[derive(Debug, Clone)] 18 | pub enum PatternOperator { 19 | Contains(Vec), 20 | NotContains(Vec), 21 | StartsWith(Vec), 22 | NotStartWith(Vec), 23 | EndsWith(Vec), 24 | NotEndsWith(Vec), 25 | Matches(Pcre2PatternMatcher), 26 | } 27 | 28 | #[derive(Debug, Clone)] 29 | pub enum Header { 30 | Accept, 31 | AcceptEncoding, 32 | AcceptLanguage, 33 | Authorization, 34 | CacheControl, 35 | ContentType, 36 | ContentLength, 37 | SetCookie, 38 | Host, 39 | Origin, 40 | Referer, 41 | UserAgent, 42 | XForwardedFor, 43 | XRequestId, 44 | Custom(Vec), // 
Allows custom headers 45 | } 46 | 47 | impl From<&str> for Header { 48 | fn from(value: &str) -> Self { 49 | match value.to_lowercase().as_str() { 50 | "accept" => Header::Accept, 51 | "accept-encoding" => Header::AcceptEncoding, 52 | "accept-language" => Header::AcceptLanguage, 53 | "authorization" => Header::Authorization, 54 | "cache-control" => Header::CacheControl, 55 | "content-type" => Header::ContentType, 56 | "content-length" => Header::ContentLength, 57 | "set-cookie" => Header::SetCookie, 58 | "host" => Header::Host, 59 | "origin" => Header::Origin, 60 | "referer" => Header::Referer, 61 | "user-agent" => Header::UserAgent, 62 | "x-forwarded-for" => Header::XForwardedFor, 63 | "x-request-id" => Header::XRequestId, 64 | _ => Header::Custom(value.as_bytes().to_vec()), 65 | } 66 | } 67 | } 68 | 69 | impl Header { 70 | pub fn as_bytes(&self) -> &[u8] { 71 | match self { 72 | Header::Accept => b"accept", 73 | Header::AcceptEncoding => b"accept-encoding", 74 | Header::AcceptLanguage => b"accept-language", 75 | Header::Authorization => b"authorization", 76 | Header::CacheControl => b"cache-control", 77 | Header::ContentType => b"content-type", 78 | Header::ContentLength => b"content-length", 79 | Header::SetCookie => b"set-cookie", 80 | Header::Host => b"host", 81 | Header::Origin => b"origin", 82 | Header::Referer => b"referer", 83 | Header::UserAgent => b"user-agent", 84 | Header::XForwardedFor => b"x-forwarded-for", 85 | Header::XRequestId => b"x-request-id", 86 | Header::Custom(bytes) => bytes, 87 | } 88 | } 89 | } 90 | #[derive(Debug, Clone)] 91 | pub enum CriteriaOperator { 92 | Relation(RelationalOperator), 93 | Pattern(PatternOperator), 94 | Set(SetOperator), 95 | Exists(bool), 96 | } 97 | 98 | #[derive(Debug, Clone)] 99 | pub enum LogicalCriteriaOperator { 100 | And(Vec), 101 | Or(Vec), 102 | } 103 | 104 | #[derive(Debug, Clone)] 105 | pub enum PartCriteriaOperator { 106 | CriteriaOperator(CriteriaOperator), 107 | 
LogicalCriteriaOperator(LogicalCriteriaOperator), 108 | } 109 | 110 | #[derive(Debug, Clone)] 111 | pub struct HeaderCriteria { 112 | pub name: Header, 113 | pub operator: Vec, 114 | } 115 | 116 | #[derive(Debug, Clone)] 117 | pub struct QueryCriteria { 118 | pub name: Vec, 119 | pub operator: Vec, 120 | } 121 | 122 | #[derive(Debug, Clone)] 123 | pub enum PartFilterCriteria { 124 | Header(HeaderCriteria), 125 | Query(QueryCriteria), 126 | Path(Vec), 127 | Scheme(Vec), 128 | Method(Vec), 129 | } 130 | 131 | #[derive(Debug, Clone)] 132 | pub enum FilterCriteria { 133 | Logical(LogicalFilterCriteria), 134 | PartFilterCriteria(PartFilterCriteria), 135 | } 136 | 137 | #[derive(Debug, Clone)] 138 | pub enum LogicalFilterCriteria { 139 | And(Vec), 140 | Or(Vec), 141 | } 142 | 143 | #[derive(Debug, Clone)] 144 | pub struct Filter { 145 | pub criteria_list: Vec, 146 | } 147 | 148 | impl TryFrom<&Query> for Filter { 149 | type Error = Box; 150 | 151 | fn try_from(value: &Query) -> Result { 152 | query2filter(value) 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /dakia/src/gateway/interceptors/controller/interceptor.rs: -------------------------------------------------------------------------------- 1 | use std::str::from_utf8; 2 | 3 | use async_trait::async_trait; 4 | use bytes::Bytes; 5 | use http::StatusCode; 6 | 7 | use crate::{ 8 | config::{source_config::SourceDakiaRawConfig, DakiaConfig}, 9 | error::DakiaResult, 10 | gateway::{ 11 | interceptor::{Interceptor, InterceptorName, Phase, PhaseMask, PhaseResult}, 12 | state::build_gateway_state, 13 | }, 14 | proxy::http::Session, 15 | shared::dakia_state::DAKIA_STATE_STORE, 16 | }; 17 | 18 | pub struct ControllerInterceptor { 19 | filter: Option, 20 | } 21 | 22 | impl ControllerInterceptor { 23 | pub fn build(filter: Option) -> Self { 24 | Self { filter } 25 | } 26 | 27 | async fn write_invalid_method_response(&self, _session: &mut Session<'_>) -> DakiaResult<()> { 28 
| _session.set_res_status(StatusCode::METHOD_NOT_ALLOWED); 29 | Ok(()) 30 | } 31 | 32 | async fn write_bad_request_response(&self, _session: &mut Session<'_>) -> DakiaResult<()> { 33 | _session.set_res_status(StatusCode::BAD_REQUEST); 34 | Ok(()) 35 | } 36 | 37 | async fn write_invalid_content_type_response( 38 | &self, 39 | _session: &mut Session<'_>, 40 | ) -> DakiaResult<()> { 41 | _session.set_res_status(StatusCode::UNSUPPORTED_MEDIA_TYPE); 42 | Ok(()) 43 | } 44 | 45 | async fn store_dakia_config_in_store(&self, mut dakia_config: DakiaConfig) -> DakiaResult<()> { 46 | let cur_dakia_config = DAKIA_STATE_STORE.get_dakia_config().unwrap(); 47 | dakia_config.version = cur_dakia_config.version + 1; 48 | 49 | for gateway_config in &dakia_config.gateways { 50 | let gateway_state = 51 | build_gateway_state(gateway_config.clone(), dakia_config.version).await?; 52 | DAKIA_STATE_STORE.update_gateway_state(gateway_state)?; 53 | } 54 | 55 | DAKIA_STATE_STORE.store_dakia_config(dakia_config)?; 56 | Ok(()) 57 | } 58 | 59 | async fn update_in_memory_dakia_config(&self, _session: &mut Session<'_>) -> DakiaResult<()> { 60 | let body = _session.read_ds_req_body().await?; 61 | let body_str = match &body { 62 | Some(bval) => { 63 | if bval.is_empty() { 64 | return self.write_bad_request_response(_session).await; 65 | } else { 66 | from_utf8(&bval).expect("Failed to parse content: invalid UTF-8 encoding") 67 | } 68 | } 69 | None => return self.write_bad_request_response(_session).await, 70 | }; 71 | 72 | let content_type_hedaer = _session.ds_req_header("Content-Type")?; 73 | match content_type_hedaer { 74 | Some(havl) => { 75 | let source_dakia_raw_config = if havl == "application/json".as_bytes() { 76 | let source_dakia_config: SourceDakiaRawConfig = serde_json::from_str(body_str) 77 | .expect("Failed to deserialize: invalid json body"); 78 | source_dakia_config 79 | } else if havl == "application/yaml".as_bytes() { 80 | let source_dakia_config: SourceDakiaRawConfig = 
serde_yaml::from_str(body_str) 81 | .expect("Failed to deserialize: invalid yaml body"); 82 | source_dakia_config 83 | } else { 84 | return self.write_invalid_content_type_response(_session).await; 85 | }; 86 | 87 | self.store_dakia_config_in_store(DakiaConfig::from(source_dakia_raw_config)) 88 | .await?; 89 | Ok(()) 90 | } 91 | None => return self.write_invalid_content_type_response(_session).await, 92 | } 93 | } 94 | 95 | async fn write_invalid_accept_header_response( 96 | &self, 97 | _session: &mut Session<'_>, 98 | ) -> DakiaResult<()> { 99 | _session.set_res_status(StatusCode::NOT_ACCEPTABLE); 100 | Ok(()) 101 | } 102 | 103 | async fn write_dakia_config_in_response(&self, _session: &mut Session<'_>) -> DakiaResult<()> { 104 | let dakia_config = DAKIA_STATE_STORE.get_dakia_config()?; 105 | let source_dakia_raw_config = SourceDakiaRawConfig::from(dakia_config); 106 | let accept_header = _session.ds_req_header("Accept")?; 107 | 108 | match accept_header { 109 | Some(hval) => { 110 | let config_str = if hval == "application/json".as_bytes() { 111 | serde_json::to_string(&source_dakia_raw_config) 112 | .expect("Can not serialize config to json") 113 | } 114 | // https://www.ietf.org/archive/id/draft-ietf-httpapi-yaml-mediatypes-00.html#name-media-type-application-yaml 115 | else if hval == "application/yaml".as_bytes() { 116 | serde_yaml::to_string(&source_dakia_raw_config) 117 | .expect("Can not serialize config to json") 118 | } else { 119 | self.write_invalid_accept_header_response(_session).await?; 120 | return Ok(()); 121 | }; 122 | 123 | _session 124 | .write_ds_res_body(Some(Bytes::from(config_str)), true) 125 | .await?; 126 | } 127 | None => return self.write_invalid_accept_header_response(_session).await, 128 | } 129 | 130 | Ok(()) 131 | } 132 | } 133 | 134 | #[async_trait] 135 | impl Interceptor for ControllerInterceptor { 136 | fn name(&self) -> InterceptorName { 137 | InterceptorName::Controller 138 | } 139 | 140 | fn phase_mask(&self) -> PhaseMask { 
141 | Phase::UpstreamProxyFilter.mask() 142 | } 143 | 144 | fn filter(&self) -> &Option { 145 | &self.filter 146 | } 147 | 148 | async fn upstream_proxy_filter(&self, _session: &mut Session) -> PhaseResult { 149 | let method = _session.ds_req_method()?; 150 | if method == "GET" { 151 | self.write_dakia_config_in_response(_session).await?; 152 | } else if method == "PUT" { 153 | self.update_in_memory_dakia_config(_session).await?; 154 | } else { 155 | self.write_invalid_method_response(_session).await?; 156 | } 157 | 158 | Ok(true) 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /dakia/src/proxy/http/proxy.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::{ 4 | config::source_config::find_router_config_or_err, 5 | error::{DakiaError, DakiaResult}, 6 | gateway::{interceptor::Phase, state::GatewayStateStore}, 7 | proxy::http::helpers::get_inet_addr_from_backend, 8 | }; 9 | 10 | use super::{ 11 | helpers::is_valid_ds_host, 12 | session::{self}, 13 | DakiaHttpGatewayCtx, 14 | }; 15 | use async_trait::async_trait; 16 | use http::StatusCode; 17 | use pingora::{ 18 | prelude::HttpPeer, 19 | proxy::{ProxyHttp, Session}, 20 | Error, ErrorSource, 21 | ErrorType::HTTPStatus, 22 | }; 23 | use pingora_http::{RequestHeader, ResponseHeader}; 24 | 25 | #[derive(Clone)] 26 | pub struct Proxy { 27 | gateway_state_store: Arc, 28 | } 29 | 30 | impl Proxy { 31 | pub async fn build(gateway_state_store: Arc) -> DakiaResult { 32 | let proxy = Proxy { 33 | gateway_state_store, 34 | }; 35 | 36 | Ok(proxy) 37 | } 38 | } 39 | 40 | #[async_trait] 41 | impl ProxyHttp for Proxy { 42 | type CTX = DakiaHttpGatewayCtx; 43 | fn new_ctx(&self) -> Self::CTX { 44 | let gateway_state = self.gateway_state_store.get_state(); 45 | DakiaHttpGatewayCtx::new(gateway_state) 46 | } 47 | 48 | async fn early_request_filter( 49 | &self, 50 | _session: &mut Session, 51 | _ctx: &mut 
Self::CTX, 52 | ) -> Result<(), Box> { 53 | let mut session = session::Session::build(Phase::Init, _session, _ctx); 54 | session.execute_interceptors_phase().await?; 55 | Ok(()) 56 | } 57 | 58 | async fn request_filter( 59 | &self, 60 | _session: &mut Session, 61 | _ctx: &mut Self::CTX, 62 | ) -> Result> { 63 | let mut session = session::Session::build(Phase::RequestFilter, _session, _ctx); 64 | let host = session.ds_req_header("host")?; 65 | 66 | match host { 67 | Some(host) => { 68 | let is_valid_ds_host = is_valid_ds_host( 69 | &session.ctx().gateway_state.gateway_config(), 70 | &self.gateway_state_store.get_state().pattern_registry(), 71 | host, 72 | ) 73 | .await?; 74 | 75 | if !is_valid_ds_host { 76 | session.set_res_status(StatusCode::FORBIDDEN); 77 | session.flush_ds_res_header().await?; 78 | return Ok(true); 79 | } 80 | } 81 | 82 | None => { 83 | // host is required header 84 | session.set_res_status(StatusCode::BAD_REQUEST); 85 | session.flush_ds_res_header().await?; 86 | return Ok(true); 87 | } 88 | }; 89 | 90 | Ok(session.execute_interceptors_phase().await?) 91 | } 92 | 93 | async fn proxy_upstream_filter( 94 | &self, 95 | _session: &mut Session, 96 | _ctx: &mut Self::CTX, 97 | ) -> Result> 98 | where 99 | Self::CTX: Send + Sync, 100 | { 101 | let mut session = session::Session::build(Phase::UpstreamProxyFilter, _session, _ctx); 102 | Ok(!session.execute_interceptors_phase().await?) 
103 | } 104 | 105 | async fn upstream_peer( 106 | &self, 107 | _session: &mut Session, 108 | _ctx: &mut Self::CTX, 109 | ) -> Result, Box> { 110 | let session = session::Session::build(Phase::UpstreamProxyFilter, _session, _ctx); 111 | 112 | let router_config = find_router_config_or_err(&session)?; 113 | let upstream_name = &router_config.upstream; 114 | 115 | let gateway_state = self.gateway_state_store.get_state(); 116 | let lb_registry = gateway_state.lb_registry(); 117 | 118 | let mut lb = lb_registry.get(&upstream_name).await?; 119 | lb = match lb { 120 | None => lb_registry.get("default").await?, 121 | Some(lb) => Some(lb), 122 | }; 123 | 124 | let lb = lb.ok_or(DakiaError::i_explain(format!( 125 | "load balacer not found for upstream {upstream_name}" 126 | )))?; 127 | 128 | let backend = lb.select(b"", 256).unwrap(); // hash doesn't matter 129 | 130 | let inet_address = get_inet_addr_from_backend(&backend); 131 | 132 | let upstream_node_config = gateway_state 133 | .gateway_config() 134 | .find_upstream_config_or_err(upstream_name, true) 135 | .map(|a| a.find_upstream_node_config_or_err(inet_address))??; 136 | 137 | let tls = upstream_node_config.tls; 138 | let sni = upstream_node_config.clone().sni.unwrap_or("".to_string()); 139 | 140 | let peer = Box::new(HttpPeer::new(backend.addr, tls, sni)); 141 | 142 | Ok(peer) 143 | } 144 | 145 | async fn upstream_request_filter( 146 | &self, 147 | _session: &mut Session, 148 | _upstream_request: &mut RequestHeader, 149 | _ctx: &mut Self::CTX, 150 | ) -> Result<(), pingora_core::BError> 151 | where 152 | Self::CTX: Send + Sync, 153 | { 154 | let mut session = session::Session::build(Phase::PreUpstreamRequest, _session, _ctx); 155 | session.upstream_request(_upstream_request); 156 | session.execute_interceptors_phase().await?; 157 | session.flush_us_req_header()?; 158 | 159 | Ok(()) 160 | } 161 | 162 | async fn fail_to_proxy(&self, _session: &mut Session, e: &Error, _ctx: &mut Self::CTX) -> u16 163 | where 164 | 
Self::CTX: Send + Sync, 165 | { 166 | let code = match e.etype() { 167 | HTTPStatus(code) => *code, 168 | _ => { 169 | match e.esource() { 170 | ErrorSource::Upstream => 502, 171 | ErrorSource::Downstream => { 172 | match e.etype() { 173 | pingora::ErrorType::WriteError 174 | | pingora::ErrorType::ReadError 175 | | pingora::ErrorType::ConnectionClosed => { 176 | /* conn already dead */ 177 | 0 178 | } 179 | _ => 400, 180 | } 181 | } 182 | ErrorSource::Internal | ErrorSource::Unset => 500, 183 | } 184 | } 185 | }; 186 | 187 | if code > 0 { 188 | let mut session = session::Session::build(Phase::PreDownstreamResponse, _session, _ctx); 189 | let status_code = StatusCode::from_u16(code).unwrap(); 190 | session.set_res_status(status_code); 191 | session.flush_ds_res_header().await.unwrap(); 192 | } 193 | code 194 | } 195 | 196 | async fn response_filter( 197 | &self, 198 | _session: &mut Session, 199 | _upstream_response: &mut ResponseHeader, 200 | _ctx: &mut Self::CTX, 201 | ) -> Result<(), Box> 202 | where 203 | Self::CTX: Send + Sync, 204 | { 205 | let mut session = session::Session::build(Phase::PostUpstreamResponse, _session, _ctx); 206 | session.upstream_response(_upstream_response); 207 | session.execute_interceptors_phase().await?; 208 | session.flush_ds_res_header().await?; 209 | Ok(()) 210 | } 211 | } 212 | -------------------------------------------------------------------------------- /dakia/src/config/dakia_config.rs: -------------------------------------------------------------------------------- 1 | use std::{fs, path::Path}; 2 | 3 | use log::debug; 4 | use pingora::{prelude::Opt, server::configuration::ServerConf}; 5 | 6 | use crate::{ 7 | config::source_config::SourceDakiaRawConfig, 8 | error::{DakiaError, DakiaResult, ImmutStr}, 9 | shared::into::IntoRef, 10 | }; 11 | 12 | use super::{source_config::GatewayConfig, DakiaArgs}; 13 | 14 | pub type ConfigVersion = i64; 15 | 16 | #[derive(Debug, Clone)] 17 | pub struct DakiaConfig { 18 | pub version: 
ConfigVersion, 19 | pub daemon: bool, 20 | pub dp: String, 21 | pub error_log: String, 22 | pub pid_file: String, 23 | pub upgrade_sock: String, 24 | pub user: Option, 25 | pub group: Option, 26 | pub threads: usize, 27 | pub work_stealing: bool, 28 | pub grace_period_seconds: Option, 29 | pub graceful_shutdown_timeout_seconds: Option, 30 | pub upstream_keepalive_pool_size: usize, 31 | pub upstream_connect_offload_threadpools: Option, 32 | pub upstream_connect_offload_thread_per_pool: Option, 33 | pub upstream_debug_ssl_keylog: bool, 34 | pub gateways: Vec, 35 | } 36 | 37 | impl Default for DakiaConfig { 38 | fn default() -> Self { 39 | Self { 40 | version: 0, 41 | dp: Default::default(), 42 | daemon: Default::default(), 43 | error_log: Default::default(), 44 | pid_file: Default::default(), 45 | upgrade_sock: Default::default(), 46 | user: Default::default(), 47 | group: Default::default(), 48 | threads: Default::default(), 49 | work_stealing: Default::default(), 50 | grace_period_seconds: Default::default(), 51 | graceful_shutdown_timeout_seconds: Default::default(), 52 | upstream_keepalive_pool_size: Default::default(), 53 | upstream_connect_offload_threadpools: Default::default(), 54 | upstream_connect_offload_thread_per_pool: Default::default(), 55 | upstream_debug_ssl_keylog: Default::default(), 56 | gateways: Default::default(), 57 | } 58 | } 59 | } 60 | 61 | impl DakiaConfig { 62 | pub fn from_args(args: DakiaArgs) -> DakiaResult { 63 | let dp = args.dp.as_deref().unwrap_or("/etc/dakia"); // dakia path 64 | let cp = Path::new(dp).join("config/dakia.yaml"); // configs path 65 | 66 | let is_config_file_readable = fs::metadata(&cp) 67 | .map(|metadata| metadata.is_file()) 68 | .unwrap_or(false); 69 | 70 | if !is_config_file_readable { 71 | let e = DakiaError::create( 72 | crate::error::ErrorType::InternalError, 73 | crate::error::ErrorSource::Internal, 74 | Some(ImmutStr::from("Failed to load Dakia config file. 
The file might be missing, inaccessible, or malformed!")), 75 | None, 76 | ); 77 | return Err(e); 78 | } 79 | 80 | let raw_config = fs::read_to_string(&cp).map_err(|e| DakiaError::create( 81 | crate::error::ErrorType::InternalError, 82 | crate::error::ErrorSource::Internal, 83 | Some(ImmutStr::from("Failed to load Dakia config file. The file might be missing, inaccessible, or malformed!")), 84 | Some(Box::new(e)), 85 | ))?; 86 | 87 | let mut source_dakia_config: SourceDakiaRawConfig = serde_yaml::from_str(&raw_config) 88 | .map_err(|e| { 89 | DakiaError::create( 90 | crate::error::ErrorType::InternalError, 91 | crate::error::ErrorSource::Internal, 92 | Some(ImmutStr::from("Failed to parse config the file.")), 93 | Some(Box::new(e)), 94 | ) 95 | })?; 96 | 97 | // update this so that it can be preserved over restart 98 | source_dakia_config.dp = args.dp; 99 | 100 | debug!( 101 | "\n========== Dakia Config ==========\n{:#?}\n===================================", 102 | source_dakia_config 103 | ); 104 | 105 | Ok(DakiaConfig::from(source_dakia_config)) 106 | } 107 | pub fn find_gateway_config<'a>(&'a self, gateway_name: &str) -> Option<&'a GatewayConfig> { 108 | self.gateways.iter().find(|g| g.name == gateway_name) 109 | } 110 | pub fn find_gateway_config_or_err(&self, gateway_name: &str) -> DakiaResult<&GatewayConfig> { 111 | let gateway_config = 112 | self.find_gateway_config(gateway_name) 113 | .ok_or(DakiaError::create_unknown_context(ImmutStr::Static( 114 | "gateway config not found".into(), 115 | )))?; 116 | Ok(gateway_config) 117 | } 118 | pub fn to_pingore_opt(&self, args: &DakiaArgs) -> Opt { 119 | let mut opt = Opt::default(); 120 | opt.daemon = self.daemon; 121 | opt.upgrade = args.upgrade; 122 | // not required, as we are pssing struct directly 123 | // opt.conf = Some(self.dp.clone() + "/config/pingora.yaml"); 124 | opt 125 | } 126 | } 127 | 128 | impl From for DakiaConfig { 129 | fn from(source_dakia_raw_config: SourceDakiaRawConfig) -> Self { 130 | 
DakiaConfig { 131 | version: 0, 132 | daemon: source_dakia_raw_config.daemon.unwrap_or(false), 133 | dp: source_dakia_raw_config 134 | .dp 135 | .unwrap_or("/etc/dakia".to_string()), 136 | error_log: source_dakia_raw_config 137 | .error_log 138 | .unwrap_or("/var/log/dakia/error.log".to_string()), 139 | pid_file: source_dakia_raw_config 140 | .pid_file 141 | .unwrap_or("/tmp/dakia.pid".to_string()), 142 | upgrade_sock: source_dakia_raw_config 143 | .upgrade_sock 144 | .unwrap_or("/tmp/dakia_upgrade.sock".to_string()), 145 | user: source_dakia_raw_config.user.clone(), 146 | group: source_dakia_raw_config.group.clone(), 147 | threads: source_dakia_raw_config.threads.unwrap_or(1), 148 | work_stealing: source_dakia_raw_config.work_stealing.unwrap_or(true), 149 | grace_period_seconds: source_dakia_raw_config.grace_period_seconds, 150 | graceful_shutdown_timeout_seconds: source_dakia_raw_config 151 | .graceful_shutdown_timeout_seconds, 152 | upstream_keepalive_pool_size: source_dakia_raw_config 153 | .upstream_keepalive_pool_size 154 | .unwrap_or(128), 155 | upstream_connect_offload_threadpools: source_dakia_raw_config 156 | .upstream_connect_offload_threadpools, 157 | upstream_connect_offload_thread_per_pool: source_dakia_raw_config 158 | .upstream_connect_offload_thread_per_pool, 159 | upstream_debug_ssl_keylog: source_dakia_raw_config 160 | .upstream_debug_ssl_keylog 161 | .unwrap_or(false), 162 | gateways: source_dakia_raw_config.gateways, 163 | } 164 | } 165 | } 166 | 167 | impl IntoRef for DakiaConfig { 168 | fn into_ref(&self) -> ServerConf { 169 | ServerConf { 170 | daemon: self.daemon, 171 | error_log: Some(self.error_log.clone()), 172 | grace_period_seconds: self.grace_period_seconds, 173 | graceful_shutdown_timeout_seconds: self.graceful_shutdown_timeout_seconds, 174 | group: self.group.clone(), 175 | user: self.user.clone(), 176 | threads: self.threads, 177 | pid_file: self.pid_file.clone(), 178 | upgrade_sock: self.upgrade_sock.clone(), 179 | 
upstream_connect_offload_thread_per_pool: self.upstream_connect_offload_thread_per_pool, 180 | upstream_debug_ssl_keylog: self.upstream_debug_ssl_keylog, 181 | upstream_connect_offload_threadpools: self.upstream_connect_offload_threadpools, 182 | upstream_keepalive_pool_size: self.upstream_keepalive_pool_size, 183 | work_stealing: self.work_stealing, 184 | version: 1, 185 | ca_file: None, 186 | client_bind_to_ipv4: vec![], 187 | client_bind_to_ipv6: vec![], 188 | } 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /dakia/src/proxy/http/session.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, mem::take}; 2 | 3 | use bytes::Bytes; 4 | use http::{uri::PathAndQuery, StatusCode, Uri}; 5 | use pingora::protocols::l4::socket::SocketAddr; 6 | use pingora_http::{RequestHeader as PRequestHeader, ResponseHeader as PResponseHeader}; 7 | use pingora_proxy::Session as PSession; 8 | 9 | use crate::{ 10 | error::{DakiaError, DakiaResult}, 11 | gateway::interceptor::{ 12 | executor::{exec_hook, exec_phase}, 13 | Hook, Phase, PhaseResult, 14 | }, 15 | }; 16 | 17 | use super::DakiaHttpGatewayCtx; 18 | 19 | pub struct Session<'a> { 20 | psession: &'a mut PSession, 21 | upstream_request: Option<&'a mut PRequestHeader>, 22 | upstream_response: Option<&'a mut PResponseHeader>, 23 | phase: Phase, 24 | ds_status_code: StatusCode, 25 | ctx: &'a mut DakiaHttpGatewayCtx, 26 | ds_header_flushed: bool, 27 | } 28 | 29 | impl<'a> Session<'a> { 30 | pub fn build( 31 | phase: Phase, 32 | psession: &'a mut PSession, 33 | ctx: &'a mut DakiaHttpGatewayCtx, 34 | ) -> Self { 35 | Session { 36 | phase, 37 | psession, 38 | upstream_request: None, 39 | upstream_response: None, 40 | ds_status_code: StatusCode::OK, 41 | ctx, 42 | ds_header_flushed: false, 43 | } 44 | } 45 | 46 | pub fn upstream_request(&mut self, upstream_request: &'a mut PRequestHeader) { 47 | self.upstream_request = 
Some(upstream_request); 48 | } 49 | 50 | pub fn upstream_response(&mut self, upstream_response: &'a mut PResponseHeader) { 51 | self.upstream_response = Some(upstream_response); 52 | } 53 | } 54 | 55 | impl<'a> Session<'a> { 56 | pub fn ds_socket_addr(&self) -> Option<&SocketAddr> { 57 | self.psession.client_addr() 58 | } 59 | } 60 | 61 | impl<'a> Session<'a> { 62 | pub fn ds_req_method(&self) -> DakiaResult<&str> { 63 | Ok(self.psession.as_downstream().req_header().method.as_str()) 64 | } 65 | 66 | pub fn us_req_method(&self) -> DakiaResult<&str> { 67 | Ok(self.upstream_request.as_ref().unwrap().method.as_str()) 68 | } 69 | } 70 | 71 | impl<'a> Session<'a> { 72 | pub fn ds_req_path(&self) -> &str { 73 | self.psession.as_downstream().req_header().uri.path() 74 | } 75 | 76 | pub fn set_us_req_uri(&mut self, uri: Uri) -> DakiaResult<()> { 77 | match self.upstream_request.as_mut() { 78 | Some(upstream_request) => { 79 | upstream_request.set_uri(uri); 80 | Ok(()) 81 | } 82 | None => Err(DakiaError::i_explain( 83 | "Something went wrong! 
Upstream headers are not present", 84 | )), 85 | } 86 | } 87 | } 88 | 89 | impl<'a> Session<'a> { 90 | pub fn ds_req_query(&self) -> DakiaResult> { 91 | Ok(self.psession.as_downstream().req_header().uri.query()) 92 | } 93 | 94 | pub fn us_req_query(&self) -> DakiaResult> { 95 | Ok(self.upstream_request.as_ref().unwrap().uri.query()) 96 | } 97 | 98 | pub fn ds_req_path_and_query(&self) -> Option<&PathAndQuery> { 99 | self.psession 100 | .as_downstream() 101 | .req_header() 102 | .uri 103 | .path_and_query() 104 | } 105 | 106 | pub fn us_req_path_and_query(&self) -> Option<&PathAndQuery> { 107 | self.upstream_request.as_ref().unwrap().uri.path_and_query() 108 | } 109 | } 110 | 111 | impl<'a> Session<'a> { 112 | pub fn us_req_header(&self, header_name: &str) -> DakiaResult> { 113 | let header_value = self 114 | .upstream_request 115 | .as_ref() 116 | .unwrap() 117 | .headers 118 | .get(header_name); 119 | 120 | match header_value { 121 | Some(value) => Ok(Some(value.as_bytes())), 122 | None => Ok(None), 123 | } 124 | } 125 | 126 | pub fn ds_req_header(&self, header_name: &str) -> DakiaResult> { 127 | let header_value = self 128 | .psession 129 | .as_downstream() 130 | .req_header() 131 | .headers 132 | .get(header_name); 133 | 134 | match header_value { 135 | Some(value) => Ok(Some(value.as_bytes())), 136 | None => Ok(None), 137 | } 138 | } 139 | } 140 | 141 | impl<'a> Session<'a> { 142 | pub fn set_us_req_header(&mut self, header_name: String, header_value: Vec) { 143 | self.ctx 144 | .us_req_header_buffer 145 | .insert(header_name, header_value); 146 | } 147 | 148 | pub fn set_ds_res_header(&mut self, header_name: String, header_value: Vec) { 149 | self.ctx 150 | .ds_res_header_buffer 151 | .insert(header_name, header_value); 152 | } 153 | 154 | async fn flush_header_to_ds(&mut self) -> DakiaResult<()> { 155 | let mut header = PResponseHeader::build(self.ds_status_code, None).unwrap(); 156 | 157 | let headers = take(&mut self.ctx.ds_res_header_buffer); 158 | for 
(header_name, header_value) in headers.into_iter() { 159 | header.insert_header(header_name, header_value)?; 160 | } 161 | 162 | self.psession 163 | .write_response_header(Box::new(header), false) 164 | .await?; 165 | 166 | Ok(()) 167 | } 168 | 169 | async fn flush_header_to_us_res(&mut self) -> DakiaResult<()> { 170 | let upstream_response = self.upstream_response.as_mut().expect( 171 | format!( 172 | "upstream_response must be available in phase {}", 173 | Phase::PostUpstreamResponse 174 | ) 175 | .as_str(), 176 | ); 177 | 178 | let headers = take(&mut self.ctx.ds_res_header_buffer); 179 | for (header_name, header_value) in headers.into_iter() { 180 | upstream_response.insert_header(header_name, header_value)?; 181 | } 182 | 183 | Ok(()) 184 | } 185 | 186 | pub async fn flush_ds_res_header(&mut self) -> DakiaResult<()> { 187 | if self.ds_header_flushed { 188 | return Ok(()); 189 | } 190 | 191 | self.ds_header_flushed = true; 192 | 193 | let cur_hook = Hook::PreDownstreamResponseHeaderFlush; 194 | // TODO: allow to configure keepalive once bug is fixed in pingora itself 195 | // https://github.com/cloudflare/pingora/issues/540 196 | self.psession.set_keepalive(None); 197 | 198 | exec_hook(cur_hook, self).await?; 199 | 200 | match self.phase { 201 | Phase::Init 202 | | Phase::RequestFilter 203 | | Phase::UpstreamProxyFilter 204 | | Phase::PreDownstreamResponse 205 | | Phase::UpstreamPeerSelection => self.flush_header_to_ds().await, 206 | Phase::PreUpstreamRequest => Err(DakiaError::i_explain(format!( 207 | "can not write downstream headers in {} phase", 208 | Phase::PreUpstreamRequest 209 | ))), 210 | Phase::PostUpstreamResponse => self.flush_header_to_us_res().await, 211 | } 212 | } 213 | 214 | pub fn flush_us_req_header(&mut self) -> DakiaResult<()> { 215 | match self.upstream_request.as_mut() { 216 | Some(upstream_request) => { 217 | let headers = take(&mut self.ctx.us_req_header_buffer); 218 | for (header_name, header_value) in headers.into_iter() { 219 | 
upstream_request.insert_header(header_name, header_value)?; 220 | } 221 | Ok(()) 222 | } 223 | 224 | None => Err(DakiaError::i_explain( 225 | "Something went wrong! Upstream headers are not present", 226 | )), 227 | } 228 | } 229 | } 230 | 231 | impl<'a> Session<'a> { 232 | pub fn set_res_status(&mut self, status_code: StatusCode) { 233 | self.ds_status_code = status_code; 234 | } 235 | } 236 | 237 | impl<'a> Session<'a> { 238 | pub async fn write_ds_res_body( 239 | &mut self, 240 | body: Option, 241 | end_of_stream: bool, 242 | ) -> DakiaResult<()> { 243 | if !self.ds_header_flushed { 244 | self.flush_ds_res_header().await?; 245 | } 246 | 247 | self.psession 248 | .write_response_body(body, end_of_stream) 249 | .await?; 250 | Ok(()) 251 | } 252 | } 253 | 254 | impl<'a> Session<'a> { 255 | pub async fn read_ds_req_body(&mut self) -> DakiaResult> { 256 | let body = self.psession.downstream_session.read_request_body().await?; 257 | Ok(body) 258 | } 259 | } 260 | 261 | impl<'a> Session<'a> { 262 | pub async fn execute_interceptors_phase(&mut self) -> PhaseResult { 263 | let short_circuit = exec_phase(self).await?; 264 | if short_circuit { 265 | self.flush_ds_res_header().await?; 266 | Ok(true) 267 | } else { 268 | Ok(false) 269 | } 270 | } 271 | } 272 | 273 | impl<'a> Session<'a> { 274 | pub fn ctx(&self) -> &DakiaHttpGatewayCtx { 275 | &self.ctx 276 | } 277 | 278 | pub fn phase(&self) -> &Phase { 279 | &self.phase 280 | } 281 | } 282 | 283 | pub type HeaderBuffer = HashMap>; 284 | -------------------------------------------------------------------------------- /dakia/src/gateway/filter/executor.rs: -------------------------------------------------------------------------------- 1 | use log::trace; 2 | 3 | use crate::{ 4 | error::DakiaResult, 5 | gateway::filter::operator::{ 6 | FilterCriteria, LogicalCriteriaOperator, LogicalFilterCriteria, PartFilterCriteria, 7 | PatternOperator, RelationalOperator, SetOperator, 8 | }, 9 | proxy::http::Session, 10 | 
shared::pattern_matcher::PatternMatcher, 11 | }; 12 | 13 | use super::{ 14 | operator::{CriteriaOperator, HeaderCriteria, PartCriteriaOperator}, 15 | Filter, 16 | }; 17 | 18 | fn contains_slice(haystack: &[u8], needle: &[u8]) -> bool { 19 | if needle.is_empty() { 20 | return true; 21 | } 22 | haystack 23 | .windows(needle.len()) 24 | .any(|window| window == needle) 25 | } 26 | 27 | fn match_critera_operator(operator: &CriteriaOperator, value: Option<&[u8]>) -> DakiaResult { 28 | match operator { 29 | CriteriaOperator::Relation(relational_operator) => match relational_operator { 30 | RelationalOperator::Eq(qval) => match value { 31 | Some(value) => Ok(value == qval), 32 | None => Ok(false), 33 | }, 34 | RelationalOperator::Ne(qval) => match value { 35 | Some(value) => Ok(value != qval), 36 | None => Ok(false), 37 | }, 38 | }, 39 | CriteriaOperator::Pattern(pattern_operator) => match pattern_operator { 40 | PatternOperator::Contains(qval) => match value { 41 | Some(value) => Ok(contains_slice(value, &qval)), 42 | None => Ok(false), 43 | }, 44 | PatternOperator::NotContains(qval) => match value { 45 | Some(value) => Ok(!contains_slice(value, &qval)), 46 | None => Ok(false), 47 | }, 48 | PatternOperator::StartsWith(qval) => match value { 49 | Some(value) => Ok(value.starts_with(&qval)), 50 | None => Ok(false), 51 | }, 52 | PatternOperator::NotStartWith(qval) => match value { 53 | Some(value) => Ok(!value.starts_with(&qval)), 54 | None => Ok(false), 55 | }, 56 | PatternOperator::EndsWith(qval) => match value { 57 | Some(value) => Ok(value.ends_with(&qval)), 58 | None => Ok(false), 59 | }, 60 | PatternOperator::NotEndsWith(qval) => match value { 61 | Some(value) => Ok(!value.ends_with(&qval)), 62 | None => Ok(false), 63 | }, 64 | PatternOperator::Matches(pcre2_pattern_matcher) => match value { 65 | Some(value) => { 66 | // TODO: return error here if something is wrong instead of returning false 67 | let result = pcre2_pattern_matcher.is_match(value).unwrap_or(false); 68 
| Ok(result) 69 | } 70 | None => Ok(false), 71 | }, 72 | }, 73 | CriteriaOperator::Set(set_operator) => match value { 74 | Some(value) => match set_operator { 75 | SetOperator::In(qval) => { 76 | for q in qval { 77 | if q == value { 78 | return Ok(true); 79 | } 80 | } 81 | 82 | Ok(false) 83 | } 84 | SetOperator::Nin(qval) => { 85 | for q in qval { 86 | if q == value { 87 | return Ok(false); 88 | } 89 | } 90 | 91 | Ok(true) 92 | } 93 | }, 94 | None => Ok(false), 95 | }, 96 | CriteriaOperator::Exists(exists) => { 97 | if *exists { 98 | Ok(value.is_some()) 99 | } else { 100 | Ok(value.is_none()) 101 | } 102 | } 103 | } 104 | } 105 | 106 | fn match_part_critera_operator( 107 | operator: &PartCriteriaOperator, 108 | value: Option<&[u8]>, 109 | ) -> DakiaResult { 110 | match operator { 111 | PartCriteriaOperator::CriteriaOperator(criteria_operator) => { 112 | return match_critera_operator(criteria_operator, value) 113 | } 114 | PartCriteriaOperator::LogicalCriteriaOperator(logical_criteria_operator) => { 115 | match logical_criteria_operator { 116 | LogicalCriteriaOperator::And(criteria_operators) => { 117 | for criteria_operator in criteria_operators { 118 | if !match_critera_operator(criteria_operator, value)? { 119 | return Ok(false); 120 | } 121 | } 122 | return Ok(true); 123 | } 124 | LogicalCriteriaOperator::Or(criteria_operators) => { 125 | for criteria_operator in criteria_operators { 126 | if match_critera_operator(criteria_operator, value)? { 127 | return Ok(true); 128 | } 129 | } 130 | return Ok(false); 131 | } 132 | } 133 | } 134 | } 135 | } 136 | 137 | fn match_part_critera_operators( 138 | operators: &Vec, 139 | value: Option<&[u8]>, 140 | ) -> DakiaResult { 141 | for part_criteria_operator in operators { 142 | if !match_part_critera_operator(part_criteria_operator, value)? 
{ 143 | return Ok(false); 144 | } 145 | } 146 | Ok(true) 147 | } 148 | 149 | fn match_header<'a>(header_criteria: &HeaderCriteria, session: &Session<'a>) -> DakiaResult { 150 | let header_name_bytes = header_criteria.name.as_bytes(); 151 | let header_name_utf8 = String::from_utf8_lossy(header_name_bytes).into_owned(); 152 | let req_header_value = session.ds_req_header(&header_name_utf8)?; 153 | match_part_critera_operators(&header_criteria.operator, req_header_value) 154 | } 155 | 156 | fn match_query<'a>(header_criteria: &HeaderCriteria, session: &Session<'a>) -> DakiaResult { 157 | todo!() 158 | } 159 | 160 | fn match_path<'a>( 161 | criteria_operators: &Vec, 162 | session: &Session<'a>, 163 | ) -> DakiaResult { 164 | let req_path = session.ds_req_path(); 165 | trace!("executing path match for {req_path}",); 166 | match_part_critera_operators(criteria_operators, Some(req_path.as_bytes())) 167 | } 168 | 169 | fn match_method<'a>( 170 | criteria_operators: &Vec, 171 | session: &Session<'a>, 172 | ) -> DakiaResult { 173 | let req_path = session.ds_req_method()?; 174 | match_part_critera_operators(criteria_operators, Some(req_path.as_bytes())) 175 | } 176 | 177 | fn exec_part_filter<'a>( 178 | part_filter_criteria: &PartFilterCriteria, 179 | session: &Session<'a>, 180 | ) -> DakiaResult { 181 | match part_filter_criteria { 182 | PartFilterCriteria::Header(header_criteria) => match_header(header_criteria, session), 183 | PartFilterCriteria::Query(query_criteria) => todo!(), 184 | PartFilterCriteria::Path(part_criteria_operators) => { 185 | match_path(part_criteria_operators, session) 186 | } 187 | PartFilterCriteria::Scheme(part_criteria_operators) => todo!(), 188 | PartFilterCriteria::Method(part_criteria_operators) => { 189 | match_method(part_criteria_operators, session) 190 | } 191 | } 192 | } 193 | 194 | pub fn exec_filter<'a>(filter: &Filter, session: &Session<'a>) -> DakiaResult { 195 | trace!("executing filter match for filter \n {:#?}", filter); 196 | 197 | 
for criteria in &filter.criteria_list { 198 | match criteria { 199 | FilterCriteria::Logical(logical_filter_criteria) => match logical_filter_criteria { 200 | LogicalFilterCriteria::And(part_filter_criterias) => { 201 | for part_filter_criteria in part_filter_criterias { 202 | let is_part_filter_matched = 203 | exec_part_filter(part_filter_criteria, session)?; 204 | if !is_part_filter_matched { 205 | return Ok(false); 206 | } 207 | } 208 | return Ok(true); 209 | } 210 | LogicalFilterCriteria::Or(part_filter_criterias) => { 211 | for part_filter_criteria in part_filter_criterias { 212 | let is_part_filter_matched = 213 | exec_part_filter(part_filter_criteria, session)?; 214 | if is_part_filter_matched { 215 | return Ok(true); 216 | } 217 | } 218 | return Ok(false); 219 | } 220 | }, 221 | FilterCriteria::PartFilterCriteria(part_filter_criteria) => { 222 | trace!( 223 | "executing part filter criteria match for \n {:#?}", 224 | part_filter_criteria 225 | ); 226 | return exec_part_filter(part_filter_criteria, session); 227 | } 228 | } 229 | } 230 | 231 | // return true if no criteria is present to indicate a match 232 | Ok(true) 233 | } 234 | -------------------------------------------------------------------------------- /dakia/src/qe/query.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::error::{DakiaError, DakiaResult, Error}; 6 | 7 | pub type Map = HashMap; 8 | pub type Array = Vec; 9 | pub type Query = Map; 10 | 11 | #[derive(PartialEq, Debug)] 12 | pub enum Operator { 13 | And, // logical and 14 | Or, // logical or 15 | Eq, // equal to 16 | Ne, // not equal to 17 | In, // in array 18 | Nin, // not in array 19 | Contains, // substring present 20 | NotContains, // sub strig not present, 21 | StartsWith, // text starts with 22 | NotStartWith, // text not starts with 23 | EndsWith, // text ends with 24 | NotEndsWith, // text not ends 
with 25 | Exists, // value exists 26 | Matches, // value matches specified regex 27 | } 28 | 29 | impl TryFrom<&str> for Operator { 30 | type Error = Error; 31 | 32 | fn try_from(value: &str) -> Result { 33 | match value { 34 | "$and" => Ok(Self::And), 35 | "$or" => Ok(Self::Or), 36 | "$eq" => Ok(Self::Eq), 37 | "$not_eq" => Ok(Self::Ne), 38 | "$in" => Ok(Self::In), 39 | "$not_in" => Ok(Self::Nin), 40 | "$exists" => Ok(Self::Exists), 41 | "$matches" => Ok(Self::Matches), 42 | "$contains" => Ok(Self::Contains), 43 | "$not_contains" => Ok(Self::NotContains), 44 | "$starts_with" => Ok(Self::StartsWith), 45 | "$not_starts_with" => Ok(Self::NotStartWith), 46 | "$ends_with" => Ok(Self::EndsWith), 47 | "$not_ends_with" => Ok(Self::NotEndsWith), 48 | _ => return Err(*DakiaError::create_unknown_msg("Invalid operator!")), 49 | } 50 | } 51 | } 52 | 53 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] 54 | #[serde(untagged)] 55 | pub enum Value { 56 | Scaler(Scaler), 57 | Composite(Composite), 58 | } 59 | 60 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] 61 | #[serde(untagged)] 62 | pub enum Scaler { 63 | String(String), 64 | I64(i64), 65 | Bool(bool), 66 | } 67 | 68 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] 69 | #[serde(untagged)] 70 | pub enum Composite { 71 | Map(Map), 72 | Vector(Array), 73 | } 74 | 75 | // fields of enum SupplierValue should be equivalent to Scaler enum fields of Query 76 | #[derive(Debug)] 77 | pub enum SupplierValue<'a> { 78 | I32(i32), 79 | // TODO: change Str to byte to support non UTF-8 characters 80 | Str(&'a str), 81 | None, 82 | } 83 | 84 | pub fn extract_key_str_or_err<'a>(query: &'a Query, qkey: &'a str) -> DakiaResult<&'a str> { 85 | let mismatch_err = DakiaError::i_explain(format!("mismatched value type for key {}", qkey)); 86 | 87 | match query.get(qkey) { 88 | Some(qval) => match qval { 89 | Value::Scaler(scaler) => match scaler { 90 | Scaler::String(strval) => Ok(strval), 91 | _ => 
Err(mismatch_err), 92 | }, 93 | _ => Err(mismatch_err), 94 | }, 95 | None => Err(DakiaError::i_explain(format!( 96 | "Can not extract key {}", 97 | qkey 98 | ))), 99 | } 100 | } 101 | 102 | fn get_str_from_scaler(scaler: &Scaler) -> String { 103 | match scaler { 104 | Scaler::String(strval) => strval.to_string(), 105 | Scaler::I64(intval) => intval.to_string(), 106 | Scaler::Bool(boolval) => boolval.to_string(), 107 | } 108 | } 109 | 110 | pub fn extract_vec_bytes_or_err(val: &Value) -> DakiaResult> { 111 | match val { 112 | Value::Scaler(scaler) => Ok(get_str_from_scaler(scaler).as_bytes().to_vec()), 113 | Value::Composite(composite) => Err(DakiaError::i_explain(format!( 114 | "Expected a scaler value, found {:?}", 115 | composite 116 | ))), 117 | } 118 | } 119 | 120 | pub fn extract_key_vec_bytes(query: &Query, key: &str) -> DakiaResult>> { 121 | match query.get(key) { 122 | Some(val) => match val { 123 | Value::Scaler(scaler) => Ok(Some(get_str_from_scaler(scaler).as_bytes().to_vec())), 124 | Value::Composite(composite) => Err(DakiaError::i_explain(format!( 125 | "Expected a scaler value, found {:?}", 126 | composite 127 | ))), 128 | }, 129 | None => Ok(None), 130 | } 131 | } 132 | 133 | pub fn extract_vec_or_err(val: &Value) -> DakiaResult<&Vec> { 134 | match val { 135 | Value::Scaler(scaler) => Err(DakiaError::i_explain(format!( 136 | "Expected a vector value, found {:?}", 137 | scaler 138 | ))), 139 | Value::Composite(composite) => match composite { 140 | Composite::Map(hash_map) => Err(DakiaError::i_explain(format!( 141 | "Expected a vector value, found {:?}", 142 | hash_map 143 | ))), 144 | Composite::Vector(values) => Ok(values), 145 | }, 146 | } 147 | } 148 | 149 | pub fn extract_string_or_err(val: &Value) -> DakiaResult { 150 | match val { 151 | Value::Scaler(scaler) => Ok(get_str_from_scaler(scaler)), 152 | Value::Composite(composite) => Err(DakiaError::i_explain(format!( 153 | "Expected a string value, found {:?}", 154 | composite 155 | ))), 156 | } 
157 | } 158 | 159 | pub fn extract_bool_or_err(val: &Value) -> DakiaResult { 160 | match val { 161 | Value::Scaler(scaler) => match scaler { 162 | Scaler::Bool(boolval) => Ok(boolval.to_owned()), 163 | _ => Err(DakiaError::i_explain(format!( 164 | "Expected a boolean value, {:?}", 165 | scaler 166 | ))), 167 | }, 168 | Value::Composite(composite) => Err(DakiaError::i_explain(format!( 169 | "Expected a boolean value, found {:?}", 170 | composite 171 | ))), 172 | } 173 | } 174 | 175 | pub fn extract_key_i64_or_err(query: &Query, key: &str) -> DakiaResult { 176 | match query.get(key) { 177 | Some(val) => match val { 178 | Value::Scaler(scaler) => match scaler { 179 | Scaler::String(string) => Err(DakiaError::i_explain(format!( 180 | "Key '{key}' expected an integer but found {:?}", 181 | string 182 | ))), 183 | Scaler::I64(i64) => Ok(*i64), 184 | Scaler::Bool(boolean) => Err(DakiaError::i_explain(format!( 185 | "Key '{key}' expected an integer but found {:?}", 186 | boolean 187 | ))), 188 | }, 189 | Value::Composite(composite) => Err(DakiaError::i_explain(format!( 190 | "Key '{key}' expected an integer but found {:?}", 191 | composite 192 | ))), 193 | }, 194 | None => Err(DakiaError::i_explain(format!( 195 | "Key '{key}' expected an integer but found nothing", 196 | ))), 197 | } 198 | } 199 | 200 | #[cfg(test)] 201 | mod tests { 202 | use super::*; 203 | 204 | #[test] 205 | fn test_operator_try_from() { 206 | assert_eq!(Operator::try_from("$and").unwrap(), Operator::And); 207 | assert_eq!(Operator::try_from("$or").unwrap(), Operator::Or); 208 | assert_eq!(Operator::try_from("$eq").unwrap(), Operator::Eq); 209 | assert_eq!(Operator::try_from("$not_eq").unwrap(), Operator::Ne); 210 | assert_eq!(Operator::try_from("$in").unwrap(), Operator::In); 211 | assert_eq!(Operator::try_from("$not_in").unwrap(), Operator::Nin); 212 | assert_eq!(Operator::try_from("$exists").unwrap(), Operator::Exists); 213 | assert_eq!(Operator::try_from("$matches").unwrap(), Operator::Matches); 
214 | assert_eq!(Operator::try_from("$contains").unwrap(), Operator::Contains); 215 | assert_eq!( 216 | Operator::try_from("$not_contains").unwrap(), 217 | Operator::NotContains 218 | ); 219 | assert_eq!( 220 | Operator::try_from("$starts_with").unwrap(), 221 | Operator::StartsWith 222 | ); 223 | assert_eq!( 224 | Operator::try_from("$not_starts_with").unwrap(), 225 | Operator::NotStartWith 226 | ); 227 | assert_eq!( 228 | Operator::try_from("$ends_with").unwrap(), 229 | Operator::EndsWith 230 | ); 231 | assert_eq!( 232 | Operator::try_from("$not_ends_with").unwrap(), 233 | Operator::NotEndsWith 234 | ); 235 | 236 | assert!(Operator::try_from("$invalid").is_err()); 237 | } 238 | 239 | #[test] 240 | fn test_value_serialization() { 241 | let string_value = Value::Scaler(Scaler::String("hello".to_string())); 242 | let yaml = serde_yaml::to_string(&string_value).unwrap(); 243 | assert_eq!(yaml.trim(), "hello"); 244 | } 245 | 246 | #[test] 247 | fn test_value_deserialization() { 248 | let yaml = "hello"; 249 | let value: Value = serde_yaml::from_str(yaml).unwrap(); 250 | assert_eq!(value, Value::Scaler(Scaler::String("hello".to_string()))); 251 | } 252 | 253 | #[test] 254 | fn test_composite_map() { 255 | let mut map = Map::new(); 256 | map.insert("key".to_string(), Value::Scaler(Scaler::I64(42))); 257 | let composite = Composite::Map(map); 258 | let yaml = serde_yaml::to_string(&composite).unwrap(); 259 | assert!(yaml.contains("42")); 260 | } 261 | 262 | #[test] 263 | fn test_composite_vector() { 264 | let array = Array::from([ 265 | Value::Scaler(Scaler::Bool(true)), 266 | Value::Scaler(Scaler::I64(10)), 267 | ]); 268 | let composite = Composite::Vector(array); 269 | let yaml = serde_yaml::to_string(&composite).unwrap(); 270 | assert!(yaml.contains("true")); 271 | assert!(yaml.contains("10")); 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 17 | 18 | 19 | 20 | ![1](https://github.com/user-attachments/assets/9348db35-f589-4dc4-9a03-24924d6d8f2d) 21 | 22 | # Dakia: An API gateway tailored for modern distributed systems 23 | 24 | **Dakia** is a high-performance API gateway built with Rust, designed for low-latency request processing. Dakia supports **dynamic configuration updates**, allowing seamless changes to routing rules, authentication settings, rate limits, and other gateway policies **without requiring a restart**. This ensures high availability and adaptability in rapidly evolving microservices architectures. Additional features include request routing, load balancing, caching, and fault tolerance, making it a robust solution for modern distributed systems. 25 | 26 | > It's under construction 🦺 🪚 🏗️ 🚧 🔨 27 | 28 | ## Feature highlights 29 | 30 | - **Configurable**: Easily manage API configurations using various formats like YAML, JSON, and HTTP API calls. 
- **Extensible**: Add new functionality with support for custom middleware and plugins, written in any programming language (Rust, Java, C++, etc.).
- **Fully Programmable**: Tailor the API Gateway to your specific needs with custom plugins and middleware in multiple languages.
- **Zero Downtime Upgrades**: Perform upgrades and restarts without affecting the availability of your services.
- **Dynamic Middleware**: Add, remove, or modify middleware on the fly without disrupting service.
- **Request and Response Management**: Modify requests before they reach the upstream or read/write responses to meet your application's needs.
- **Real-Time Configuration**: Modify your gateway configuration in real time with no downtime, using HTTP API calls.

Dakia ensures your services stay performant, reliable, and highly customizable, giving you full control.

## Limitations ☠️

> These limitations will be addressed over time as we continue to improve dakia.

- Currently supports only `UTF-8` character encoding.
- Only the round-robin load balancing algorithm is available at the moment.
- IPv6 addresses are not supported at this time; only IPv4 is supported.
- Currently it supports only the `HTTP` protocol.

## Reasons to use `Dakia`

- **Security** - Built with Rust, Dakia ensures enhanced memory safety and reduces vulnerabilities compared to services written in C/C++.
- **Performance** - Powered by [pingora](https://github.com/cloudflare/pingora), a battle-tested protocol implementation that has been serving more than **_40M+_** internet requests per second for several years.
- **Customization** - If you need ultimate customization, you can configure, extend, and even program it further in multiple languages.
54 | - **Asynchronous and Multi-Threaded** – Dakia is designed to handle tasks concurrently, ensuring efficient and high-performance request processing by default. 55 | 56 | **_Benchmark of Dakia on a Local Machine (8 CPU Cores, 16GB RAM)_** 57 | 58 | ```txt 59 | Performance test: 10s @ http://0.0.0.0:80 60 | - 1 thread, 100 concurrent connections 61 | 62 | Thread Statistics: 63 | - Average Latency: 699.64µs (±125.23µs) 64 | - Maximum Latency: 3.77ms 65 | - Requests per Second: 76.09k (±3.98k) 66 | - Peak Requests per Second: 81.89k 67 | 68 | Total Requests: 764,590 in 10.10s 69 | Data Transferred: 80.94MB 70 | Throughput: 75,678.12 requests/sec 71 | Transfer Rate: 8.01MB/sec 72 | ``` 73 | 74 | ## Architecture 75 | 76 | ![flow](https://github.com/user-attachments/assets/581b8dd2-c313-4a38-85a1-fd1429104f6a) 77 | 78 | ## Getting started 79 | 80 | - See our [quick starting guide](/docs/quick_start.md) 81 | - See our [docs](/docs/README.md) 82 | 83 | ## 📊 Progress Tracker 84 | 85 | [Dakia Configuration Sample](https://github.com/ats1999/dakia/blob/main/docs/config.sample.yaml) 86 | 87 | | Task | Status | 88 | | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- | 89 | | Configurable(YAML + JSON) | Done ✅ | 90 | | Virtual Host | Done ✅ | 91 | | Wild card host matching ([Wiki](https://en.wikipedia.org/wiki/Matching_wildcards)) | Done ✅ | 92 | | Wild card route ([Wiki](https://en.wikipedia.org/wiki/Matching_wildcards)) | Done ✅ | 93 | | Proxy | Done ✅ | 94 | | HTTP Protocol Suport | Done ✅ | 95 | | [Upstream SSL support](https://en.wikipedia.org/wiki/Server_Name_Indication) | Done ✅ | 96 | | Load Balancer | Done ✅ | 97 | | Filter (MongoDB like query support) | Done ✅ | 98 | | Dakia CLI | Done ✅ | 99 | | [PCRE](https://www.pcre.org/) support for pattern matching | Done ✅ | 100 | | Extension, Interceptor & Interceptions Phases (Inbuilt Rust) 
| Done ✅ | 101 | | Declarative filter support [(Allows to use MongoDB like query syntax for filtering HTTP requests)](https://github.com/ats1999/dakia/blob/main/docs/config.sample.yaml) | Done ✅ | 102 | | [FFI](https://en.wikipedia.org/wiki/Foreign_function_interface) Support for interceptor | Pending | 103 | | [UDS Support](https://man7.org/linux/man-pages/man7/unix.7.html) | Pending | 104 | | Load Balancer Algorithms (Least connection, Least response time, IP/Url hash) | Pending | 105 | | SSL Support | Pending | 106 | | Certbot Integration | Pending | 107 | | Controller (API to manage dakia over REST) | Done ✅ | 108 | | TCP/UDP Proxy | Pending | 109 | | Web Socket Proxy | Pending | 110 | | gRPC Proxy | Pending | 111 | | Docs | In-Progress 🚀 | 112 | 113 | ### Load Balancing Algorithm 114 | 115 | | Algorithm | Status | 116 | | ------------------- | ------- | 117 | | Round robin | Done ✅ | 118 | | Least connection | Pending | 119 | | Least response time | Pending | 120 | | IP/URL hash | Pending | 121 | 122 | ### Interceptor 123 | 124 | | Interceptor | Description | Status | 125 | | ------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------- | 126 | | Server Version | Append server version into http response header. E.g `Dakia/1.0.1` | Done ✅ | 127 | | Request ID | Append request id(UUID) into upstream request and downstream response headers. | Done ✅ | 128 | | Basic Auth | [Basic Authentication](https://en.wikipedia.org/wiki/Basic_access_authentication) | Done ✅ | 129 | | Basic Auth ( External Storage Integration ) | Use external storage for storing user name and password, support hashing. | Pending | 130 | | JWT Auth | [Support JWT authentication](https://jwt.io/) | Pending | 131 | | Use File | Read data from a file and return its contents as the response. If the file is not found, respond with a 404 error. 
| Done ✅ | 132 | | Use File ( Path rewrite support ) | Allow to rewrite HTTP request path | Pending | 133 | | Try File | Read data from a file and return its contents as the response. If the file is not found, make request to upstream, write response to file and then serve response. | Pending | 134 | | Controller | Allow to update dakia configuration in **_YAML/JSON_** format via REST endpoint without restarting the gateway | Done ✅ | 135 | | Rate Limiter | Token bucket rate limiter algorithm | Done ✅ | 136 | | Prometheus Integration | Expose server interval metric using prometheus (New TCP connection, Reused TCP connection, TCP connection failure, etc) | Pending | 137 | -------------------------------------------------------------------------------- /dakia/src/gateway/filter/query2filter.rs: -------------------------------------------------------------------------------- 1 | use std::vec; 2 | 3 | use crate::{ 4 | error::{DakiaError, DakiaResult}, 5 | gateway::filter::operator::{ 6 | Header, HeaderCriteria, LogicalCriteriaOperator, PatternOperator, QueryCriteria, 7 | RelationalOperator, SetOperator, 8 | }, 9 | qe::query::{ 10 | self, extract_bool_or_err, extract_string_or_err, extract_vec_bytes_or_err, 11 | extract_vec_or_err, Query, Value, 12 | }, 13 | shared::pattern_matcher::Pcre2PatternMatcher, 14 | }; 15 | 16 | use super::{ 17 | operator::{ 18 | CriteriaOperator, FilterCriteria, LogicalFilterCriteria, PartCriteriaOperator, 19 | PartFilterCriteria, 20 | }, 21 | Filter, 22 | }; 23 | 24 | const LOGICAL_OPERATOR: [&str; 2] = ["$and", "$or"]; 25 | const HTTP_PARTS: [&str; 5] = ["scheme", "path", "method", "header", "query"]; 26 | 27 | pub fn query2filter(query: &Query) -> DakiaResult { 28 | let mut filter = Filter { 29 | criteria_list: vec![], 30 | }; 31 | 32 | for (part, part_filter) in query { 33 | if is_logical_filter_criteria(&part) { 34 | let part_filter_criterias = build_part_filter_criteria_list(part_filter)?; 35 | let filter_criteria = if part.eq("$and") { 
36 | FilterCriteria::Logical(LogicalFilterCriteria::And(part_filter_criterias)) 37 | } else { 38 | FilterCriteria::Logical(LogicalFilterCriteria::Or(part_filter_criterias)) 39 | }; 40 | 41 | filter.criteria_list.push(filter_criteria); 42 | continue; 43 | } 44 | 45 | if is_part_filter_criteria(&part) { 46 | let part_filter_criteria = build_part_filter_criteria(part, part_filter)?; 47 | filter 48 | .criteria_list 49 | .push(FilterCriteria::PartFilterCriteria(part_filter_criteria)); 50 | continue; 51 | } 52 | 53 | return Err(DakiaError::i_explain(format!( 54 | "Invalid filter param {part}" 55 | ))); 56 | } 57 | 58 | Ok(filter) 59 | } 60 | 61 | fn build_part_filter_criteria_list(part_filter: &Value) -> DakiaResult> { 62 | match part_filter { 63 | Value::Scaler(scaler) => Err(DakiaError::i_explain(format!( 64 | "Invalid part filter, map is expected found {:?}", 65 | scaler 66 | ))), 67 | Value::Composite(composite) => match composite { 68 | query::Composite::Map(hash_map) => { 69 | let mut part_filter_criteria_list: Vec = vec![]; 70 | 71 | for (part, filter) in hash_map { 72 | let part_filter_criteria = build_part_filter_criteria(part, filter)?; 73 | part_filter_criteria_list.push(part_filter_criteria); 74 | } 75 | 76 | Ok(part_filter_criteria_list) 77 | } 78 | query::Composite::Vector(vector) => Err(DakiaError::i_explain(format!( 79 | "Invalid part filter, map is expected found {:?}", 80 | vector 81 | ))), 82 | }, 83 | } 84 | } 85 | 86 | fn build_part_filter_criteria(part: &str, part_filter: &Value) -> DakiaResult { 87 | if is_part_nested(part, "header") { 88 | let nested_part_name = get_nested_part_name(part, "header"); 89 | let header_criteria = HeaderCriteria { 90 | name: Header::from(nested_part_name.as_str()), 91 | operator: build_part_criteria_operator_list(part_filter)?, 92 | }; 93 | 94 | return Ok(PartFilterCriteria::Header(header_criteria)); 95 | } 96 | 97 | if is_part_nested(part, "query") { 98 | let nested_part_name = get_nested_part_name(part, "query"); 99 
| let query_criteria = QueryCriteria { 100 | name: nested_part_name.as_bytes().to_vec(), 101 | operator: build_part_criteria_operator_list(part_filter)?, 102 | }; 103 | 104 | return Ok(PartFilterCriteria::Query(query_criteria)); 105 | } 106 | 107 | let part_criteria_operator_list = build_part_criteria_operator_list(part_filter)?; 108 | if is_part(part, "path") { 109 | return Ok(PartFilterCriteria::Path(part_criteria_operator_list)); 110 | } 111 | 112 | if is_part(part, "method") { 113 | return Ok(PartFilterCriteria::Method(part_criteria_operator_list)); 114 | } 115 | 116 | if is_part(part, "scheme") { 117 | return Ok(PartFilterCriteria::Scheme(part_criteria_operator_list)); 118 | } 119 | 120 | Err(DakiaError::i_explain(format!( 121 | "Invalid part filter {}", 122 | part 123 | ))) 124 | } 125 | 126 | fn build_sacler_part_criteria_operator(scaler: &query::Scaler) -> PartCriteriaOperator { 127 | let value = match scaler { 128 | query::Scaler::String(strval) => strval.to_string(), 129 | query::Scaler::I64(intval) => intval.to_string(), 130 | query::Scaler::Bool(boolval) => boolval.to_string(), 131 | }; 132 | 133 | PartCriteriaOperator::CriteriaOperator(CriteriaOperator::Relation(RelationalOperator::Eq( 134 | value.as_bytes().to_vec(), 135 | ))) 136 | } 137 | 138 | fn build_set_values(val: &Value) -> DakiaResult>> { 139 | let vector = extract_vec_or_err(val)?; 140 | let mut bytes_vector: Vec> = vec![]; 141 | for val in vector { 142 | let bytes = extract_vec_bytes_or_err(val)?; 143 | bytes_vector.push(bytes); 144 | } 145 | Ok(bytes_vector) 146 | } 147 | 148 | fn build_criteria_operator(key: &str, value: &Value) -> DakiaResult { 149 | let criteria_operator = match key.to_lowercase().as_str() { 150 | // relational operator 151 | "$eq" => { 152 | let bytes = extract_vec_bytes_or_err(value)?; 153 | CriteriaOperator::Relation(RelationalOperator::Eq(bytes)) 154 | } 155 | "$ne" => { 156 | let bytes = extract_vec_bytes_or_err(value)?; 157 | 
CriteriaOperator::Relation(RelationalOperator::Ne(bytes)) 158 | } 159 | 160 | // set operator 161 | "$in" => { 162 | let set = build_set_values(value)?; 163 | CriteriaOperator::Set(SetOperator::In(set)) 164 | } 165 | "$nin" => { 166 | let set = build_set_values(value)?; 167 | CriteriaOperator::Set(SetOperator::Nin(set)) 168 | } 169 | 170 | // pattern operator 171 | "$contains" => { 172 | let bytes = extract_vec_bytes_or_err(value)?; 173 | CriteriaOperator::Pattern(PatternOperator::Contains(bytes)) 174 | } 175 | "$not_contains" => { 176 | let bytes = extract_vec_bytes_or_err(value)?; 177 | CriteriaOperator::Pattern(PatternOperator::NotContains(bytes)) 178 | } 179 | "$starts_with" => { 180 | let bytes = extract_vec_bytes_or_err(value)?; 181 | CriteriaOperator::Pattern(PatternOperator::StartsWith(bytes)) 182 | } 183 | "$not_starts_with" => { 184 | let bytes = extract_vec_bytes_or_err(value)?; 185 | CriteriaOperator::Pattern(PatternOperator::NotStartWith(bytes)) 186 | } 187 | "$ends_with" => { 188 | let bytes = extract_vec_bytes_or_err(value)?; 189 | CriteriaOperator::Pattern(PatternOperator::EndsWith(bytes)) 190 | } 191 | "$not_ends_with" => { 192 | let bytes = extract_vec_bytes_or_err(value)?; 193 | CriteriaOperator::Pattern(PatternOperator::NotEndsWith(bytes)) 194 | } 195 | "$matches" => { 196 | let pattern = extract_string_or_err(value)?; 197 | let pattern_matcher = Pcre2PatternMatcher::build(&pattern)?; 198 | CriteriaOperator::Pattern(PatternOperator::Matches(pattern_matcher)) 199 | } 200 | 201 | // existance operator 202 | "$exists" => { 203 | let exists = extract_bool_or_err(value)?; 204 | CriteriaOperator::Exists(exists) 205 | } 206 | 207 | _ => return Err(DakiaError::i_explain(format!("Invalid operator {key}"))), 208 | }; 209 | 210 | Ok(criteria_operator) 211 | } 212 | 213 | fn build_criteria_operators(val: &Value) -> DakiaResult> { 214 | match val { 215 | Value::Scaler(scaler) => Err(DakiaError::i_explain(format!( 216 | "Invalid operator, expected a map and 
found {:?}", 217 | scaler 218 | ))), 219 | Value::Composite(composite) => match composite { 220 | query::Composite::Map(hash_map) => { 221 | let mut criteria_operators: Vec = vec![]; 222 | 223 | for (k, v) in hash_map { 224 | let criteria_operator = build_criteria_operator(k, v)?; 225 | criteria_operators.push(criteria_operator); 226 | } 227 | 228 | Ok(criteria_operators) 229 | } 230 | query::Composite::Vector(vector) => Err(DakiaError::i_explain(format!( 231 | "Invalid operator, expected a map and found {:?}", 232 | vector 233 | ))), 234 | }, 235 | } 236 | } 237 | 238 | fn build_part_criteria_operator_list(val: &Value) -> DakiaResult> { 239 | match val { 240 | Value::Scaler(scaler) => Ok(vec![build_sacler_part_criteria_operator(scaler)]), 241 | Value::Composite(composite) => match composite { 242 | query::Composite::Map(hash_map) => { 243 | let mut part_criteria_operators: Vec = vec![]; 244 | for (key, operator) in hash_map { 245 | if key == "$and" { 246 | let operators = build_criteria_operators(operator)?; 247 | let and_operator = PartCriteriaOperator::LogicalCriteriaOperator( 248 | LogicalCriteriaOperator::And(operators), 249 | ); 250 | part_criteria_operators.push(and_operator); 251 | } else if key == "$or" { 252 | let operators = build_criteria_operators(operator)?; 253 | let and_operator = PartCriteriaOperator::LogicalCriteriaOperator( 254 | LogicalCriteriaOperator::Or(operators), 255 | ); 256 | part_criteria_operators.push(and_operator); 257 | } else { 258 | let criteria_operator = build_criteria_operator(key, operator)?; 259 | let part_criteria_operator = 260 | PartCriteriaOperator::CriteriaOperator(criteria_operator); 261 | part_criteria_operators.push(part_criteria_operator); 262 | } 263 | } 264 | return Ok(part_criteria_operators); 265 | } 266 | query::Composite::Vector(vector) => { 267 | return Err(DakiaError::i_explain(format!( 268 | "Invalid operator {:?}", 269 | vector 270 | ))) 271 | } 272 | }, 273 | } 274 | } 275 | fn 
is_logical_filter_criteria(key: &str) -> bool { 276 | LOGICAL_OPERATOR.contains(&key) 277 | } 278 | 279 | fn is_part_filter_criteria(key: &str) -> bool { 280 | key.starts_with("ds.") 281 | || key.starts_with("req.") 282 | || key.starts_with("header.") 283 | || HTTP_PARTS.contains(&key) 284 | } 285 | 286 | fn is_part_nested(part_path: &str, http_part: &str) -> bool { 287 | part_path.starts_with(format!("ds.req.{http_part}.").as_str()) 288 | || part_path.starts_with(format!("req.{http_part}.").as_str()) 289 | || part_path.starts_with(format!("{http_part}.").as_str()) 290 | } 291 | 292 | fn get_nested_part_name(part_path: &str, http_part: &str) -> String { 293 | if part_path.starts_with(format!("ds.req.{http_part}.").as_str()) { 294 | part_path.replace(format!("ds.req.{http_part}.").as_str(), "") 295 | } else if part_path.starts_with(format!("req.{http_part}.").as_str()) { 296 | part_path.replace(format!("req.{http_part}.").as_str(), "") 297 | } else if part_path.starts_with(format!("{http_part}.").as_str()) { 298 | part_path.replace(format!("{http_part}.").as_str(), "") 299 | } else { 300 | // it will never occur 301 | "".to_string() 302 | } 303 | } 304 | 305 | fn is_part(part_path: &str, http_part: &str) -> bool { 306 | part_path.starts_with(format!("ds.req.{http_part}").as_str()) 307 | || part_path.starts_with(format!("req.{http_part}").as_str()) 308 | || part_path.starts_with(format!("{http_part}").as_str()) 309 | } 310 | 311 | #[cfg(test)] 312 | mod tests { 313 | 314 | use super::*; 315 | 316 | #[test] 317 | fn test_filter() { 318 | let yaml = r#" 319 | $or: 320 | ds.req.method: GET 321 | path: 322 | $or: 323 | $eq: /hello 324 | $matches: bolo 325 | $starts_with: fuck 326 | $and: 327 | $ends_with: fuck 328 | ds.req.method: GET 329 | $and: 330 | scheme: 331 | $or: 332 | $ne: https 333 | $in: 334 | - http 335 | - https 336 | $and: 337 | header.content-type: 338 | $contains: application/json 339 | scheme: 340 | $matches: https 341 | "#; 342 | 343 | let query: Query 
= serde_yaml::from_str(yaml).unwrap(); 344 | let filter = query2filter(&query).is_ok(); 345 | assert!(filter); 346 | } 347 | } 348 | -------------------------------------------------------------------------------- /dakia/src/error/mod.rs: -------------------------------------------------------------------------------- 1 | #![warn(clippy::all)] 2 | //! The library to provide the struct to represent errors in pingora. 3 | 4 | pub use std::error::Error as ErrorTrait; 5 | use std::fmt; 6 | use std::fmt::Debug; 7 | use std::result::Result as StdResult; 8 | 9 | mod immut_str; 10 | mod result; 11 | pub use immut_str::ImmutStr; 12 | pub use result::*; 13 | 14 | /// The boxed [Error], the desired way to pass [Error] 15 | pub type BError = Box; 16 | pub type BErrorStd = Box; 17 | 18 | /// Syntax sugar for `std::Result` 19 | pub type Result = StdResult; 20 | 21 | /// The struct that represents an error 22 | #[derive(Debug)] 23 | pub struct DakiaError { 24 | /// the type of error 25 | pub etype: ErrorType, 26 | /// the source of error: from upstream, downstream or internal 27 | pub source: ErrorSource, 28 | /// chain to the cause of this error 29 | pub cause: Option>, 30 | /// an arbitrary string that explains the context when the error happens 31 | pub context: Option, 32 | } 33 | 34 | /// The source of the error 35 | #[derive(Debug, PartialEq, Eq, Clone)] 36 | pub enum ErrorSource { 37 | /// The error is caused by the remote server 38 | Upstream, 39 | /// The error is caused by the remote client 40 | Downstream, 41 | /// The error is caused by the internal logic 42 | Internal, 43 | /// Error source unknown or to be set 44 | Unknown, 45 | } 46 | 47 | impl ErrorSource { 48 | /// for displaying the error source 49 | pub fn as_str(&self) -> &str { 50 | match self { 51 | Self::Upstream => "Upstream", 52 | Self::Downstream => "Downstream", 53 | Self::Internal => "Internal", 54 | Self::Unknown => "Unknown", 55 | } 56 | } 57 | } 58 | 59 | /// Predefined type of errors 60 | 
#[derive(Debug, PartialEq, Eq, Clone)] 61 | pub enum ErrorType { 62 | // error occurred during proxy 63 | ProxyError(u16), 64 | // other errors 65 | InternalError, 66 | // catch all 67 | UnknownError, 68 | } 69 | 70 | impl ErrorType { 71 | pub fn as_str(&self) -> &str { 72 | match self { 73 | ErrorType::ProxyError(_) => "HTTPStatus", 74 | ErrorType::InternalError => "InternalError", 75 | ErrorType::UnknownError => "UnknownError", 76 | } 77 | } 78 | } 79 | 80 | impl DakiaError { 81 | /// Simply create the error. See other functions that provide less verbose interfaces. 82 | #[inline] 83 | pub fn create( 84 | etype: ErrorType, 85 | esource: ErrorSource, 86 | context: Option, 87 | cause: Option>, 88 | ) -> BError { 89 | let de = DakiaError { 90 | etype: etype, 91 | source: esource, 92 | cause, 93 | context, 94 | }; 95 | 96 | Box::new(Error::DakiaError(de)) 97 | } 98 | 99 | /// Simply create the error. See other functions that provide less verbose interfaces. 100 | #[inline] 101 | pub fn create_internal() -> BError { 102 | let de = DakiaError { 103 | etype: ErrorType::InternalError, 104 | source: ErrorSource::Internal, 105 | cause: None, 106 | context: None, 107 | }; 108 | 109 | Box::new(Error::DakiaError(de)) 110 | } 111 | 112 | /// Simply create the error. See other functions that provide less verbose interfaces. 113 | #[inline] 114 | pub fn create_internal_context(context: &'static str) -> BError { 115 | let de = DakiaError { 116 | etype: ErrorType::InternalError, 117 | source: ErrorSource::Internal, 118 | cause: None, 119 | context: Some(ImmutStr::Static(context)), 120 | }; 121 | 122 | Box::new(Error::DakiaError(de)) 123 | } 124 | 125 | /// Simply create the error. See other functions that provide less verbose interfaces. 
126 | #[inline] 127 | pub fn create_unknown_context(context: ImmutStr) -> BError { 128 | let de = DakiaError { 129 | etype: ErrorType::UnknownError, 130 | source: ErrorSource::Unknown, 131 | cause: None, 132 | context: Some(context), 133 | }; 134 | 135 | Box::new(Error::DakiaError(de)) 136 | } 137 | 138 | /// Simply create the error. See other functions that provide less verbose interfaces. 139 | #[inline] 140 | pub fn create_unknown_msg(msg: &str) -> BError { 141 | let context = ImmutStr::Owned(msg.to_string().into_boxed_str()); 142 | 143 | let de = DakiaError { 144 | etype: ErrorType::UnknownError, 145 | source: ErrorSource::Unknown, 146 | cause: None, 147 | context: Some(context), 148 | }; 149 | 150 | Box::new(Error::DakiaError(de)) 151 | } 152 | 153 | #[inline] 154 | fn do_new(e: ErrorType, s: ErrorSource) -> BError { 155 | Self::create(e, s, None, None) 156 | } 157 | 158 | /// Create an error with the given type 159 | #[inline] 160 | pub fn new(e: ErrorType) -> BError { 161 | Self::do_new(e, ErrorSource::Unknown) 162 | } 163 | 164 | #[inline] 165 | pub fn because, E: Into>>( 166 | e: ErrorType, 167 | context: S, 168 | cause: E, 169 | ) -> BError { 170 | Self::create( 171 | e, 172 | ErrorSource::Unknown, 173 | Some(context.into()), 174 | Some(cause.into()), 175 | ) 176 | } 177 | 178 | // Short for Err(Self::because) 179 | #[inline] 180 | pub fn e_because, E: Into>>( 181 | e: ErrorType, 182 | context: S, 183 | cause: E, 184 | ) -> Result { 185 | Err(Self::because(e, context, cause)) 186 | } 187 | 188 | // Create an error with context but no direct causing error 189 | #[inline] 190 | pub fn explain>(e: ErrorType, context: S) -> BError { 191 | Self::create(e, ErrorSource::Unknown, Some(context.into()), None) 192 | } 193 | 194 | // Create an internal error with context but no direct causing error 195 | #[inline] 196 | pub fn i_explain>(context: S) -> BError { 197 | Self::create( 198 | ErrorType::InternalError, 199 | ErrorSource::Unknown, 200 | Some(context.into()), 
201 | None, 202 | ) 203 | } 204 | 205 | // Short for Err(Self::explain) 206 | #[inline] 207 | pub fn e_explain>(e: ErrorType, context: S) -> Result { 208 | Err(Self::explain(e, context)) 209 | } 210 | 211 | // The new_{up, down, in} functions are to create new errors with source 212 | // {upstream, downstream, internal} 213 | #[inline] 214 | pub fn new_up(e: ErrorType) -> BError { 215 | Self::do_new(e, ErrorSource::Upstream) 216 | } 217 | 218 | #[inline] 219 | pub fn new_down(e: ErrorType) -> BError { 220 | Self::do_new(e, ErrorSource::Downstream) 221 | } 222 | 223 | #[inline] 224 | pub fn new_in(e: ErrorType) -> BError { 225 | Self::do_new(e, ErrorSource::Internal) 226 | } 227 | 228 | // the err_* functions are the same as new_* but return a Result 229 | #[inline] 230 | pub fn err(e: ErrorType) -> Result { 231 | Err(Self::new(e)) 232 | } 233 | 234 | #[inline] 235 | pub fn err_up(e: ErrorType) -> Result { 236 | Err(Self::new_up(e)) 237 | } 238 | 239 | #[inline] 240 | pub fn err_down(e: ErrorType) -> Result { 241 | Err(Self::new_down(e)) 242 | } 243 | 244 | #[inline] 245 | pub fn err_in(e: ErrorType) -> Result { 246 | Err(Self::new_in(e)) 247 | } 248 | 249 | pub fn etype(&self) -> &ErrorType { 250 | &self.etype 251 | } 252 | 253 | pub fn esource(&self) -> &ErrorSource { 254 | &self.source 255 | } 256 | 257 | pub fn reason_str(&self) -> &str { 258 | self.etype.as_str() 259 | } 260 | 261 | pub fn source_str(&self) -> &str { 262 | self.source.as_str() 263 | } 264 | 265 | /// The as_{up, down, in} functions are to change the current errors with source 266 | /// {upstream, downstream, internal} 267 | pub fn as_up(&mut self) { 268 | self.source = ErrorSource::Upstream; 269 | } 270 | 271 | pub fn as_down(&mut self) { 272 | self.source = ErrorSource::Downstream; 273 | } 274 | 275 | pub fn as_in(&mut self) { 276 | self.source = ErrorSource::Internal; 277 | } 278 | 279 | pub fn set_cause>>(&mut self, cause: C) { 280 | self.cause = Some(cause.into()); 281 | } 282 | 283 | pub 
fn set_context>(&mut self, context: T) { 284 | self.context = Some(context.into()); 285 | } 286 | 287 | // Display error but skip the duplicate elements from the error in previous hop 288 | fn chain_display( 289 | &self, 290 | previous: Option<&DakiaError>, 291 | f: &mut fmt::Formatter<'_>, 292 | ) -> fmt::Result { 293 | if previous.map(|p| p.source != self.source).unwrap_or(true) { 294 | write!(f, "{}", self.source.as_str())? 295 | } 296 | if previous.map(|p| p.etype != self.etype).unwrap_or(true) { 297 | write!(f, " {}", self.etype.as_str())? 298 | } 299 | 300 | if let Some(c) = self.context.as_ref() { 301 | write!(f, " context: {}", c)?; 302 | } 303 | if let Some(c) = self.cause.as_ref() { 304 | if let Some(e) = c.downcast_ref::>() { 305 | write!(f, " cause: ")?; 306 | e.chain_display(Some(self), f) 307 | } else { 308 | write!(f, " cause: {}", c) 309 | } 310 | } else { 311 | Ok(()) 312 | } 313 | } 314 | 315 | pub fn to_pingora_error(self) -> Box { 316 | let petype = match self.etype { 317 | ErrorType::InternalError => pingora::ErrorType::InternalError, 318 | ErrorType::ProxyError(status_code) => pingora::ErrorType::HTTPStatus(status_code), 319 | ErrorType::UnknownError => pingora::ErrorType::UnknownError, 320 | }; 321 | 322 | let pesource = match self.source { 323 | ErrorSource::Downstream => pingora::ErrorSource::Downstream, 324 | ErrorSource::Internal => pingora::ErrorSource::Internal, 325 | ErrorSource::Unknown => pingora::ErrorSource::Unset, 326 | ErrorSource::Upstream => pingora::ErrorSource::Upstream, 327 | }; 328 | 329 | let pe = pingora::Error::create( 330 | petype, pesource, None, 331 | // TODO: handle conversion of context 332 | // Some(pingora::ImmutStr::Owned( 333 | // self.context.unwrap().clone().to_string().into_boxed_str(), 334 | // )), 335 | self.cause, 336 | ); 337 | pe 338 | } 339 | } 340 | 341 | impl fmt::Display for DakiaError { 342 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 343 | self.chain_display(None, f) 344 | } 345 | } 
346 | 347 | impl ErrorTrait for DakiaError {} 348 | 349 | // Helper trait to add more context to a given error 350 | pub trait Context { 351 | // Wrap the `Err(E)` in [Result] with more context, the existing E will be the cause. 352 | // This is a shortcut for map_err() + more_context() 353 | fn err_context, F: FnOnce() -> C>(self, context: F) -> Result; 354 | } 355 | 356 | // Helper trait to chain errors with context 357 | pub trait OrErr { 358 | // Wrap the E in [Result] with new [ErrorType] and context, the existing E will be the cause. 359 | // This is a shortcut for map_err() + because() 360 | fn or_err(self, et: ErrorType, context: &'static str) -> Result 361 | where 362 | E: Into>; 363 | 364 | // Similar to or_err(), but takes a closure, which is useful for constructing String. 365 | fn or_err_with, F: FnOnce() -> C>( 366 | self, 367 | et: ErrorType, 368 | context: F, 369 | ) -> Result 370 | where 371 | E: Into>; 372 | 373 | // Replace the E in [Result] with a new [Error] generated from the current error 374 | // This is useful when the current error cannot move out of scope. This is a shortcut for map_err() + explain(). 375 | fn explain_err, F: FnOnce(E) -> C>( 376 | self, 377 | et: ErrorType, 378 | context: F, 379 | ) -> Result; 380 | 381 | // Similar to or_err() but just to surface errors that are not [Error] (where `?` cannot be used directly). 382 | // or_err()/or_err_with() are still preferred because they make the error more readable and traceable. 
383 | fn or_fail(self) -> Result 384 | where 385 | E: Into>; 386 | } 387 | 388 | impl OrErr for Result { 389 | fn or_err(self, et: ErrorType, context: &'static str) -> Result 390 | where 391 | E: Into>, 392 | { 393 | self.map_err(|e| DakiaError::because(et, context, e)) 394 | } 395 | 396 | fn or_err_with, F: FnOnce() -> C>( 397 | self, 398 | et: ErrorType, 399 | context: F, 400 | ) -> Result 401 | where 402 | E: Into>, 403 | { 404 | self.map_err(|e| DakiaError::because(et, context(), e)) 405 | } 406 | 407 | fn explain_err, F: FnOnce(E) -> C>( 408 | self, 409 | et: ErrorType, 410 | exp: F, 411 | ) -> Result { 412 | self.map_err(|e| DakiaError::explain(et, exp(e))) 413 | } 414 | 415 | fn or_fail(self) -> Result 416 | where 417 | E: Into>, 418 | { 419 | self.map_err(|e| DakiaError::because(ErrorType::InternalError, "", e)) 420 | } 421 | } 422 | 423 | // Helper trait to convert an [Option] to an [Error] with context. 424 | pub trait OkOrErr { 425 | fn or_err(self, et: ErrorType, context: &'static str) -> Result; 426 | 427 | fn or_err_with, F: FnOnce() -> C>( 428 | self, 429 | et: ErrorType, 430 | context: F, 431 | ) -> Result; 432 | } 433 | 434 | impl OkOrErr for Option { 435 | // Convert the [Option] to a new [Error] with [ErrorType] and context if None, Ok otherwise. 436 | // This is a shortcut for .ok_or(Error::explain()) 437 | fn or_err(self, et: ErrorType, context: &'static str) -> Result { 438 | self.ok_or(DakiaError::explain(et, context)) 439 | } 440 | 441 | // Similar to to_err(), but takes a closure, which is useful for constructing String. 442 | fn or_err_with, F: FnOnce() -> C>( 443 | self, 444 | et: ErrorType, 445 | context: F, 446 | ) -> Result { 447 | self.ok_or_else(|| DakiaError::explain(et, context())) 448 | } 449 | } 450 | --------------------------------------------------------------------------------