[trifid-api] dnclient endpoint, client updates and trifid-api MVP

c0repwn3r 2023-05-15 13:39:15 -04:00
parent b8c6ddd123
commit 3a2319d0c0
Signed by: core
GPG Key ID: FDBF740DADDCEECF
16 changed files with 1024 additions and 143 deletions

Cargo.lock (generated)
View File

@@ -1034,6 +1034,41 @@ dependencies = [
"zeroize",
]

+[[package]]
+name = "darling"
+version = "0.20.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0558d22a7b463ed0241e993f76f09f30b126687447751a8638587b864e4b3944"
+dependencies = [
+"darling_core",
+"darling_macro",
+]
+
+[[package]]
+name = "darling_core"
+version = "0.20.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb"
+dependencies = [
+"fnv",
+"ident_case",
+"proc-macro2",
+"quote",
+"strsim",
+"syn 2.0.16",
+]
+
+[[package]]
+name = "darling_macro"
+version = "0.20.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a"
+dependencies = [
+"darling_core",
+"quote",
+"syn 2.0.16",
+]
+
[[package]]
name = "der"
version = "0.7.5"
@@ -1044,6 +1079,17 @@ dependencies = [
"zeroize",
]

+[[package]]
+name = "derivative"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
+dependencies = [
+"proc-macro2",
+"quote",
+"syn 1.0.109",
+]
+
[[package]]
name = "derive_more"
version = "0.99.17"
@@ -1111,26 +1157,7 @@ dependencies = [
[[package]]
name = "dnapi-rs"
-version = "0.1.9"
+version = "0.1.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "089055338c55024ba337072022e8d38b37dc018ea5c26e3a5522e61a5fd932f8"
-dependencies = [
-"base64 0.21.0",
-"base64-serde",
-"chrono",
-"log",
-"openssl-sys",
-"rand",
-"reqwest",
-"serde",
-"serde_json",
-"trifid-pki 0.1.10",
-"url",
-]
-
-[[package]]
-name = "dnapi-rs"
-version = "0.1.11"
dependencies = [
"base64 0.21.0",
"base64-serde",
@@ -1141,6 +1168,7 @@ dependencies = [
"reqwest",
"serde",
"serde_json",
+"serde_with",
"trifid-pki 0.1.11",
"url",
]
@@ -1488,11 +1516,11 @@ dependencies = [
[[package]]
name = "hashlink"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa"
+checksum = "0761a1b9491c4f2e3d66aa0f62d0fba0af9a0e2852e4d48ea506632a4b56e6aa"
dependencies = [
-"hashbrown 0.12.3",
+"hashbrown 0.13.2",
]

[[package]]
@@ -1655,6 +1683,12 @@ dependencies = [
"cc",
]

+[[package]]
+name = "ident_case"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
+
[[package]]
name = "idna"
version = "0.3.0"
@@ -1673,6 +1707,7 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
+"serde",
]

[[package]]
@@ -2270,9 +2305,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "c4ec6d5fe0b140acb27c9a0444118cf55bfbb4e0b259739429abb4521dd67c16"
dependencies = [
"unicode-ident",
]
@@ -2850,6 +2885,34 @@ dependencies = [
"serde",
]

+[[package]]
+name = "serde_with"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f02d8aa6e3c385bf084924f660ce2a3a6bd333ba55b35e8590b321f35d88513"
+dependencies = [
+"base64 0.21.0",
+"chrono",
+"hex",
+"indexmap",
+"serde",
+"serde_json",
+"serde_with_macros",
+"time 0.3.21",
+]
+
+[[package]]
+name = "serde_with_macros"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edc7d5d3932fb12ce722ee5e64dd38c504efba37567f0c402f6ca728c3b8b070"
+dependencies = [
+"darling",
+"proc-macro2",
+"quote",
+"syn 2.0.16",
+]
+
[[package]]
name = "serde_yaml"
version = "0.9.21"
@@ -3174,7 +3237,7 @@ dependencies = [
"clap 4.2.7",
"ctrlc",
"dirs 5.0.1",
-"dnapi-rs 0.1.11",
+"dnapi-rs",
"flate2",
"hex",
"ipnet",
@@ -3387,9 +3450,9 @@ dependencies = [
[[package]]
name = "totp-rs"
-version = "5.0.1"
+version = "5.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "332e333b188e843cb4cc477b2911160a533bcfc6e9e488d7bef25011f9e2ba1b"
+checksum = "0ad5e73765ff14ae797c1a61ee0c7beaf21b4e4a0047844300e332c6c24df1fc"
dependencies = [
"base32",
"constant_time_eq",
@@ -3464,7 +3527,8 @@ dependencies = [
"aes-gcm",
"base64 0.21.0",
"chrono",
-"dnapi-rs 0.1.9",
+"derivative",
+"dnapi-rs",
"ed25519-dalek",
"hex",
"ipnet",
@@ -3625,9 +3689,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
-version = "1.3.2"
+version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"
+checksum = "345444e32442451b267fc254ae85a209c64be56d2890e601a0c37ff0c3c5ecd2"
dependencies = [
"getrandom",
"serde",

View File

@@ -1,6 +1,6 @@
[package]
name = "dnapi-rs"
-version = "0.1.11"
+version = "0.1.13"
edition = "2021"
description = "A rust client for the Defined Networking API"
license = "AGPL-3.0-or-later"
@@ -13,6 +13,7 @@ repository = "https://git.e3t.cc/~core/trifid"
[dependencies]
serde = { version = "1.0.159", features = ["derive"] }
+serde_with = "3.0.0"
base64-serde = "0.7.0"
log = "0.4.17"
reqwest = { version = "0.11.16", features = ["blocking", "json"] }

View File

@@ -198,6 +198,8 @@ impl Client {
return Err("Failed to verify signed API result".into());
}

+debug!("deserializing result");
+
let result: DoUpdateResponse = serde_json::from_slice(&result_wrapper.data.message)?;

if result.nonce != update_keys.nonce {
@@ -262,6 +264,7 @@ impl Client {
.http_client
.post(self.server_url.join(ENDPOINT_V1)?)
.body(post_body)
+.header("Content-Type", "application/json")
.send()
.await?;

View File

@@ -271,6 +271,7 @@ impl Client {
.http_client
.post(self.server_url.join(ENDPOINT_V1)?)
.body(post_body)
+.header("Content-Type", "application/json")
.send()?;

match resp.status() {

View File

@@ -2,6 +2,7 @@
use base64_serde::base64_serde_type;
use serde::{Deserialize, Serialize};
+use serde_with::serde_as;

/// The version 1 `DNClient` API endpoint
pub const ENDPOINT_V1: &str = "/v1/dnclient";
@@ -37,8 +38,8 @@ pub struct RequestWrapper {
#[serde(rename = "type")]
/// The type of the message. Used to determine how `value` is encoded
pub message_type: String,
-#[serde(with = "Base64Standard")]
/// A base64-encoded arbitrary message, the type of which is stated in `message_type`
+#[serde(with = "b64_as")]
pub value: Vec<u8>,
/// The timestamp of when this message was sent. Follows the format `%Y-%m-%dT%H:%M:%S.%f%:z`, or:
/// <4-digit year>-<two-digit-month>-<two-digit-day>T<two-digit-hour, 24-hour>:<two-digit-minute>:<two-digit-second>.<nanoseconds, zero-padded><offset with semicolon>
@@ -198,3 +199,26 @@ pub struct APIError {
/// A type alias to a array of `APIErrors`. Just for parity with dnapi.
pub type APIErrors = Vec<APIError>;
mod b64_as {
use serde::{Serialize, Deserialize};
use serde::{Deserializer, Serializer};
use base64::Engine;
pub fn serialize<S: Serializer>(v: &Vec<u8>, s: S) -> Result<S::Ok, S::Error> {
let base64 = base64::engine::general_purpose::STANDARD.encode(v);
<String>::serialize(&base64, s)
}
pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
let base64 = <Option<String>>::deserialize(d)?;
match base64 {
Some(v) => {
base64::engine::general_purpose::STANDARD.decode(v.as_bytes())
.map(|v| v)
.map_err(|e| serde::de::Error::custom(e))
},
None => Ok(vec![]),
}
}
}
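Note: a minimal standalone sketch (not part of this commit) of the wire shape the `b64_as` codec above produces for `RequestWrapper`: the raw `value` bytes travel as a standard-base64 string inside the JSON body, next to the `type` and `timestamp` fields. The payload bytes and timestamp below are placeholders, not real API data.

// Sketch only: shows the JSON produced once `value` goes through `b64_as`.
use base64::Engine;
use serde_json::json;

fn main() {
    // hypothetical inner message bytes
    let value: Vec<u8> = br#"{"nonce":"..."}"#.to_vec();
    let encoded = base64::engine::general_purpose::STANDARD.encode(&value);
    let wire = json!({
        "type": "DoUpdate",
        "value": encoded,
        "timestamp": "2023-05-15T13:39:15.000000000-04:00"
    });
    println!("{wire}");
    // prints (keys sorted by serde_json):
    // {"timestamp":"2023-05-15T13:39:15.000000000-04:00","type":"DoUpdate","value":"eyJub25jZSI6Ii4uLiJ9"}
}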

View File

@@ -96,7 +96,7 @@ pub fn apiworker_main(
let (config, dh_privkey, creds) = match client.do_update(&creds) {
Ok(d) => d,
Err(e) => {
-error!("error requesting updating config: {}", e);
+error!("error requesting updated config: {}", e);
match save_cdata(&instance, cdata) {
Ok(_) => (),
Err(e) => {

View File

@@ -28,10 +28,11 @@ hex = "0.4" # Misc.
totp-rs = { version = "5.0.1", features = ["gen_secret", "otpauth"] } # Misc.
base64 = "0.21.0" # Misc.
chrono = "0.4.24" # Misc.
+derivative = "2.2.0" # Misc.
trifid-pki = { version = "0.1.9", features = ["serde_derive"] } # Cryptography
aes-gcm = "0.10.1" # Cryptography
ed25519-dalek = "2.0.0-rc.2" # Cryptography
-dnapi-rs = "0.1.9" # API message types
+dnapi-rs = { version = "0.1", path = "../dnapi-rs" } # API message types
ipnet = "2.7.2" # API message types

View File

@@ -18,6 +18,7 @@ use trifid_pki::cert::{
deserialize_ed25519_private, deserialize_nebula_certificate_from_pem, NebulaCertificate,
NebulaCertificateDetails,
};
+use crate::keystore::keystore_init;

pub struct CodegenRequiredInfo {
pub host: host::Model,
@@ -83,10 +84,12 @@ pub async fn generate_config(
cas += &String::from_utf8(hex::decode(&ca.cert)?)?;
}

+let ks = keystore_init()?;
+
// blocked hosts
let mut blocked_hosts_fingerprints = vec![];
for host in &info.blocked_hosts {
-if let Some(host) = data.keystore.hosts.iter().find(|u| &u.id == host) {
+if let Some(host) = ks.hosts.iter().find(|u| &u.id == host) {
for cert in &host.certs {
blocked_hosts_fingerprints.push(cert.cert.sha256sum()?);
}

View File

@@ -19,9 +19,13 @@ use log::error;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
+use std::error::Error;
use std::fs;
use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4};
use std::path::PathBuf;
+use std::time::SystemTime;
+use derivative::Derivative;
+use trifid_pki::cert::deserialize_nebula_certificate_from_pem;

pub static CONFIG: Lazy<TrifidConfig> = Lazy::new(|| {
let config_str = match fs::read_to_string("/etc/trifid/config.toml") {
@@ -130,7 +134,7 @@ fn certs_expiry_time() -> u64 {
3600 * 24 * 31 * 12 // 1 year
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfig {
pub pki: NebulaConfigPki,
#[serde(default = "empty_hashmap")]
@@ -188,7 +192,52 @@ pub struct NebulaConfigPki {
pub disconnect_invalid: bool,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+impl PartialEq for NebulaConfigPki {
+fn eq(&self, other: &Self) -> bool {
+if self.ca != other.ca { return false; }
+if self.key != other.key { return false; }
+if self.blocklist != other.blocklist { return false; }
+if self.disconnect_invalid != other.disconnect_invalid { return false; }
+// cert logic
+// if the cert is invalid, fallback to just checking equality
+match is_cert_equal_ignoring_expiry(&self.cert, &other.cert) {
+Ok(res) => {
+res
+},
+Err(_) => {
+self.cert == other.cert
+}
+}
+}
+}
+
+fn is_cert_equal_ignoring_expiry(me: &str, other: &str) -> Result<bool, Box<dyn Error>> {
+// determines if the certificates are equal, ignoring not_before, not_after and the signature
+// exception: if either certificate is expired, not_before and not_after will be checked anyway
+// parse cert A
+let cert_a = deserialize_nebula_certificate_from_pem(me.as_bytes())?;
+let cert_b = deserialize_nebula_certificate_from_pem(other.as_bytes())?;
+
+if cert_a.details.is_ca != cert_b.details.is_ca { return Ok(false); }
+if cert_a.details.name != cert_b.details.name { return Ok(false); }
+if cert_a.details.public_key != cert_b.details.public_key { return Ok(false); }
+if cert_a.details.groups != cert_b.details.groups { return Ok(false); }
+if cert_a.details.ips != cert_b.details.ips { return Ok(false); }
+if cert_a.details.issuer != cert_b.details.issuer { return Ok(false); }
+if cert_a.details.subnets != cert_b.details.subnets { return Ok(false); }
+
+if cert_a.expired(SystemTime::now()) || cert_b.expired(SystemTime::now()) {
+if cert_a.details.not_before != cert_b.details.not_before { return Ok(false); }
+if cert_a.details.not_after != cert_b.details.not_after { return Ok(false); }
+if cert_a.signature != cert_b.signature { return Ok(false); }
+}
+
+Ok(true)
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigLighthouse {
#[serde(default = "bool_false")]
#[serde(skip_serializing_if = "is_bool_false")]
@@ -212,7 +261,7 @@ pub struct NebulaConfigLighthouse {
pub local_allow_list: HashMap<Ipv4Net, bool>, // `interfaces` is not supported
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigLighthouseDns {
#[serde(default = "string_empty")]
#[serde(skip_serializing_if = "is_string_empty")]
@@ -222,7 +271,7 @@ pub struct NebulaConfigLighthouseDns {
pub port: u16,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigListen {
#[serde(default = "string_empty")]
#[serde(skip_serializing_if = "is_string_empty")]
@@ -239,7 +288,7 @@ pub struct NebulaConfigListen {
pub write_buffer: Option<u32>,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigPunchy {
#[serde(default = "bool_false")]
#[serde(skip_serializing_if = "is_bool_false")]
@@ -252,7 +301,7 @@ pub struct NebulaConfigPunchy {
pub delay: String,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum NebulaConfigCipher {
#[serde(rename = "aes")]
Aes,
@@ -260,7 +309,7 @@ pub enum NebulaConfigCipher {
ChaChaPoly,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigRelay {
#[serde(default = "empty_vec")]
#[serde(skip_serializing_if = "is_empty_vec")]
@@ -273,7 +322,7 @@ pub struct NebulaConfigRelay {
pub use_relays: bool,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigTun {
#[serde(default = "bool_false")]
#[serde(skip_serializing_if = "is_bool_false")]
@@ -300,13 +349,13 @@ pub struct NebulaConfigTun {
pub unsafe_routes: Vec<NebulaConfigTunUnsafeRoute>,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigTunRouteOverride {
pub mtu: u64,
pub route: Ipv4Net,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigTunUnsafeRoute {
pub route: Ipv4Net,
pub via: Ipv4Addr,
@@ -318,7 +367,7 @@ pub struct NebulaConfigTunUnsafeRoute {
pub metric: i64,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigLogging {
#[serde(default = "loglevel_info")]
#[serde(skip_serializing_if = "is_loglevel_info")]
@@ -334,7 +383,7 @@ pub struct NebulaConfigLogging {
pub timestamp_format: String,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum NebulaConfigLoggingLevel {
#[serde(rename = "panic")]
Panic,
@@ -350,7 +399,7 @@ pub enum NebulaConfigLoggingLevel {
Debug,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum NebulaConfigLoggingFormat {
#[serde(rename = "json")]
Json,
@@ -358,7 +407,7 @@ pub enum NebulaConfigLoggingFormat {
Text,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigSshd {
#[serde(default = "bool_false")]
#[serde(skip_serializing_if = "is_bool_false")]
@@ -370,7 +419,7 @@ pub struct NebulaConfigSshd {
pub authorized_users: Vec<NebulaConfigSshdAuthorizedUser>,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigSshdAuthorizedUser {
pub user: String,
#[serde(default = "empty_vec")]
@@ -378,7 +427,7 @@ pub struct NebulaConfigSshdAuthorizedUser {
pub keys: Vec<String>,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
#[serde(tag = "type")]
pub enum NebulaConfigStats {
#[serde(rename = "graphite")]
@@ -387,7 +436,7 @@ pub enum NebulaConfigStats {
Prometheus(NebulaConfigStatsPrometheus),
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigStatsGraphite {
#[serde(default = "string_nebula")]
#[serde(skip_serializing_if = "is_string_nebula")]
@@ -405,7 +454,7 @@ pub struct NebulaConfigStatsGraphite {
pub lighthouse_metrics: bool,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum NebulaConfigStatsGraphiteProtocol {
#[serde(rename = "tcp")]
Tcp,
@@ -413,7 +462,7 @@ pub enum NebulaConfigStatsGraphiteProtocol {
Udp,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigStatsPrometheus {
pub listen: String,
pub path: String,
@@ -432,7 +481,7 @@ pub struct NebulaConfigStatsPrometheus {
pub lighthouse_metrics: bool,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigFirewall {
#[serde(default = "none")]
#[serde(skip_serializing_if = "is_none")]
@@ -447,7 +496,7 @@ pub struct NebulaConfigFirewall {
pub outbound: Option<Vec<NebulaConfigFirewallRule>>,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigFirewallConntrack {
#[serde(default = "string_12m")]
#[serde(skip_serializing_if = "is_string_12m")]
@@ -460,7 +509,7 @@ pub struct NebulaConfigFirewallConntrack {
pub default_timeout: String,
}

-#[derive(Serialize, Deserialize, Clone, Debug)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct NebulaConfigFirewallRule {
#[serde(default = "none")]
#[serde(skip_serializing_if = "is_none")]

View File

@@ -3,10 +3,11 @@ use ed25519_dalek::{SigningKey, VerifyingKey};
use serde::{Deserialize, Serialize};
use std::error::Error;
use std::fs;
+use log::debug;
use trifid_pki::cert::NebulaCertificate;
use trifid_pki::x25519_dalek::PublicKey;

-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Clone)]
pub struct Keystore {
#[serde(default = "default_vec")]
pub hosts: Vec<KeystoreHostInformation>,
@@ -18,7 +19,7 @@ fn default_vec<T>() -> Vec<T> {
pub fn keystore_init() -> Result<Keystore, Box<dyn Error>> {
let mut ks_fp = CONFIG.crypto.local_keystore_directory.clone();
-ks_fp.push("/tfks.toml");
+ks_fp.push("tfks.toml");

if !ks_fp.exists() {
return Ok(Keystore {
@@ -34,7 +35,9 @@ pub fn keystore_init() -> Result<Keystore, Box<dyn Error>> {
pub fn keystore_flush(ks: &Keystore) -> Result<(), Box<dyn Error>> {
let mut ks_fp = CONFIG.crypto.local_keystore_directory.clone();
-ks_fp.push("/tfks.toml");
+ks_fp.push("tfks.toml");
+debug!("writing to {}", ks_fp.display());

fs::write(ks_fp, toml::to_string(ks)?)?;

View File

@@ -43,8 +43,7 @@ pub mod timers;
pub mod tokens;

pub struct AppState {
-pub conn: DatabaseConnection,
-pub keystore: Keystore,
+pub conn: DatabaseConnection
}

#[actix_web::main]
@@ -72,12 +71,12 @@ async fn main() -> Result<(), Box<dyn Error>> {
info!("Performing database migration...");
Migrator::up(&db, None).await?;

-let data = Data::new(AppState { conn: db, keystore });
+let data = Data::new(AppState { conn: db });

HttpServer::new(move || {
App::new()
.app_data(data.clone())
-.app_data(JsonConfig::default().error_handler(|err, _req| {
+.app_data(JsonConfig::default().content_type_required(false).error_handler(|err, _req| {
let api_error: APIError = (&err).into();
actix_web::error::InternalError::from_response(
err,
@@ -112,6 +111,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
.service(routes::v1::hosts::enroll_host)
.service(routes::v1::hosts::create_host_and_enrollment_code)
.service(routes::v2::enroll::enroll)
+.service(routes::v1::dnclient::dnclient)
})
.bind(CONFIG.server.bind)?
.run()

View File

@@ -0,0 +1,369 @@
use actix_web::{HttpRequest, HttpResponse, post};
use actix_web::web::{Data, Json};
use base64::Engine;
use dnapi_rs::message::{APIError, APIErrors, CheckForUpdateResponse, CheckForUpdateResponseWrapper, DoUpdateRequest, DoUpdateResponse, EnrollResponse, RequestV1, RequestWrapper, SignedResponse, SignedResponseWrapper};
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier, VerifyingKey};
use log::{debug, error};
use trifid_pki::x25519_dalek::PublicKey;
use crate::AppState;
use crate::codegen::{collect_info, generate_config};
use crate::keystore::{keystore_flush, keystore_init, KSCert, KSClientKey, KSConfig, KSSigningKey};
use std::clone::Clone;
use dnapi_rs::credentials::ed25519_public_keys_to_pem;
use rand::rngs::OsRng;
use trifid_pki::cert::{deserialize_ed25519_public, deserialize_x25519_public};
#[post("/v1/dnclient")]
pub async fn dnclient(req: Json<RequestV1>, req_info: HttpRequest, db: Data<AppState>) -> HttpResponse {
if req.version != 1 {
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_UNSUPPORTED_VERSION".to_string(),
message: "This server does not support the requested DNClient version.".to_string(),
path: None,
}
])
}
// verify the signature
let host = &req.host_id;
let mut keystore = match keystore_init() {
Ok(ks) => ks,
Err(e) => {
error!("keystore load error: {}", e);
return HttpResponse::InternalServerError().json(EnrollResponse::Error {
errors: vec![APIError {
code: "ERR_KS_LOAD_ERROR".to_string(),
message: e.to_string(),
path: None,
}],
});
}
};
debug!("{}", host);
let host_in_ks = keystore.hosts.iter_mut().find(|u| &u.id == host);
let host_in_ks = match host_in_ks {
Some(host) => host,
None => {
return HttpResponse::Unauthorized().json(vec![
APIError {
code: "ERR_HOST_ERROR".to_string(),
message: "The host does not exist or you do not have permission to access it.".to_string(),
path: None,
}
])
}
};
let client_keys = host_in_ks.client_keys.iter().find(|u| u.id == req.counter as u64).unwrap();
let client_keys_2 = host_in_ks.client_keys.iter().find(|u| u.id == host_in_ks.current_client_key).unwrap();
let signature = match Signature::from_slice(&req.signature) {
Ok(sig) => sig,
Err(e) => {
error!("signature load error: {}", e);
// Be intentionally vague as the signature is invalid.
return HttpResponse::Unauthorized().json(vec![
APIError {
code: "ERR_HOST_ERROR".to_string(),
message: "The host does not exist or you do not have permission to access it.".to_string(),
path: None,
}
])
}
};
if client_keys.ed_pub.verify(req.message.as_bytes(), &signature).is_err() && client_keys_2.ed_pub.verify(req.message.as_bytes(), &signature).is_err() {
// Be intentionally vague as the message is invalid.
debug!("! invalid signature");
return HttpResponse::Unauthorized().json(vec![
APIError {
code: "ERR_HOST_ERROR".to_string(),
message: "The host does not exist or you do not have permission to access it.".to_string(),
path: None,
}
])
}
// Sig OK
// Decode the message from base64
debug!("{}", req.message);
let msg_raw = match base64::engine::general_purpose::STANDARD.decode(&req.message) {
Ok(msg) => msg,
Err(e) => {
error!("b64 decode error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_INVALID_MESSAGE".to_string(),
message: "Error while decoding message from base64.".to_string(),
path: None
}
])
}
};
// Decode it into RequestWrapper
debug!("{:?}", String::from_utf8(msg_raw.clone()));
let req_w: RequestWrapper = match serde_json::from_slice(&msg_raw) {
Ok(msg) => msg,
Err(e) => {
error!("msg decode error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_INVALID_MESSAGE".to_string(),
message: "Error while decoding message from JSON.".to_string(),
path: None
}
])
}
};
// Do a config build
let info = match collect_info(&db, host, client_keys.dh_pub.as_bytes()).await {
Ok(i) => i,
Err(e) => {
return HttpResponse::InternalServerError().json(EnrollResponse::Error {
errors: vec![APIError {
code: "ERR_CFG_GENERATION_ERROR".to_string(),
message: e.to_string(),
path: None,
}],
});
}
};
// codegen: handoff to dedicated codegen module, we have collected all information
let (cfg, cert) = match generate_config(&db, &info).await {
Ok(cfg) => cfg,
Err(e) => {
error!("error generating configuration: {}", e);
return HttpResponse::InternalServerError().json(EnrollResponse::Error {
errors: vec![APIError {
code: "ERR_CFG_GENERATION_ERROR".to_string(),
message: "There was an error generating the host configuration.".to_string(),
path: None,
}],
});
}
};
let current_cfg = host_in_ks.config.iter().find(|u| u.id == host_in_ks.current_config);
let config_update_avail = current_cfg.map(|u| u.config.clone()) != Some(cfg.clone()) || req.counter < host_in_ks.current_config as u32;
return match req_w.message_type.as_str() {
"CheckForUpdate" => {
// value ignored here
HttpResponse::Ok().json(CheckForUpdateResponseWrapper {
data: CheckForUpdateResponse { update_available: config_update_avail },
})
},
"DoUpdate" => {
if !config_update_avail {
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_NO_UPDATE_AVAILABLE".to_string(),
message: "There is no new configuration available.".to_string(),
path: None
}
])
}
let do_update_req: DoUpdateRequest = match serde_json::from_slice(&req_w.value) {
Ok(req) => req,
Err(e) => {
error!("DoUpdate deserialization error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_REQ_DESERIALIZE_ERROR".to_string(),
message: "There was an error deserializing the update request.".to_string(),
path: None
}
])
}
};
let dh_pubkey = match deserialize_x25519_public(&do_update_req.dh_pubkey_pem) {
Ok(pk) => pk,
Err(e) => {
error!("PEM decode error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_BAD_PK".to_string(),
message: "There was an error deserializing the DHPK.".to_string(),
path: None
}
])
}
};
let info = match collect_info(&db, host, &dh_pubkey).await {
Ok(i) => i,
Err(e) => {
return HttpResponse::InternalServerError().json(EnrollResponse::Error {
errors: vec![APIError {
code: "ERR_CFG_GENERATION_ERROR".to_string(),
message: e.to_string(),
path: None,
}],
});
}
};
// codegen: handoff to dedicated codegen module, we have collected all information
let (cfg, cert) = match generate_config(&db, &info).await {
Ok(cfg) => cfg,
Err(e) => {
error!("error generating configuration: {}", e);
return HttpResponse::InternalServerError().json(EnrollResponse::Error {
errors: vec![APIError {
code: "ERR_CFG_GENERATION_ERROR".to_string(),
message: "There was an error generating the host configuration.".to_string(),
path: None,
}],
});
}
};
let mut ks = host_in_ks;
ks.certs.push(KSCert {
id: ks.current_cert + 1,
cert,
});
ks.current_cert += 1;
ks.config.push(KSConfig {
id: ks.current_config + 1,
config: cfg.clone(),
});
ks.current_config += 1;
ks.signing_keys.push(KSSigningKey {
id: ks.current_signing_key + 1,
key: SigningKey::generate(&mut OsRng),
});
ks.current_signing_key += 1;
let dh_pubkey = match deserialize_x25519_public(&do_update_req.dh_pubkey_pem) {
Ok(r) => r,
Err(e) => {
error!("DH pubkey deserialize error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_DH_INVALID".to_string(),
message: "There was an error deserializing the DH pubkey.".to_string(),
path: None
}
])
}
};
let ed_pubkey = match deserialize_ed25519_public(&do_update_req.ed_pubkey_pem) {
Ok(r) => r,
Err(e) => {
error!("ED pubkey deserialize error: {}", e);
return HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_ED_INVALID".to_string(),
message: "There was an error deserializing the ED pubkey.".to_string(),
path: None
}
])
}
};
let dh_pubkey_typed: [u8; 32] = dh_pubkey.clone().try_into().unwrap();
ks.client_keys.push(KSClientKey {
id: ks.current_client_key + 1,
dh_pub: PublicKey::from(dh_pubkey_typed),
ed_pub: VerifyingKey::from_bytes(&ed_pubkey.try_into().unwrap()).unwrap(),
});
ks.current_client_key += 1;
let host_in_ks = ks.clone();
match keystore_flush(&keystore) {
Ok(_) => (),
Err(e) => {
error!("keystore save error: {}", e);
return HttpResponse::InternalServerError().json(vec![
APIError {
code: "ERR_SAVE_ERR".to_string(),
message: "There was an error saving the keystore.".to_string(),
path: None
}
])
}
}
// get the signing key that the client last trusted based on its current config version
// this is their current counter
let signing_key = host_in_ks.signing_keys.iter().find(|u| u.id == (req.counter as u64) - 1).unwrap();
let msg = DoUpdateResponse {
config: match serde_yaml::to_string(&cfg) {
Ok(c_str) => c_str.as_bytes().to_vec(),
Err(e) => {
error!("config serialization error: {}", e);
return HttpResponse::InternalServerError().json(vec![
APIError {
code: "ERR_CFG_SERIALIZATION".to_string(),
message: "There was an error serializing the new configuration.".to_string(),
path: None
}
])
}
},
counter: host_in_ks.current_config as u32,
nonce: do_update_req.nonce,
trusted_keys: ed25519_public_keys_to_pem(&[signing_key.key.verifying_key()]),
};
let msg_bytes = match serde_json::to_vec(&msg) {
Ok(b) => b,
Err(e) => {
error!("response serialization error: {}", e);
return HttpResponse::InternalServerError().json(vec![
APIError {
code: "ERR_CFG_SERIALIZATION".to_string(),
message: "There was an error serializing the new configuration.".to_string(),
path: None
}
])
}
};
let resp = SignedResponse {
version: 1,
message: msg_bytes.clone(),
signature: signing_key.key.sign(&msg_bytes).to_vec(),
};
let resp_w = SignedResponseWrapper {
data: resp,
};
HttpResponse::Ok().json(resp_w)
},
_ => {
HttpResponse::BadRequest().json(vec![
APIError {
code: "ERR_UNSUPPORTED_METHOD".to_string(),
message: "This server does not support that method yet.".to_string(),
path: None
}
])
}
}
}
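Note: a standalone sketch (hypothetical helper, not part of this commit) of the dual-key signature check the handler performs above. A request body is accepted if its ed25519 signature verifies against either the client key recorded at the client's counter or the host's current client key, so a host that has just rotated keys is not locked out.

// Sketch only: mirrors the "either key verifies" rule used by the handler.
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier, VerifyingKey};
use rand::rngs::OsRng;

fn is_trusted(msg: &[u8], sig: &Signature, candidates: &[VerifyingKey]) -> bool {
    candidates.iter().any(|key| key.verify(msg, sig).is_ok())
}

fn main() {
    let counter_key = SigningKey::generate(&mut OsRng); // key recorded at the client's counter
    let current_key = SigningKey::generate(&mut OsRng); // host's current client key
    let msg = b"base64-encoded RequestWrapper body";
    let sig = counter_key.sign(msg);
    // signed with the older-but-still-recorded key: accepted
    assert!(is_trusted(msg, &sig, &[counter_key.verifying_key(), current_key.verifying_key()]));
    // tampered body: rejected by both keys
    assert!(!is_trusted(b"tampered body", &sig, &[counter_key.verifying_key(), current_key.verifying_key()]));
}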

View File

@@ -623,7 +623,9 @@ pub async fn create_hosts_request(
});
}

-if req.is_lighthouse || req.is_relay && req.static_addresses.is_empty() {
+debug!("{:?}", req.static_addresses);
+
+if (req.is_lighthouse || req.is_relay) && req.static_addresses.is_empty() {
return HttpResponse::BadRequest().json(APIErrorsResponse {
errors: vec![APIError {
code: "ERR_NEEDS_STATIC_ADDR".to_string(),
@@ -2182,7 +2184,7 @@ pub async fn create_host_and_enrollment_code(
});
}

-if req.is_lighthouse || req.is_relay && req.static_addresses.is_empty() {
+if (req.is_lighthouse || req.is_relay) && req.static_addresses.is_empty() {
return HttpResponse::BadRequest().json(APIErrorsResponse {
errors: vec![APIError {
code: "ERR_NEEDS_STATIC_ADDR".to_string(),

View File

@@ -7,3 +7,4 @@ pub mod signup;
pub mod totp_authenticators;
pub mod trifid;
pub mod verify_totp_authenticators;
+pub mod dnclient;

View File

@@ -9,7 +9,7 @@ use rand::rngs::OsRng;
use sea_orm::{ColumnTrait, EntityTrait, ModelTrait, QueryFilter};
use crate::codegen::{collect_info, generate_config};
-use crate::keystore::{KSCert, KSClientKey, KSConfig, KSSigningKey, KeystoreHostInformation};
+use crate::keystore::{KSCert, KSClientKey, KSConfig, KSSigningKey, KeystoreHostInformation, keystore_flush, keystore_init};
use crate::AppState;
use trifid_api_entities::entity::host_enrollment_code;
use trifid_pki::cert::{
@@ -40,8 +40,8 @@ pub async fn enroll(
errors: vec![APIError {
code: "ERR_DB_ERROR".to_string(),
message:
"There was an error with the database request. Please try again later."
.to_string(),
path: None,
}],
});
@@ -57,7 +57,7 @@ pub async fn enroll(
message: "That code is invalid or has expired.".to_string(),
path: None,
}],
-})
+});
}
};
@@ -109,8 +109,8 @@ pub async fn enroll(
errors: vec![APIError {
code: "ERR_DB_ERROR".to_string(),
message:
"There was an error with the database request. Please try again later."
.to_string(),
path: None,
}],
});
@@ -145,88 +145,87 @@ pub async fn enroll(
}
};

-let host_in_ks = db.keystore.hosts.iter().find(|u| u.id == enroll_info.id);
-
-let host_in_ks = match host_in_ks {
-Some(ksinfo) => {
-let mut ks = ksinfo.clone();
-ks.certs.push(KSCert {
-id: ks.current_cert + 1,
-cert,
-});
-ks.current_cert += 1;
-ks.config.push(KSConfig {
-id: ks.current_config + 1,
-config: cfg.clone(),
-});
-ks.current_config += 1;
-ks.signing_keys.push(KSSigningKey {
-id: ks.current_signing_key,
-key: SigningKey::generate(&mut OsRng),
-});
-ks.current_signing_key += 1;
-let dh_pubkey_typed: [u8; 32] = dh_pubkey.clone().try_into().unwrap();
-ks.client_keys.push(KSClientKey {
-id: ks.current_client_key + 1,
-dh_pub: PublicKey::from(dh_pubkey_typed),
-ed_pub: VerifyingKey::from_bytes(&ed_pubkey.try_into().unwrap()).unwrap(),
-});
-ks.current_client_key += 1;
-ks
-}
-None => {
-let dh_pubkey_typed: [u8; 32] = dh_pubkey.clone().try_into().unwrap();
-KeystoreHostInformation {
-id: enroll_info.id.clone(),
-current_signing_key: 1,
-current_client_key: 1,
-current_config: 1,
-current_cert: 1,
-certs: vec![KSCert { id: 1, cert }],
-config: vec![KSConfig {
-id: 1,
-config: cfg.clone(),
-}],
-signing_keys: vec![KSSigningKey {
-id: 1,
-key: SigningKey::generate(&mut OsRng),
-}],
-client_keys: vec![KSClientKey {
-id: 1,
-dh_pub: PublicKey::from(dh_pubkey_typed),
-ed_pub: VerifyingKey::from_bytes(&ed_pubkey.try_into().unwrap()).unwrap(),
-}],
-}
-}
-};
+let mut ks_clone = match keystore_init() {
+Ok(ks) => ks,
+Err(e) => {
+error!("error loading keystore: {}", e);
+return HttpResponse::InternalServerError().json(EnrollResponse::Error {
+errors: vec![APIError {
+code: "ERR_KS_LOAD_ERROR".to_string(),
+message: "There was an error loading the keystore.".to_string(),
+path: None,
+}],
+});
+}
+};
+
+loop {
+let host_in_ks = ks_clone.hosts.iter().position(|u| u.id == enroll_info.host);
+if let Some(host) = host_in_ks {
+ks_clone.hosts.remove(host);
+} else {
+break;
+}
+}
+
+let dh_pubkey_typed: [u8; 32] = dh_pubkey.clone().try_into().unwrap();
+
+let host = KeystoreHostInformation {
+id: enroll_info.host.clone(),
+current_signing_key: 0,
+current_client_key: 1,
+current_config: 1,
+current_cert: 1,
+certs: vec![KSCert { id: 1, cert }],
+config: vec![KSConfig {
+id: 1,
+config: cfg.clone(),
+}],
+signing_keys: vec![KSSigningKey {
+id: 0,
+key: SigningKey::generate(&mut OsRng),
+}],
+client_keys: vec![KSClientKey {
+id: 1,
+dh_pub: PublicKey::from(dh_pubkey_typed),
+ed_pub: VerifyingKey::from_bytes(&ed_pubkey.try_into().unwrap()).unwrap(),
+}],
+};
+
+ks_clone.hosts.push(host.clone());
+
+match keystore_flush(&ks_clone) {
+Ok(_) => (),
+Err(e) => {
+error!("keystore save error: {}", e);
+return HttpResponse::InternalServerError().json(vec![
+APIError {
+code: "ERR_SAVE_ERR".to_string(),
+message: "There was an error saving the keystore.".to_string(),
+path: None,
+}
+]);
+}
+}

HttpResponse::Ok().json(EnrollResponse::Success {
data: EnrollResponseData {
config: match serde_yaml::to_string(&cfg) {
Ok(cfg) => cfg.as_bytes().to_vec(),
Err(e) => {
error!("serialization error: {}", e);
-return HttpResponse::InternalServerError().json(EnrollResponse::Error {
-errors: vec![
-APIError {
-code: "ERR_CFG_SERIALIZATION_ERROR".to_string(),
-message: "There was an error serializing the node's configuration. Please try again later.".to_string(),
-path: None,
-}
-],
-});
+return HttpResponse::BadRequest().json(vec![
+APIError {
+code: "ERR_ED_INVALID".to_string(),
+message: "There was an error deserializing the ED pubkey.".to_string(),
+path: None,
+}
+]);
}
},
host_id: enroll_info.host.clone(),
-counter: host_in_ks.current_config as u32,
+counter: host.current_config as u32,
-trusted_keys: serialize_ed25519_public(host_in_ks.signing_keys.iter().find(|u| u.id == host_in_ks.current_signing_key).unwrap().key.verifying_key().as_bytes().as_slice()).to_vec(),
+trusted_keys: serialize_ed25519_public(host.signing_keys.iter().find(|u| u.id == host.current_signing_key).unwrap().key.verifying_key().as_bytes().as_slice()).to_vec(),
organization: EnrollResponseDataOrg { id: info.organization.id.clone(), name: info.organization.name.clone() },
},
})

View File

@@ -0,0 +1,361 @@
[[hosts]]
id = "host-IPNHZ2XBXJDY2WYOYG7709CBJ8"
current_signing_key = 1
current_client_key = 2
current_config = 2
current_cert = 2
[[hosts.certs]]
id = 1
[hosts.certs.cert]
signature = [112, 198, 103, 65, 58, 33, 254, 185, 255, 1, 204, 111, 236, 234, 55, 143, 24, 27, 104, 53, 89, 106, 209, 53, 201, 35, 248, 55, 109, 120, 219, 26, 171, 234, 181, 70, 174, 177, 12, 121, 190, 67, 73, 104, 218, 2, 139, 120, 116, 174, 106, 120, 56, 162, 143, 162, 143, 199, 237, 151, 215, 129, 245, 8]
[hosts.certs.cert.details]
name = "asd"
ips = ["10.17.2.3/15"]
subnets = []
groups = ["role:role-A4YTNBOMCFJNK5OAKHQCUUVIL8"]
public_key = [10, 175, 118, 186, 191, 43, 172, 0, 152, 238, 83, 31, 38, 79, 189, 76, 149, 38, 157, 84, 200, 210, 0, 95, 37, 169, 196, 77, 214, 209, 91, 10]
is_ca = false
issuer = "9a4dd7cb5c3a086b0173f126bbf20b85ac7886a2129d2f8573acc2e20f09ec1f"
[hosts.certs.cert.details.not_before]
secs_since_epoch = 1684171628
nanos_since_epoch = 68795993
[hosts.certs.cert.details.not_after]
secs_since_epoch = 1716312428
nanos_since_epoch = 68796023
[[hosts.certs]]
id = 2
[hosts.certs.cert]
signature = [134, 249, 92, 208, 133, 181, 164, 230, 242, 79, 132, 140, 164, 28, 159, 165, 55, 176, 140, 73, 208, 50, 53, 184, 178, 242, 62, 90, 55, 187, 245, 231, 22, 89, 161, 9, 181, 56, 135, 163, 93, 102, 69, 34, 51, 139, 158, 181, 5, 207, 2, 87, 100, 236, 215, 116, 109, 43, 186, 148, 200, 235, 99, 7]
[hosts.certs.cert.details]
name = "addsd"
ips = ["10.17.2.3/15"]
subnets = []
groups = ["role:role-A4YTNBOMCFJNK5OAKHQCUUVIL8"]
public_key = [78, 139, 195, 146, 198, 211, 251, 196, 238, 154, 134, 158, 111, 25, 198, 228, 195, 108, 242, 146, 16, 45, 98, 155, 152, 116, 114, 218, 226, 137, 182, 11]
is_ca = false
issuer = "9a4dd7cb5c3a086b0173f126bbf20b85ac7886a2129d2f8573acc2e20f09ec1f"
[hosts.certs.cert.details.not_before]
secs_since_epoch = 1684171718
nanos_since_epoch = 140841799
[hosts.certs.cert.details.not_after]
secs_since_epoch = 1716312518
nanos_since_epoch = 140841859
[[hosts.config]]
id = 1
[hosts.config.config]
routines = 0
[hosts.config.config.pki]
ca = """
-----BEGIN NEBULA CERTIFICATE-----\r
Cl0KK2NvcmVAY29yZWRvZXMuZGV2J3MgT3JnYW5pemF0aW9uIFNpZ25pbmcgQ0Eo\r
y7iEowYwy+2S0AY6II2RV3kVBopKoTe3j+aT1LbZuWTR/5oQGra185GB5W63QAES\r
QGRgfmRuJOzhtWwwU4BGMo47uoncMGV41sz1NYcvwmruwhJDaYYJ51DLz3v5bYZV\r
LCxfFB661cvoq1OZ7G5ZcgY=\r
-----END NEBULA CERTIFICATE-----\r
"""
cert = """
-----BEGIN NEBULA CERTIFICATE-----\r
CoYBCgNhc2QSCYOExFCAgPj/DyIkcm9sZTpyb2xlLUE0WVROQk9NQ0ZKTks1T0FL\r
SFFDVVVWSUw4KOzWiaMGMOyys7IGOiAKr3a6vyusAJjuUx8mT71MlSadVMjSAF8l\r
qcRN1tFbCkogmk3Xy1w6CGsBc/Emu/ILhax4hqISnS+Fc6zC4g8J7B8SQHDGZ0E6\r
If65/wHMb+zqN48YG2g1WWrRNckj+DdteNsaq+q1Rq6xDHm+Q0lo2gKLeHSuang4\r
oo+ij8ftl9eB9Qg=\r
-----END NEBULA CERTIFICATE-----\r
"""
disconnect_invalid = true
[hosts.config.config.lighthouse]
interval = 60
[hosts.config.config.listen]
host = "[::]"
read_buffer = 10485760
write_buffer = 10485760
[hosts.config.config.punchy]
punch = true
respond = true
delay = ""
[hosts.config.config.relay]
[hosts.config.config.tun]
dev = "trifid1"
drop_local_broadcast = true
drop_multicast = true
[hosts.config.config.firewall]
inbound = []
[[hosts.config.config.firewall.outbound]]
port = "any"
proto = "any"
host = "any"
[[hosts.config]]
id = 2
[hosts.config.config]
routines = 0
[hosts.config.config.pki]
ca = """
-----BEGIN NEBULA CERTIFICATE-----\r
Cl0KK2NvcmVAY29yZWRvZXMuZGV2J3MgT3JnYW5pemF0aW9uIFNpZ25pbmcgQ0Eo\r
y7iEowYwy+2S0AY6II2RV3kVBopKoTe3j+aT1LbZuWTR/5oQGra185GB5W63QAES\r
QGRgfmRuJOzhtWwwU4BGMo47uoncMGV41sz1NYcvwmruwhJDaYYJ51DLz3v5bYZV\r
LCxfFB661cvoq1OZ7G5ZcgY=\r
-----END NEBULA CERTIFICATE-----\r
"""
cert = """
-----BEGIN NEBULA CERTIFICATE-----\r
CogBCgVhZGRzZBIJg4TEUICA+P8PIiRyb2xlOnJvbGUtQTRZVE5CT01DRkpOSzVP\r
QUtIUUNVVVZJTDgoxteJowYwxrOzsgY6IE6Lw5LG0/vE7pqGnm8ZxuTDbPKSEC1i\r
m5h0ctriibYLSiCaTdfLXDoIawFz8Sa78guFrHiGohKdL4VzrMLiDwnsHxJAhvlc\r
0IW1pObyT4SMpByfpTewjEnQMjW4svI+Wje79ecWWaEJtTiHo11mRSIzi561Bc8C\r
V2Ts13RtK7qUyOtjBw==\r
-----END NEBULA CERTIFICATE-----\r
"""
disconnect_invalid = true
[hosts.config.config.lighthouse]
interval = 60
[hosts.config.config.listen]
host = "[::]"
read_buffer = 10485760
write_buffer = 10485760
[hosts.config.config.punchy]
punch = true
respond = true
delay = ""
[hosts.config.config.relay]
[hosts.config.config.tun]
dev = "trifid1"
drop_local_broadcast = true
drop_multicast = true
[hosts.config.config.firewall]
inbound = []
[[hosts.config.config.firewall.outbound]]
port = "any"
proto = "any"
host = "any"
[[hosts.signing_keys]]
id = 0
key = [108, 174, 65, 117, 166, 239, 62, 150, 81, 111, 185, 79, 158, 206, 104, 43, 163, 224, 206, 219, 147, 71, 158, 88, 103, 149, 113, 152, 123, 41, 78, 255]
[[hosts.signing_keys]]
id = 1
key = [119, 226, 183, 227, 53, 121, 14, 141, 125, 165, 249, 103, 28, 60, 102, 111, 242, 63, 26, 52, 87, 29, 29, 114, 11, 62, 138, 121, 213, 245, 193, 212]
[[hosts.client_keys]]
id = 1
dh_pub = [10, 175, 118, 186, 191, 43, 172, 0, 152, 238, 83, 31, 38, 79, 189, 76, 149, 38, 157, 84, 200, 210, 0, 95, 37, 169, 196, 77, 214, 209, 91, 10]
ed_pub = [135, 237, 110, 71, 189, 155, 246, 66, 50, 229, 80, 254, 93, 99, 35, 29, 87, 138, 132, 193, 118, 216, 218, 60, 142, 178, 42, 126, 182, 25, 31, 103]
[[hosts.client_keys]]
id = 2
dh_pub = [78, 139, 195, 146, 198, 211, 251, 196, 238, 154, 134, 158, 111, 25, 198, 228, 195, 108, 242, 146, 16, 45, 98, 155, 152, 116, 114, 218, 226, 137, 182, 11]
ed_pub = [178, 77, 253, 159, 81, 137, 20, 14, 184, 230, 73, 111, 130, 129, 15, 184, 114, 90, 133, 147, 178, 252, 197, 75, 82, 33, 21, 5, 38, 238, 57, 84]
[[hosts]]
id = "host-2PXIOHLPQA3CQL8O7XD6CXMMRM"
current_signing_key = 1
current_client_key = 2
current_config = 2
current_cert = 2
[[hosts.certs]]
id = 1
[hosts.certs.cert]
signature = [160, 205, 80, 112, 16, 205, 155, 249, 221, 26, 47, 128, 2, 59, 15, 102, 153, 174, 61, 35, 207, 233, 42, 242, 212, 28, 133, 40, 189, 1, 234, 67, 24, 109, 152, 248, 130, 96, 48, 104, 69, 0, 178, 30, 103, 76, 33, 179, 216, 92, 191, 89, 6, 236, 136, 216, 9, 208, 189, 16, 140, 132, 209, 2]
[hosts.certs.cert.details]
name = "testhost4"
ips = ["10.17.4.2/15"]
subnets = []
groups = ["role:role-A4YTNBOMCFJNK5OAKHQCUUVIL8"]
public_key = [40, 175, 28, 13, 183, 102, 108, 21, 53, 79, 113, 191, 101, 74, 77, 151, 66, 146, 250, 155, 196, 38, 178, 44, 41, 186, 71, 1, 152, 237, 245, 93]
is_ca = false
issuer = "9a4dd7cb5c3a086b0173f126bbf20b85ac7886a2129d2f8573acc2e20f09ec1f"
[hosts.certs.cert.details.not_before]
secs_since_epoch = 1684172253
nanos_since_epoch = 219759539
[hosts.certs.cert.details.not_after]
secs_since_epoch = 1716313053
nanos_since_epoch = 219759579
[[hosts.certs]]
id = 2
[hosts.certs.cert]
signature = [54, 210, 5, 3, 189, 187, 221, 142, 238, 142, 175, 248, 12, 128, 6, 58, 99, 44, 248, 198, 51, 3, 152, 118, 113, 46, 41, 191, 138, 15, 120, 103, 170, 24, 229, 27, 241, 182, 236, 220, 51, 117, 224, 118, 191, 25, 84, 111, 100, 15, 53, 234, 132, 214, 213, 66, 95, 8, 44, 162, 212, 60, 151, 13]
[hosts.certs.cert.details]
name = "testhost4"
ips = ["10.17.4.2/15"]
subnets = []
groups = ["role:role-A4YTNBOMCFJNK5OAKHQCUUVIL8"]
public_key = [4, 249, 63, 6, 25, 145, 63, 132, 106, 48, 243, 192, 249, 159, 185, 160, 196, 146, 24, 7, 241, 160, 121, 122, 212, 249, 19, 213, 158, 105, 142, 86]
is_ca = false
issuer = "9a4dd7cb5c3a086b0173f126bbf20b85ac7886a2129d2f8573acc2e20f09ec1f"
[hosts.certs.cert.details.not_before]
secs_since_epoch = 1684172313
nanos_since_epoch = 739770378
[hosts.certs.cert.details.not_after]
secs_since_epoch = 1716313113
nanos_since_epoch = 739770429
[[hosts.config]]
id = 1
[hosts.config.config]
routines = 0
[hosts.config.config.pki]
ca = """
-----BEGIN NEBULA CERTIFICATE-----\r
Cl0KK2NvcmVAY29yZWRvZXMuZGV2J3MgT3JnYW5pemF0aW9uIFNpZ25pbmcgQ0Eo\r
y7iEowYwy+2S0AY6II2RV3kVBopKoTe3j+aT1LbZuWTR/5oQGra185GB5W63QAES\r
QGRgfmRuJOzhtWwwU4BGMo47uoncMGV41sz1NYcvwmruwhJDaYYJ51DLz3v5bYZV\r
LCxfFB661cvoq1OZ7G5ZcgY=\r
-----END NEBULA CERTIFICATE-----\r
"""
cert = """
-----BEGIN NEBULA CERTIFICATE-----\r
CowBCgl0ZXN0aG9zdDQSCYKIxFCAgPj/DyIkcm9sZTpyb2xlLUE0WVROQk9NQ0ZK\r
Tks1T0FLSFFDVVVWSUw4KN3biaMGMN23s7IGOiAorxwNt2ZsFTVPcb9lSk2XQpL6\r
m8QmsiwpukcBmO31XUogmk3Xy1w6CGsBc/Emu/ILhax4hqISnS+Fc6zC4g8J7B8S\r
QKDNUHAQzZv53RovgAI7D2aZrj0jz+kq8tQchSi9AepDGG2Y+IJgMGhFALIeZ0wh\r
s9hcv1kG7IjYCdC9EIyE0QI=\r
-----END NEBULA CERTIFICATE-----\r
"""
disconnect_invalid = true
[hosts.config.config.lighthouse]
am_lighthouse = true
interval = 60
[hosts.config.config.listen]
host = "[::]"
port = 5679
read_buffer = 10485760
write_buffer = 10485760
[hosts.config.config.punchy]
punch = true
respond = true
delay = ""
[hosts.config.config.relay]
[hosts.config.config.tun]
dev = "trifid1"
drop_local_broadcast = true
drop_multicast = true
[hosts.config.config.firewall]
inbound = []
[[hosts.config.config.firewall.outbound]]
port = "any"
proto = "any"
host = "any"
[[hosts.config]]
id = 2
[hosts.config.config]
routines = 0
[hosts.config.config.pki]
ca = """
-----BEGIN NEBULA CERTIFICATE-----\r
Cl0KK2NvcmVAY29yZWRvZXMuZGV2J3MgT3JnYW5pemF0aW9uIFNpZ25pbmcgQ0Eo\r
y7iEowYwy+2S0AY6II2RV3kVBopKoTe3j+aT1LbZuWTR/5oQGra185GB5W63QAES\r
QGRgfmRuJOzhtWwwU4BGMo47uoncMGV41sz1NYcvwmruwhJDaYYJ51DLz3v5bYZV\r
LCxfFB661cvoq1OZ7G5ZcgY=\r
-----END NEBULA CERTIFICATE-----\r
"""
cert = """
-----BEGIN NEBULA CERTIFICATE-----\r
CowBCgl0ZXN0aG9zdDQSCYKIxFCAgPj/DyIkcm9sZTpyb2xlLUE0WVROQk9NQ0ZK\r
Tks1T0FLSFFDVVVWSUw4KJnciaMGMJm4s7IGOiAE+T8GGZE/hGow88D5n7mgxJIY\r
B/GgeXrU+RPVnmmOVkogmk3Xy1w6CGsBc/Emu/ILhax4hqISnS+Fc6zC4g8J7B8S\r
QDbSBQO9u92O7o6v+AyABjpjLPjGMwOYdnEuKb+KD3hnqhjlG/G27NwzdeB2vxlU\r
b2QPNeqE1tVCXwgsotQ8lw0=\r
-----END NEBULA CERTIFICATE-----\r
"""
disconnect_invalid = true
[hosts.config.config.lighthouse]
am_lighthouse = true
interval = 60
[hosts.config.config.listen]
host = "[::]"
port = 5677
read_buffer = 10485760
write_buffer = 10485760
[hosts.config.config.punchy]
punch = true
respond = true
delay = ""
[hosts.config.config.relay]
[hosts.config.config.tun]
dev = "trifid1"
drop_local_broadcast = true
drop_multicast = true
[hosts.config.config.firewall]
inbound = []
[[hosts.config.config.firewall.outbound]]
port = "any"
proto = "any"
host = "any"
[[hosts.signing_keys]]
id = 0
key = [255, 84, 221, 121, 87, 225, 7, 12, 236, 8, 209, 175, 98, 20, 119, 146, 92, 177, 79, 121, 24, 243, 247, 113, 106, 212, 183, 155, 208, 55, 219, 135]
[[hosts.signing_keys]]
id = 1
key = [98, 159, 193, 58, 183, 156, 75, 17, 70, 103, 112, 6, 71, 197, 167, 152, 99, 210, 199, 40, 49, 13, 101, 72, 57, 34, 221, 237, 142, 29, 144, 175]
[[hosts.client_keys]]
id = 1
dh_pub = [40, 175, 28, 13, 183, 102, 108, 21, 53, 79, 113, 191, 101, 74, 77, 151, 66, 146, 250, 155, 196, 38, 178, 44, 41, 186, 71, 1, 152, 237, 245, 93]
ed_pub = [247, 172, 97, 223, 43, 24, 248, 133, 118, 219, 227, 72, 95, 25, 167, 179, 115, 225, 73, 211, 161, 216, 95, 140, 151, 59, 118, 39, 122, 136, 144, 245]
[[hosts.client_keys]]
id = 2
dh_pub = [4, 249, 63, 6, 25, 145, 63, 132, 106, 48, 243, 192, 249, 159, 185, 160, 196, 146, 24, 7, 241, 160, 121, 122, 212, 249, 19, 213, 158, 105, 142, 86]
ed_pub = [55, 82, 153, 75, 220, 207, 87, 221, 50, 200, 77, 9, 242, 136, 64, 91, 60, 96, 31, 100, 58, 162, 150, 147, 109, 109, 117, 188, 164, 217, 248, 140]