Compare commits

..

No common commits. "b8d49d91ba4ef556611399e89eb1083d5b4d8701" and "0080f592c0ebfea179ed02a4ee437e9c7ec0854e" have entirely different histories.

20 changed files with 312 additions and 501 deletions

View file

@@ -55,12 +55,8 @@ repos:
 - id: tomlq
 name: toml Format
 description: Formats Toml files
-entry: tomlq -S -ti .
+entry: echo tomlq . -ti
 language: system
-types_or:
+types:
 - toml
-- cargo
 exclude: '\.lock$'
-stages:
-- manual
-- pre-commit

Cargo.lock (generated, 11 changed lines)
View file

@@ -1543,7 +1543,6 @@ dependencies = [
 "schemars",
 "serde",
 "serde_json",
-"systemd-journal-logger",
 "tokio",
 "tokio-stream",
 "toml",
@@ -2017,16 +2016,6 @@ dependencies = [
 "syn",
 ]
-[[package]]
-name = "systemd-journal-logger"
-version = "2.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7266304d24ca5a4b230545fc558c80e18bd3e1d2eb1be149b6bcd04398d3e79c"
-dependencies = [
-"log",
-"rustix",
-]
 [[package]]
 name = "tempfile"
 version = "3.20.0"

View file

@@ -1,102 +1,97 @@
+[package]
+name = "racme"
+version = "0.1.0"
+edition = "2024"
+resolver = "3"
+[features]
+capabilities = ["dep:caps"]
+unstable = ["capabilities"]
 [dependencies]
 derive-new = "0.7.0"
 env_logger = "0.11"
 lazy_static = "1.5"
 toml = "0.8"
-[dependencies.acme2-eab]
-default-features = false
-version = "0"
-[dependencies.caps]
-default-features = false
-optional = true
-version = "0.5.5"
-[dependencies.clap]
-features = ["derive"]
-version = "4.5"
-[dependencies.data-encoding]
-default-features = false
-features = ["alloc", "std"]
-version = "2.9"
-[dependencies.libc]
-default-features = false
-features = ["const-extern-fn", "std"]
-version = "0.2"
-[dependencies.libsystemd]
-default-features = false
-version = "0.7"
-[dependencies.log]
-features = ["std"]
-version = "0.4"
-[dependencies.macro_rules_attribute]
-default-features = false
-version = "0.2"
-[dependencies.openssl]
-default-features = false
-version = "0.10"
-[dependencies.pem]
-default-features = false
-features = ["serde", "std"]
-version = "3.0"
-[dependencies.reqwest]
-default-features = false
-features = ["rustls-tls-native-roots-no-provider", "default-tls"]
-version = "0.12"
 [dependencies.schemars]
+version = "0.9.0"
 default-features = false
 features = ["derive", "std", "preserve_order"]
-version = "0.9.0"
-[dependencies.serde]
-default-features = false
-features = ["derive", "std"]
-version = "1.0"
 [dependencies.serde_json]
+version = "1.0.140"
 default-features = false
 features = ["std"]
-version = "1.0.140"
-[dependencies.systemd-journal-logger]
+[dependencies.caps]
+version = "0.5.5"
 default-features = false
-version = "2.2.2"
-[dependencies.tokio]
+optional = true
+[dependencies.libc]
+version = "0.2"
 default-features = false
-features = ["rt", "sync", "time", "net", "macros"]
-version = "1.45"
+features = ["const-extern-fn", "std"]
+[dependencies.data-encoding]
+version = "2.9"
+default-features = false
+features = ["alloc", "std"]
+[dependencies.macro_rules_attribute]
+version = "0.2"
+default-features = false
+[dependencies.acme2-eab]
+version = "0"
+default-features = false
+[dependencies.openssl]
+version = "0.10"
+default-features = false
+[dependencies.pem]
+version = "3.0"
+default-features = false
+features = ["serde", "std"]
 [dependencies.tokio-stream]
+version = "0.1"
 default-features = false
 features = ["fs"]
-version = "0.1"
+[dependencies.libsystemd]
+version = "0.7"
+default-features = false
 [dependencies.zbus_systemd]
+version = "0.25701"
 default-features = false
 features = ["systemd1"]
-version = "0.25701"
-[features]
-capabilities = ["dep:caps"]
-unstable = ["capabilities"]
-[package]
-edition = "2024"
-name = "racme"
-resolver = "3"
-rust-version = "1.87"
-version = "0.1.0"
+[dependencies.log]
+version = "0.4"
+features = ["std"]
+[dependencies.serde]
+version = "1.0"
+features = ["derive", "std"]
+default-features = false
+[dependencies.clap]
+version = "4.5"
+features = ["derive"]
+[dependencies.tokio]
+version = "1.45"
+default-features = false
+features = ["rt", "sync", "time", "net", "macros"]
+[dependencies.reqwest]
+version = "0.12"
+default-features = false
+features = ["rustls-tls-native-roots-no-provider", "default-tls"]
 [patch.crates-io.acme2-eab]
 path = "../acme2-eab"

View file

@@ -1,8 +0,0 @@
-# Rust + ACME = Racme
-I wrote this to scratch an itch i have with existing ACME Clients:
-- no ownerchange, the acme process also generates certificates for my mailserver or database
-- no DNS-Server integration
-- native logging
-- can do this without root

View file

@@ -1,6 +1,6 @@
 {
 "$schema": "http://json-schema.org/draft-07/schema#",
-"title": "General",
+"title": "GeneralConfig",
 "type": "object",
 "properties": {
 "accounts_path": {

View file

@@ -1,6 +1,6 @@
 {
 "$schema": "http://json-schema.org/draft-07/schema#",
-"title": "Site",
+"title": "SiteConfig",
 "type": "object",
 "properties": {
 "ca": {

View file

@@ -1,4 +1,4 @@
-use macro_rules_attribute::attribute_alias;
+use macro_rules_attribute::derive_alias;
 #[allow(unused_macros)]
 macro_rules! match_error {
@@ -24,10 +24,9 @@ macro_rules! attr_function {
 }
 }
-attribute_alias! {
-#[apply(ConfigFile!)] = #[derive(::serde::Deserialize, ::schemars::JsonSchema)];
-#[apply(Hashable!)] = #[derive(Eq,Hash)];
-#[apply(DefDer!)] = #[derive(Debug, Clone)];
+derive_alias! {
+#[derive(DefDer!)] = #[derive(Debug, Clone)];
+#[derive(Hashable!)] = #[derive(Eq, Hash)];
 }
 #[allow(unused_imports)]

View file

@@ -1,15 +1,9 @@
 //! Acme client that supports multiple CAs and configs for sites that can be seperate from the mainconfig
 #![allow(clippy::clone_on_copy)]
-#![allow(clippy::collapsible_if)]
 #![allow(clippy::identity_op)]
-#![allow(dead_code)]
 #![allow(refining_impl_trait)]
-#![deny(clippy::format_push_string)]
-#![deny(clippy::macro_use_imports)]
-#![deny(clippy::module_name_repetitions)]
-#![deny(clippy::single_component_path_imports)]
-#![deny(clippy::unnecessary_debug_formatting)]
-#![deny(clippy::unnecessary_self_imports)]
+#![allow(clippy::collapsible_if)]
+#![allow(dead_code)]
 pub(crate) mod consts;
 pub(crate) mod macros;
@@ -19,11 +13,12 @@ pub(crate) mod types;
 pub(crate) mod utils;
 use crate::{
+consts::*,
 prelude::*,
 types::{
 config::{
-General,
-Site,
+GeneralConfig,
+SiteConfig,
 },
 dns::Manager,
 structs::{
@@ -33,31 +28,30 @@ use crate::{
 SubCommand,
 },
 },
-utils::{
-check_permissions,
-logging,
-},
+utils::check_permissions,
 };
 use acme2_eab::Directory;
 use clap::Parser;
+use env_logger::init as log_init;
 use libsystemd::daemon;
 use log::*;
-use reqwest::{
-Client,
-tls::Version,
+use openssl::{
+self,
+pkey::{
+PKey,
+Private,
+},
 };
+use reqwest::tls::Version;
 use schemars::{
 SchemaGenerator,
 consts::meta_schemas::DRAFT07,
 generate::SchemaSettings,
 };
 use serde::Serialize;
-use serde_json::{
-Serializer,
-ser::{
-Formatter,
-PrettyFormatter,
-},
+use serde_json::ser::{
+Formatter,
+PrettyFormatter,
 };
 use std::{
 collections::{
@@ -78,8 +72,10 @@ use tokio::{
 create_dir_all,
 read_dir,
 },
-io::AsyncWriteExt as _,
-runtime::Builder,
+io::{
+AsyncReadExt,
+AsyncWriteExt,
+},
 sync::Mutex,
 };
 use tokio_stream::{
@@ -88,21 +84,33 @@
 };
-fn default_client() -> Result<Client, Error> {
-trace!("Initialized new reqwest Client");
-Client::builder()
+fn default_client() -> Result<reqwest::Client, Error> {
+reqwest::Client::builder()
 .min_tls_version(Version::TLS_1_2)
 .https_only(true)
 .pool_max_idle_per_host(POOL_SIZE)
-.connection_verbose(true)
-.referer(true)
 .build()
 .map_err(Error::from_display)
 }
+async fn load_privkey(path: PathBuf) -> Result<PKey<Private>, Error> {
+let mut file = match FILE_MODE.open(path).await {
+Ok(file) => file,
+Err(error) => return Error::err(format!("Failed to open Private Key: {error}")),
+};
+let mut data = String::new();
+if let Err(error) = file.read_to_string(&mut data).await {
+return Error::err(format!("Failed to read data for the key: {error}"));
+}
+match PKey::private_key_from_pem(data.as_bytes()) {
+Ok(key) => Ok(key),
+Err(error) => Error::err(format!("Failed to parse pem data: {error}")),
+}
+}
 async fn racme(flags: Arguments) -> Result<(), Error> {
 let client = default_client()?;
-let mut dns_manager = Manager::new(client.clone());
+let mut dns_manager = Manager::new();
 let systemd_access = daemon::booted();
 let mainconfig = {
 let file = match FILE_MODE.open(flags.config).await {
@@ -111,7 +119,7 @@ async fn racme(flags: Arguments) -> Result<(), Error> {
 return Error::err(format!("error reading the config: {error}"));
 },
 };
-General::from_file(file).await
+GeneralConfig::from_file(file).await
 };
 for (zone, builder) in mainconfig.dns.iter() {
 dns_manager.add_builder(zone.clone(), builder.clone()).await;
@@ -135,7 +143,7 @@ async fn racme(flags: Arguments) -> Result<(), Error> {
 continue;
 },
 };
-let mut site = Site::from_file(file).await;
+let mut site = SiteConfig::from_file(file).await;
 site.name = filename.file_stem().unwrap().to_string_lossy().to_string();
 siteconfigs.push(site);
 }
@@ -190,7 +198,7 @@ async fn racme(flags: Arguments) -> Result<(), Error> {
 fn serialize_with_formatter<T: Serialize, F: Formatter>(value: &T, formatter: F) -> Result<String, Error> {
 let mut store = Vec::with_capacity(2 ^ 10);
-let mut serializer = Serializer::with_formatter(&mut store, formatter);
+let mut serializer = serde_json::ser::Serializer::with_formatter(&mut store, formatter);
 match value.serialize(&mut serializer) {
 Ok(_) => {},
 Err(error) => return Error::err(format!("Failed to Serialize the schema: {error}")),
@@ -204,7 +212,7 @@ async fn schema_generator() -> Result<(), Error> {
 let mut schema_settings = SchemaSettings::default();
 schema_settings.meta_schema = Some(DRAFT07.into());
 let mut generator = SchemaGenerator::new(schema_settings);
-let general_schema = serialize_with_formatter(&generator.root_schema_for::<General>(), formatter.clone())?;
+let general_schema = serialize_with_formatter(&generator.root_schema_for::<GeneralConfig>(), formatter.clone())?;
 match FILE_MODE_WRITE.clone().create_new(false).open("schema-general.json").await {
 Ok(mut file) => {
 match file.write(general_schema.as_bytes()).await {
@@ -215,7 +223,7 @@ async fn schema_generator() -> Result<(), Error> {
 Err(error) => return Err(Error::from_display(error)),
 };
-let site_schema = serialize_with_formatter(&generator.root_schema_for::<Site>(), formatter.clone())?;
+let site_schema = serialize_with_formatter(&generator.root_schema_for::<SiteConfig>(), formatter.clone())?;
 match FILE_MODE_WRITE.clone().create_new(false).open("schema-site.json").await {
 Ok(mut file) => {
 match file.write(site_schema.as_bytes()).await {
@@ -229,7 +237,7 @@ async fn schema_generator() -> Result<(), Error> {
 }
 fn main() {
-logging();
+log_init();
 let args = Arguments::parse();
 if args.subcommands.is_none() && !check_permissions() {
 error!(
@@ -237,7 +245,7 @@ fn main() {
 );
 exit(4)
 }
-let runtime = match Builder::new_current_thread().enable_all().build() {
+let runtime = match tokio::runtime::Builder::new_current_thread().enable_all().build() {
 Ok(runtime) => runtime,
 Err(error) => {
 error!("Could not initialize Tokio runtime: {error}");

View file

@@ -1,5 +1,4 @@
 pub(crate) use crate::{
-consts::*,
 macros::*,
 types::traits::{
 FromFile as _,

View file

@@ -5,10 +5,7 @@ use std::{
 },
 fs::Permissions,
 os::{
-fd::{
-AsFd as _,
-AsRawFd as _,
-},
+fd::AsFd,
 unix::fs::{
 PermissionsExt as _,
 fchown,
@@ -19,17 +16,23 @@ use std::{
 };
 use crate::{
+consts::{
+ATTEMPTS,
+FILE_MODE,
+FILE_MODE_OVERWRITE,
+MODE_PRIVATE,
+MODE_PUBLIC,
+MODE_SECRETS,
+WAIT_TIME,
+},
+load_privkey,
 prelude::*,
 types::{
 self,
 config::CA,
-cryptography::{
-Algorithm,
-Strength,
-},
+cryptography::Algorithm,
 dns::Manager,
 structs::{
-Certificate,
 Error,
 ProcessorArgs,
 San,
@@ -54,14 +57,9 @@ use acme2_eab::{
 OrderBuilder,
 OrderStatus,
 };
-use libc::fchmod;
 use log::*;
 use openssl::{
 hash::MessageDigest,
-pkey::{
-PKey,
-Private,
-},
 stack::Stack,
 x509::{
 X509,
@@ -76,7 +74,6 @@ use openssl::{
 },
 },
 };
-use pem::parse as pem_parse;
 use reqwest::Client;
 use tokio::{
 fs::{
@@ -84,15 +81,12 @@ use tokio::{
 remove_file,
 },
 io::{
-AsyncReadExt as _,
-AsyncWriteExt as _,
+AsyncReadExt,
+AsyncWriteExt,
 },
 join,
 };
-use zbus_systemd::{
-systemd1,
-zbus::Connection,
-};
+use zbus_systemd::systemd1;
 fn gen_stack(args: &ProcessorArgs, context: X509v3Context) -> Stack<X509Extension> {
 let mut stack = Stack::new().unwrap();
@@ -115,7 +109,7 @@ fn gen_stack(args: &ProcessorArgs, context: X509v3Context) -> Stack<X509Extensio
 }
 pub async fn accounts(
-name: &str,
+name: &String,
 ca: &CA,
 directories: &mut HashMap<String, Arc<Directory>>,
 accounts: &mut HashMap<String, Arc<Account>>,
@@ -131,12 +125,7 @@ pub async fn accounts(
 dir
 },
 Err(error) => {
-match error {
-acme2_eab::Error::Server(server_error) => error!("Failed to get the directory(Server Error): {server_error}"),
-acme2_eab::Error::Transport(error) => error!("Failed to connect to the CA(Transport error): {error}"),
-acme2_eab::Error::Other(error) => error!("unexpected error(other): {error}"),
-x => error!("Unexpected Error: {x}"),
-}
+error!("Failed to initialize directory for ca {name}: {error}");
 return;
 },
 }
@@ -152,14 +141,14 @@ pub async fn accounts(
 debug!("No Email address given")
 },
 }
-let accountkey = accountpath.join("file.pem").with_file_name(name);
+let accountkey = accountpath.join("file.pem").with_file_name(name.clone());
 let mut accountkeyfile = None;
 if accountkey.exists() {
 if let Ok(key) = load_privkey(accountkey).await {
 ac.private_key(key);
 }
 } else {
-info!("creating new key for the account {}", name.to_owned());
+info!("creating new key for the account {}", name.clone());
 accountkeyfile = match FILE_MODE_OVERWRITE.clone().mode(MODE_SECRETS).open(accountkey).await {
 Ok(file) => Some(file),
 Err(error) => {
@@ -204,7 +193,7 @@ pub async fn accounts(
 }
 let account = match ac.build().await {
 Ok(account) => {
-accounts.insert(name.to_owned(), Arc::clone(&account));
+accounts.insert(name.clone(), Arc::clone(&account));
 account
 },
 Err(error) => {
@@ -220,120 +209,6 @@ pub async fn accounts(
 }
 }
-async fn load_privkey(path: PathBuf) -> Result<PKey<Private>, Error> {
-let mut file = match FILE_MODE.open(path).await {
-Ok(file) => file,
-Err(error) => return Error::err(format!("Failed to open Private Key: {error}")),
-};
-let mut data = String::with_capacity(file.metadata().await.map(|metadata| metadata.len()).unwrap_or_default() as usize);
-if let Err(error) = file.read_to_string(&mut data).await {
-return Error::err(format!("Failed to read data for the key: {error}"));
-}
-match PKey::private_key_from_pem(data.as_bytes()) {
-Ok(key) => Ok(key),
-Err(error) => Error::err(format!("Failed to parse pem data: {error}")),
-}
-}
-async fn load_or_create_privkey(
-path: PathBuf,
-owner: Option<u32>,
-group: Option<u32>,
-mode: Option<u32>,
-algorithm: Algorithm,
-strength: Strength,
-) -> types::Result<(PKey<Private>, bool)> {
-let mut key_changed = false;
-let privkey;
-if path.exists() {
-match load_privkey(path.clone()).await {
-Ok(key) => {
-if !key.matches(algorithm, strength) {
-info!("Regenerating Key: Parameters changed");
-privkey = gen_key(algorithm, strength)?;
-key_changed = true;
-} else {
-privkey = key;
-}
-},
-Err(error) => {
-error!("Failed to load key: {error}");
-key_changed = true;
-privkey = gen_key(algorithm, strength)?;
-},
-}
-} else {
-key_changed = true;
-privkey = gen_key(algorithm, strength)?;
-}
-if key_changed {
-let mut keyfile = match FILE_MODE_OVERWRITE.clone().open(path.clone()).await {
-Ok(file) => file,
-Err(error) => return Error::err(format!("Failed to write key: {error}")),
-};
-if let Err(error) = fchown(keyfile.as_fd(), owner, group) {
-error!("Failed to change owner of the private key: {error}")
-}
-if let Some(mode) = mode {
-match unsafe { fchmod(keyfile.as_raw_fd(), mode) } {
-0 => {},
-libc::EROFS => error!("Not enough Permissions to change the mode of the private key"),
-libc::EINVAL => error!("Invalid Mode for the private key"),
-libc::EINTR => warn!("chmod was interrupted by an signal"),
-err => error!("unkown return code from fchmod: {err}"),
-}
-}
-let pkey = match privkey.private_key_to_pem_pkcs8() {
-Ok(pkey) => pkey,
-Err(error) => return Err(Error::from_display(error)),
-};
-if let Err(error) = keyfile.write_all(pkey.as_slice()).await {
-return Err(Error::from_display(error));
-}
-}
-Ok((privkey, key_changed))
-}
-async fn load_public_key(path: PathBuf) -> types::Result<Certificate> {
-let mut file = match FILE_MODE.open(path).await {
-Ok(file) => file,
-Err(error) => return Err(Error::from_display(error)),
-};
-let mut data = String::with_capacity(file.metadata().await.map(|metadata| metadata.len()).unwrap_or_default() as usize);
-if let Err(error) = file.read_to_string(&mut data).await {
-return Error::err(format!("Failed to read the public key: {error}"));
-}
-let pem = match pem_parse(data.as_bytes()) {
-Ok(pem) => pem,
-Err(error) => return Error::err(format!("Failed to parse PEM file: {error}")),
-};
-let cert = match X509::from_der(pem.contents()) {
-Ok(cert) => cert,
-Err(error) => {
-return Error::err(format!(
-"Failed to parse the certificate:\n- {}",
-error
-.errors()
-.iter()
-.map(|err| err.to_string())
-.reduce(|mut errorlist, item| {
-errorlist.push_str("\n- ");
-errorlist.push_str(item.as_str());
-errorlist
-})
-.unwrap_or_default()
-));
-},
-};
-Ok(Certificate {
-account_id: pem.headers().get("account_id").map(|d| d.to_string()),
-cert,
-})
-}
 pub async fn site(args: ProcessorArgs<'_>) {
 let mut cert_renew = false;
@@ -347,43 +222,62 @@ pub async fn site(args: ProcessorArgs<'_>) {
 cert_renew = true;
 }
 let (uid, gid) = get_uid_gid(args.owner(), args.group());
-let private_key = match load_or_create_privkey(directory.join("privkey.pem"), uid, gid, Some(MODE_SECRETS), args.algorithm(), args.strength()).await {
-Ok((key, changes)) => {
-cert_renew |= changes;
-key
-},
-Err(error) => {
-error!("Failed to load or to create the key: {error}");
-return;
-},
-};
-let pubkey_filename = directory.join("pubkey.pem");
-let exists = pubkey_filename.exists();
-let publickey = load_public_key(pubkey_filename.clone()).await;
-if exists {
-if let Ok(pubkey) = publickey.clone() {
-if let Some(account) = pubkey.account_id {
-if account != args.account().id {
-info!("Account changed");
-cert_renew = true;
-}
-}
-if !pubkey.cert.days_left(args.refresh_time()) {
+let mut private_key;
+// Private key block
+{
+let private_key_file = directory.join("privkey.pem");
+let mut write_pkey = false;
+if !private_key_file.exists() {
+cert_renew = true;
+write_pkey = true;
+private_key = match_error!(gen_key(args.algorithm(), args.strength())=>Err(error)->"Aborting processing the site due to problem with the certificate generation: {error}");
+} else if let Ok(key) = load_privkey(private_key_file.clone()).await {
+private_key = key;
+if !private_key.matches(args.algorithm(), args.strength()) {
+info!("Algorithm for the private key has changed, updating the key");
+cert_renew = true;
+write_pkey = true;
+private_key = match_error!(gen_key(args.algorithm(), args.strength())=>Err(error)->"Aborting processing the site due to problem with the certificate generation: {error}");
+}
+} else {
+error!("Failed to parse the private key. Renewing the private key.");
+write_pkey = true;
+cert_renew = true;
+private_key = match_error!(gen_key(args.algorithm(), args.strength())=>Err(error)->"Aborting processing the site due to problem with the certificate generation: {error}");
+}
+if write_pkey {
+let pkey = private_key.private_key_to_pem_pkcs8().unwrap();
+let mut file = match_error!(FILE_MODE_OVERWRITE.clone().mode(MODE_PRIVATE).open(private_key_file.clone()).await=>Err(error)->"Failed to write new private key: {error}");
+#[cfg(feature = "capabilities")]
+if let Err(error) = fchown(file.as_fd(), uid, gid) {
+error!("Failed to change owner of the new privatekey: {error}");
+return;
+}
+match_error!(file.write_all(&pkey).await=>Err(error)->"Failed to write new private key: {error}");
+}
+}
+let pubkey_filename = directory.join("pubkey.pem");
+if pubkey_filename.exists() {
+let mut file = match_error!(FILE_MODE.open(pubkey_filename.clone()).await=>Err(error)->"Failed to open publickey. Aborting processing: {error}");
+let mut data = String::new();
+if let Err(error) = file.read_to_string(&mut data).await {
+cert_renew = true;
+error!("Failed to read public key: {error}")
+} else {
+let pubkey = match X509::from_pem(data.as_bytes()) {
+Ok(key) => key,
+Err(_) => todo!(),
+};
+if !pubkey.days_left(args.refresh_time()) {
 info!("Certificate is running out of time");
 cert_renew = true
-}
-if !pubkey.cert.match_san(args.san()) {
-info!("Subject Alternative Names differ from Certificate");
+} else if !pubkey.match_san(args.san()) {
+info!("Subject Alternative Names differ from Certifcate");
 cert_renew = true;
 };
 }
-}
-if !exists || publickey.is_err() {
+} else {
 cert_renew = true;
-if let Err(error) = publickey {
-error!("Failed to parse the public key: {error}")
-}
-}
+}
 if !cert_renew {
 info!("Site {} doesn't need an update for the certificate.", args.name());
@@ -556,7 +450,7 @@ pub async fn auth(auth: Authorization, challenge_dir: Option<PathBuf>, manager:
 }
 pub async fn services(restart_services: HashSet<String>, reload_services: HashSet<String>) {
-let conn = match_error!(Connection::system().await=>Err(error)-> "Failed to connect with the systemd manager: {error}");
+let conn = match_error!(zbus_systemd::zbus::Connection::system().await=>Err(error)-> "Failed to connect with the systemd manager: {error}");
 let systemd_manager = systemd1::ManagerProxy::new(&conn).await.unwrap();

View file

@@ -1,56 +1,52 @@
 use crate::{
 prelude::*,
 types::{
-self,
 VString,
 cryptography::{
 Algorithm,
 Strength,
 },
-dns::Builder,
+dns::DnsBuilder,
 structs::Error,
 },
-utils::schema::{
-email_transform,
-uri_transform,
-},
 };
-use macro_rules_attribute::apply;
+use macro_rules_attribute::macro_rules_derive;
 use openssl::pkey::{
 PKey,
 Private,
 };
+use schemars::JsonSchema;
+use serde::Deserialize;
 use std::{
 collections::HashMap,
 net::IpAddr,
 };
-#[apply(DefDer)]
-#[apply(ConfigFile)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
-pub struct General {
-#[serde(default = "General::default_accounts")]
+pub struct GeneralConfig {
+#[serde(default = "GeneralConfig::default_accounts")]
 pub accounts_path: String,
-#[serde(default = "General::default_sites")]
+#[serde(default = "GeneralConfig::default_sites")]
 pub sites_path: String,
-#[serde(default = "General::default_challenge")]
+#[serde(default = "GeneralConfig::default_challenge")]
 pub http_challenge_path: Option<String>,
 /// This contains the domains(Keys) and the DNS-Servers(values) that are responsible for it.
-#[serde(default = "General::default_dns")]
-pub dns: HashMap<String, Builder>,
-#[serde(default = "General::default_certificates")]
+#[serde(default = "GeneralConfig::default_dns")]
+pub dns: HashMap<String, DnsBuilder>,
+#[serde(default = "GeneralConfig::default_certificates")]
 pub certificates_path: String,
 /// The Key of this table describe an nickname for an CA.
 /// Letsencrypt Prod and Staging are builtin configured, so they doesn't have to be configured.
-#[serde(default = "General::default_cas")]
+#[serde(default = "GeneralConfig::default_cas")]
 pub ca: HashMap<String, CA>,
 }
-impl General {
+impl GeneralConfig {
 #[inline]
 pub(super) fn default_accounts() -> String {
 "accounts".into()
@@ -67,7 +63,7 @@ impl General {
 }
 #[inline]
-pub(super) fn default_dns() -> HashMap<String, Builder> {
+pub(super) fn default_dns() -> HashMap<String, DnsBuilder> {
 HashMap::new()
 }
@@ -83,8 +79,8 @@ impl General {
 }
-#[apply(DefDer)]
-#[apply(ConfigFile)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
 pub struct Eab {
 #[serde(rename = "eab_token", alias = "id")]
@@ -94,22 +90,22 @@ pub struct Eab {
 }
 impl Eab {
-pub fn key(&self) -> types::Result<PKey<Private>> {
+pub fn key(&self) -> super::Result<PKey<Private>> {
 let decoded = &match_error!(data_encoding::BASE64URL_NOPAD.decode(self.key.as_bytes())=>Err(error)-> "Failed to decode the HMAC key for the eab_key: {error}", Error::err("Failed to decode eab_key"));
 PKey::hmac(decoded).map_err(|error| Error::new(format!("Failed to parse the private key: {error}")))
 }
 }
-#[apply(DefDer)]
-#[apply(ConfigFile)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
 pub struct CA {
 /// Url for the directory
-#[schemars(transform=uri_transform)]
+#[schemars(transform=crate::utils::schema::uri_transform)]
 pub directory: String,
 /// Email addresses for the CA to contact the user
-#[schemars(transform=email_transform)]
+#[schemars(transform=crate::utils::schema::email_transform)]
 pub email_addresses: Option<VString>,
 #[serde(flatten, default)]
@@ -131,11 +127,10 @@ impl CA {
 }
 }
-#[apply(DefDer)]
-#[apply(ConfigFile)]
-#[derive(Default)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, Default, JsonSchema)]
 #[serde(deny_unknown_fields)]
-pub struct Site {
+pub struct SiteConfig {
 /// The Configured Certificate Authority
 pub ca: String,
@@ -148,7 +143,7 @@ pub struct Site {
 /// EmailAdresses that this Certificate is valid for
 #[serde(default)]
-#[schemars(transform=email_transform)]
+#[schemars(transform=crate::utils::schema::email_transform)]
 pub emails: VString,
 /// The systemd services are reloaded

View file

@@ -1,10 +1,14 @@
-use crate::prelude::*;
-use macro_rules_attribute::apply;
+use crate::{
+consts::RsaStrength,
+prelude::*,
+};
+use macro_rules_attribute::macro_rules_derive;
+use schemars::JsonSchema;
+use serde::Deserialize;
-#[apply(DefDer)]
-#[apply(ConfigFile)]
-#[derive(Copy, Default)]
+#[macro_rules_derive(DefDer)]
+#[derive(Copy, Deserialize, Default, JsonSchema)]
 pub enum Algorithm {
 Rsa,
 Brainpool,
@@ -14,9 +18,8 @@ pub enum Algorithm {
 }
-#[apply(DefDer)]
-#[apply(ConfigFile)]
-#[derive(Copy, Default)]
+#[macro_rules_derive(DefDer)]
+#[derive(Copy, Deserialize, Default, JsonSchema)]
 #[serde(deny_unknown_fields)]
 pub enum Strength {
 Weak,

View file

@@ -1,20 +1,18 @@
-use macro_rules_attribute::apply;
+use macro_rules_attribute::macro_rules_derive;
+use schemars::JsonSchema;
+use serde::Deserialize;
 use crate::{
-macros::{
-ConfigFile,
-DefDer,
-},
+macros::DefDer,
 types::{
-self,
 dns::Dns,
 structs::DnsToken,
 traits::DnsHandler,
 },
 };
-#[apply(DefDer)]
-#[apply(ConfigFile)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
 pub struct DNSUpdateClientOptions {}
@@ -24,16 +22,16 @@ impl DNSUpdateClientOptions {
 }
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 pub struct DnsUpdateHandler {}
 impl DnsHandler for DnsUpdateHandler {
-async fn set_record(&self, _domain: String, _content: String) -> types::Result<DnsToken> {
+async fn set_record(&self, _domain: String, _content: String) -> crate::types::Result<DnsToken> {
 Ok(DnsToken::new_dns_update())
 }
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 pub struct DnsUpdateToken {}
 impl DnsUpdateToken {

View file

@@ -2,12 +2,8 @@ pub(super) mod dnsupdate;
 pub(super) mod pdns;
 use crate::{
-macros::{
-ConfigFile,
-DefDer,
-},
+macros::DefDer,
 types::{
-self,
 dns::{
 dnsupdate::{
 DNSUpdateClientOptions,
@@ -22,29 +18,27 @@ use crate::{
 DnsToken,
 Error,
 },
-traits::DnsHandler as _,
+traits::DnsHandler,
 },
 };
 use log::*;
-use macro_rules_attribute::apply;
-use reqwest::Client;
+use macro_rules_attribute::macro_rules_derive;
+use schemars::JsonSchema;
+use serde::Deserialize;
 use std::{
 collections::HashMap,
 sync::Arc,
 };
 use tokio::sync::Mutex;
-#[apply(DefDer)]
-pub struct Manager(Arc<Mutex<InnerManager>>, Client);
+#[macro_rules_derive(DefDer)]
+pub struct Manager(Arc<Mutex<InnerManager>>);
 impl Manager {
-pub fn new(client: Client) -> Self {
-Self(
-Arc::new(Mutex::new(InnerManager {
-servers: HashMap::new(),
-})),
-client,
-)
+pub fn new() -> Self {
+Self(Arc::new(Mutex::new(InnerManager {
+servers: HashMap::new(),
+})))
 }
 pub async fn set_record(&self, domain: String, value: String) -> Option<DnsToken> {
@@ -77,7 +71,7 @@
 }
 }
-pub async fn add_builder(&mut self, zone: String, builder: Builder) {
+pub async fn add_builder(&mut self, zone: String, builder: DnsBuilder) {
 let mut fixed_zone = zone.clone();
 if !fixed_zone.ends_with('.') {
 fixed_zone.push('.');
@@ -85,9 +79,9 @@ impl Manager {
 self.0.lock().await.servers.insert(
 fixed_zone,
 match builder {
-Builder::PowerDNS(pdns_client_options) => pdns_client_options.build(zone, self.1.clone()),
-Builder::DNSUpdate(dnsupdate_client_options) => dnsupdate_client_options.build(zone),
-Builder::None => Dns::None,
+DnsBuilder::PowerDNS(pdns_client_options) => pdns_client_options.build(zone),
+DnsBuilder::DNSUpdate(dnsupdate_client_options) => dnsupdate_client_options.build(zone),
+DnsBuilder::None => Dns::None,
 },
 );
 }
@@ -101,19 +95,18 @@ struct InnerManager {
 }
-#[apply(DefDer)]
-#[apply(ConfigFile)]
-#[derive(Default)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, Default, JsonSchema)]
 #[serde(deny_unknown_fields)]
 #[serde(tag = "type", rename_all = "lowercase")]
-pub enum Builder {
+pub enum DnsBuilder {
 PowerDNS(PdnsClientOptions),
 DNSUpdate(DNSUpdateClientOptions),
 #[default]
 None,
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 pub enum Dns {
 PowerDNS(PdnsHandler),
 DNSUpdate(DnsUpdateHandler),
@@ -121,7 +114,7 @@ pub enum Dns {
 }
 impl Dns {
-pub async fn set_record(&self, domain: String, content: String) -> types::Result<DnsToken> {
+pub async fn set_record(&self, domain: String, content: String) -> crate::types::Result<DnsToken> {
 match self {
 Dns::PowerDNS(pdns_handler) => pdns_handler.set_record(domain, content).await,
 Dns::DNSUpdate(dns_update_handler) => dns_update_handler.set_record(domain, content).await,

View file

@@ -5,25 +5,22 @@ use std::time::{
 use derive_new::new;
 use log::*;
-use macro_rules_attribute::apply;
+use macro_rules_attribute::macro_rules_derive;
 use reqwest::{
-Client,
 RequestBuilder,
 StatusCode,
 Url,
 };
+use schemars::JsonSchema;
 use serde::{
 Deserialize,
 Serialize,
 };
 use crate::{
-macros::{
-ConfigFile,
-DefDer,
-},
+default_client,
+macros::DefDer,
 types::{
-self,
 dns::Dns,
 structs::{
 DnsToken,
@@ -33,8 +30,8 @@
 },
 };
-#[apply(DefDer)]
-#[apply(ConfigFile)]
+#[macro_rules_derive(DefDer)]
+#[derive(Deserialize, JsonSchema)]
 #[serde(deny_unknown_fields)]
 pub struct PdnsClientOptions {
 api_key: String,
@@ -51,9 +48,9 @@ impl PdnsClientOptions {
 }
 impl PdnsClientOptions {
-pub fn build(self, zone: String, client: Client) -> Dns {
+pub fn build(self, zone: String) -> Dns {
 Dns::PowerDNS(PdnsHandler {
-client,
+client: default_client().unwrap(),
 server: self.server,
 api_key: self.api_key,
 server_id: self.server_id,
@@ -62,9 +59,9 @@ impl PdnsClientOptions {
 }
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 pub struct PdnsHandler {
-client: Client,
+client: reqwest::Client,
 server: String,
 api_key: String,
 server_id: String,
@@ -86,7 +83,7 @@ fn fix_url(baseurl: Url, path: String) -> Result<Url, ()> {
 impl DnsHandler for PdnsHandler {
-async fn set_record(&self, mut domain: String, content: String) -> types::Result<DnsToken> {
+async fn set_record(&self, mut domain: String, content: String) -> crate::types::Result<DnsToken> {
 trace!("Original URL: {}", self.server);
 let baseurl = match reqwest::Url::parse(&self.server) {
 Ok(url) => {
@@ -181,7 +178,7 @@ impl PdnsToken {
 unsafe impl Send for PdnsToken {}
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Deserialize)]
 #[serde(deny_unknown_fields)]
 pub struct PdnsError {
@@ -190,7 +187,7 @@ pub struct PdnsError {
 pub errors: Vec<String>,
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Serialize, new)]
 struct Comment {
 #[new(into)]
@@ -209,7 +206,7 @@ impl Comment {
 }
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Serialize, new)]
 struct Record {
 content: String,
@@ -218,7 +215,7 @@ struct Record {
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Serialize)]
 #[serde(tag = "changetype", rename_all = "UPPERCASE")]
 enum ChangeType {
@@ -230,7 +227,7 @@ enum ChangeType {
 Delete,
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Serialize, new)]
 struct RRSet {
 name: String,
@@ -241,7 +238,7 @@ struct RRSet {
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Serialize, new)]
 struct RecordUpdate {
 rrsets: Vec<RRSet>,

View file

@@ -1,9 +1,5 @@
 use std::{
-fmt::{
-Display,
-Formatter,
-Result as fmtResult,
-},
+fmt::Display,
 net::IpAddr,
 };
@@ -11,7 +7,7 @@ use acme2_eab::Identifier;
 use openssl::x509::GeneralName;
 use crate::types::{
-config::General,
+config::GeneralConfig,
 dns::pdns::PdnsError,
 structs::{
 Error,
@@ -20,7 +16,7 @@ use crate::types::{
 };
-impl Default for General {
+impl Default for GeneralConfig {
 #[inline]
 fn default() -> Self {
 Self {
@@ -82,14 +78,14 @@ impl From<San> for Identifier {
 }
 impl Display for Error {
-fn fmt(&self, f: &mut Formatter<'_>) -> fmtResult {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 f.write_str(&self.message)
 }
 }
 impl Display for PdnsError {
-fn fmt(&self, f: &mut Formatter<'_>) -> fmtResult {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 f.write_str(&self.error)?;
 if !self.errors.is_empty() {
 f.write_str("(")?;

View file

@@ -1,9 +1,7 @@
-use std::{
-collections::HashSet,
-result::Result as stdResult,
-};
+use std::collections::HashSet;
 use tokio::sync::Mutex;
 pub mod config;
 pub mod cryptography;
 pub mod dns;
@@ -11,11 +9,10 @@ mod foreign_impl;
 pub mod structs;
 pub mod traits;
 /// Alias for Vec\<String\>
 pub type VString = Vec<String>;
 /// Alias for an Safe Hashset
 pub type SafeSet<T> = Mutex<HashSet<T>>;
-pub type Result<T> = stdResult<T, structs::Error>;
+pub type Result<T> = std::result::Result<T, structs::Error>;

View file

@@ -15,8 +15,7 @@ use clap::{
 Subcommand,
 };
 use derive_new::new;
-use macro_rules_attribute::apply;
-use openssl::x509::X509;
+use macro_rules_attribute::macro_rules_derive;
 use reqwest::{
 Client,
 RequestBuilder,
@@ -27,7 +26,7 @@ use crate::{
 prelude::*,
 types::{
 SafeSet,
-config::Site,
+config::SiteConfig,
 cryptography::{
 Algorithm,
 Strength,
@@ -41,7 +40,7 @@
 };
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Parser)]
 pub struct Arguments {
 pub config: String,
@@ -49,17 +48,17 @@ pub struct Arguments {
 pub subcommands: Option<SubCommand>,
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(Subcommand)]
 pub enum SubCommand {
 Schema,
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[allow(clippy::too_many_arguments)]
 #[derive(new)]
 pub struct ProcessorArgs<'a> {
-site: Site,
+site: SiteConfig,
 account: Arc<Account>,
 reload_services: &'a SafeSet<String>,
 restart_services: &'a SafeSet<String>,
@@ -129,8 +128,7 @@ impl<'a: 'b, 'b> ProcessorArgs<'a> {
 }
 }
-#[apply(DefDer)]
-#[apply(Hashable)]
+#[macro_rules_derive(DefDer, Hashable)]
 #[derive(PartialEq)]
 pub enum San {
 Dns(String),
@@ -138,7 +136,7 @@
 IPAddress(IpAddr),
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 #[derive(derive_new::new)]
 pub struct Error {
 pub(super) message: String,
@@ -164,7 +162,7 @@ impl Error {
 }
-#[apply(DefDer)]
+#[macro_rules_derive(DefDer)]
 pub enum DnsToken {
 None,
 Pdns(Box<PdnsToken>),
@@ -192,9 +190,3 @@ impl DnsToken {
 }
 }
 }
-#[apply(DefDer)]
-pub struct Certificate {
-pub cert: X509,
-pub account_id: Option<String>,
-}

View file

@@ -2,9 +2,16 @@ use std::collections::HashSet;
 use crate::{
-prelude::*,
+consts::{
+BRAINPOOL_MIDDLE,
+BRAINPOOL_STRONG,
+BRAINPOOL_WEAK,
+SECP_MIDDLE,
+SECP_STRONG,
+SECP_WEAK,
+},
+types,
 types::{
-self,
 cryptography::{
 Algorithm,
 Strength,
@@ -26,10 +33,9 @@ use openssl::{
 x509::X509,
 };
 use serde::de::DeserializeOwned;
-use std::fmt::Debug as fmtDebug;
 use tokio::{
 fs::File,
-io::AsyncReadExt as _,
+io::AsyncReadExt,
 };
 pub trait FromFile: Default + DeserializeOwned {
@@ -116,6 +122,6 @@ impl MatchX509 for X509 {
 }
 }
-pub trait DnsHandler: fmtDebug + Send {
+pub trait DnsHandler: std::fmt::Debug + Send {
 async fn set_record(&self, domain: String, content: String) -> types::Result<DnsToken>;
 }

View file

@@ -1,13 +1,14 @@
-use std::{
-env::{
-self,
-VarError,
-},
-ffi::CString,
-};
+use std::ffi::CString;
 use crate::{
-prelude::*,
+consts::{
+BRAINPOOL_MIDDLE,
+BRAINPOOL_STRONG,
+BRAINPOOL_WEAK,
+SECP_MIDDLE,
+SECP_STRONG,
+SECP_WEAK,
+},
 types::{
 cryptography::{
 Algorithm,
@@ -37,7 +38,6 @@ use openssl::{
 X509NameBuilder,
 },
 };
-use systemd_journal_logger::JournalLog;
 #[cfg(feature = "capabilities")]
 const CAPABILITY_SET: CapSet = CapSet::Permitted;
@@ -249,41 +249,3 @@ pub mod schema {
 type_transform(schema, "uri");
 }
 }
-pub(crate) fn logging() {
-let level = match env::var("RUST_LOG") {
-Ok(levelname) => levelname.to_uppercase(),
-Err(error) => {
-match error {
-VarError::NotPresent => {},
-VarError::NotUnicode(text) => println!("Invalid Log Level {}", text.display()),
-};
-"INFO".to_string()
-},
-};
-let journal = JournalLog::new().map(|logger| logger.with_extra_fields(vec![("PKG_VERSION", env!("CARGO_PKG_VERSION"))]));
-match journal {
-Ok(logger) => {
-match logger.install() {
-Ok(()) => {
-log::set_max_level(match level.as_str() {
-"OFF" => LevelFilter::Off,
-"TRACE" => LevelFilter::Trace,
-"DEBUG" => LevelFilter::Debug,
-"WARN" => LevelFilter::Warn,
-"ERROR" => LevelFilter::Error,
-_ => LevelFilter::Info,
-});
-},
-Err(error) => {
-env_logger::init();
-warn!("Failed to initialize the Journal Logger: {error}")
-},
-}
-},
-Err(error) => {
-env_logger::init();
-warn!("Failed to initialize the Journal Logger: {error}")
-},
-}
-}
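Note on the removed `logging()` helper above: this compare replaces the hand-rolled journald setup with plain `env_logger` (main.rs now does `use env_logger::init as log_init;` and calls `log_init()`). A minimal standalone sketch of that replacement, assuming only the `log` and `env_logger` crates; the `main` body here is hypothetical and only illustrates the initialization:

// Standalone sketch, not part of the diff: env_logger replacing the journald logger.
// The level is taken from RUST_LOG (default: error), e.g. `RUST_LOG=debug ./racme config.toml`.
use log::{debug, info};

fn main() {
    // Reads RUST_LOG and installs env_logger as the global backend for the `log` macros.
    env_logger::init();
    info!("visible with RUST_LOG=info");
    debug!("visible with RUST_LOG=debug");
}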