diff --git a/Cargo.lock b/Cargo.lock index 568fdc95ede..c6ba6660fe1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1831,6 +1831,7 @@ version = "2.0.0-pre-rc.5" dependencies = [ "crossbeam", "derive_more", + "eyre", "iroha_config_derive", "serde", "serde_json", diff --git a/cli/src/lib.rs b/cli/src/lib.rs index ffe28501889..0746ddc25b3 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -8,7 +8,8 @@ use std::{panic, path::PathBuf, sync::Arc}; use color_eyre::eyre::{eyre, Result, WrapErr}; use iroha_actor::{broker::*, prelude::*}; -use iroha_config::iroha::Configuration; +use iroha_config::base::proxy::DocsDefault; +use iroha_config::iroha::{Configuration, ConfigurationProxy}; use iroha_core::{ block_sync::{BlockSynchronizer, BlockSynchronizerTrait}, genesis::{GenesisNetwork, GenesisNetworkTrait, RawGenesisBlock}, diff --git a/cli/src/samples.rs b/cli/src/samples.rs index 64d4408ee81..b310e7aefa7 100644 --- a/cli/src/samples.rs +++ b/cli/src/samples.rs @@ -3,6 +3,7 @@ use std::{collections::HashSet, str::FromStr}; use iroha_config::{ + base::proxy::DocsDefault, block_sync::Configuration as BlockSyncConfiguration, genesis::Configuration as GenesisConfiguration, iroha::Configuration, @@ -13,6 +14,7 @@ use iroha_config::{ wasm::Configuration as WasmConfiguration, wsv::Configuration as WsvConfiguration, }; + use iroha_crypto::{KeyPair, PublicKey}; use iroha_data_model::peer::Id as PeerId; diff --git a/config/base/Cargo.toml b/config/base/Cargo.toml index bdeee1bcc23..d45ed950bf5 100644 --- a/config/base/Cargo.toml +++ b/config/base/Cargo.toml @@ -12,3 +12,4 @@ serde_json = "1.0" thiserror = "1.0.30" crossbeam = "0.8.1" derive_more = "0.99.17" +eyre = "0.6.8" diff --git a/config/base/derive/src/config.rs b/config/base/derive/src/config.rs new file mode 100644 index 00000000000..1228ae560a6 --- /dev/null +++ b/config/base/derive/src/config.rs @@ -0,0 +1,477 @@ +use proc_macro::TokenStream; +use proc_macro2::Span; +use proc_macro_error::abort; +use quote::quote; +use syn::{ + parse::{Parse, ParseStream}, + Data, DataStruct, DeriveInput, Fields, Ident, Lit, LitStr, Meta, Token, Type, TypePath, +}; + +use super::utils; + +pub(crate) struct EnvPrefix { + _ident: Ident, + _eq: Token![=], + pub(crate) prefix: LitStr, +} + +mod attrs { + pub const ENV_PREFIX: &str = "env_prefix"; + pub const SERDE_AS_STR: &str = "serde_as_str"; + pub const INNER: &str = "inner"; +} + +impl Parse for EnvPrefix { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + _ident: utils::parse_const_ident(input, attrs::ENV_PREFIX)?, + _eq: input.parse()?, + prefix: input.parse()?, + }) + } +} + +pub(crate) struct Inner { + _ident: Ident, +} + +impl Parse for Inner { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + _ident: utils::parse_const_ident(input, attrs::INNER)?, + }) + } +} + +pub(crate) struct SerdeAsStr { + _ident: Ident, +} + +impl Parse for SerdeAsStr { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + _ident: utils::parse_const_ident(input, attrs::SERDE_AS_STR)?, + }) + } +} + +pub(crate) fn impl_load_env( + field_idents: &[&Ident], + inner: &[bool], + option: &[bool], + lvalue: &[proc_macro2::TokenStream], + as_str: &[bool], + field_ty: &[Type], + field_environment: &[String], +) -> proc_macro2::TokenStream { + let set_field = field_ty + .iter() + .zip(field_idents.iter()) + .zip(as_str.iter()) + .zip(lvalue.iter()) + .zip(option.iter()) + .map(|((((ty, ident), &as_str_attr), l_value), &is_option)| { + let is_string = if let Type::Path(TypePath { path, .. 
}) = ty { + path.is_ident("String") + } else { + false + }; + // println!("Type in ") + // TODO: no need for separate fn + let set_field = if is_string { + quote! { #l_value = var } + } else if as_str_attr { + quote! { + #l_value = serde_json::from_value(var.into()) + .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? + } + } else if is_option { + quote! { + #l_value = Some(serde_json::from_value(var.into()) + .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))?) + } + } + else { + quote! { + #l_value = serde_json::from_str(&var) + .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? + } + }; + (set_field, l_value) + }) + .zip(field_environment.iter()) + .zip(inner.iter()) + .zip(option.iter()) + .map(|((((set_field, l_value), field_env), &inner_thing), &is_option)| { + let inner_thing2 = if inner_thing && is_option { + quote! { + // <#inner>::load_environment()?; + #l_value.unwrap().load_environment()?; + // if let Ok(config) = <#inner>::load_environment() { + // #l_value = Some(config) + // }; + } + } else if inner_thing { + quote! { + #l_value.load_environment()?; + } + } + else { + quote! {} + }; + quote! { + if let Ok(var) = std::env::var(#field_env) { + #set_field; + } + #inner_thing2 + } + }); + + quote! { + fn load_environment( + &'_ mut self + ) -> core::result::Result<(), iroha_config_base::derive::Error> { + #(#set_field)* + Ok(()) + } + } +} + +fn impl_get_doc_recursive( + field_ty: &[Type], + field_idents: &[&Ident], + inner: Vec, + docs: Vec, +) -> proc_macro2::TokenStream { + if field_idents.is_empty() { + return quote! { + fn get_doc_recursive<'a>( + inner_field: impl AsRef<[&'a str]>, + ) -> core::result::Result, iroha_config_base::derive::Error> + { + Err(iroha_config_base::derive::Error::UnknownField( + inner_field.as_ref().iter().map(ToString::to_string).collect() + )) + } + }; + } + let variants = field_idents + .iter() + .zip(inner) + .zip(docs) + .zip(field_ty) + .map(|(((ident, inner_thing), documentation), ty)| { + if inner_thing { + quote! { + [stringify!(#ident)] => { + let curr_doc = #documentation; + let inner_docs = <#ty as iroha_config_base::Configurable>::get_inner_docs(); + let total_docs = format!("{}\n\nHas following fields:\n\n{}\n", curr_doc, inner_docs); + Some(total_docs) + }, + [stringify!(#ident), rest @ ..] => <#ty as iroha_config_base::Configurable>::get_doc_recursive(rest)?, + } + } else { + quote! { [stringify!(#ident)] => Some(#documentation.to_owned()), } + } + }) + // XXX: Workaround + //Decription of issue is here https://stackoverflow.com/a/65353489 + .fold(quote! {}, |acc, new| quote! { #acc #new }); + + quote! { + fn get_doc_recursive<'a>( + inner_field: impl AsRef<[&'a str]>, + ) -> core::result::Result, iroha_config_base::derive::Error> + { + let inner_field = inner_field.as_ref(); + let doc = match inner_field { + #variants + field => return Err(iroha_config_base::derive::Error::UnknownField( + field.iter().map(ToString::to_string).collect() + )), + }; + Ok(doc) + } + } +} + +fn impl_get_inner_docs( + field_ty: &[Type], + field_idents: &[&Ident], + inner: Vec, + docs: Vec, +) -> proc_macro2::TokenStream { + let inserts = field_idents + .iter() + .zip(inner) + .zip(docs) + .zip(field_ty) + .map(|(((ident, inner_thing), documentation), ty)| { + let doc = if inner_thing { + quote!{ <#ty as iroha_config_base::Configurable>::get_inner_docs().as_str() } + } else { + quote!{ #documentation.into() } + }; + + quote! 
{ + inner_docs.push_str(stringify!(#ident)); + inner_docs.push_str(": "); + inner_docs.push_str(#doc); + inner_docs.push_str("\n\n"); + } + }) + // XXX: Workaround + //Description of issue is here https://stackoverflow.com/a/65353489 + .fold(quote! {}, |acc, new| quote! { #acc #new }); + + quote! { + fn get_inner_docs() -> String { + let mut inner_docs = String::new(); + #inserts + inner_docs + } + } +} + +fn impl_get_docs( + field_ty: &[Type], + field_idents: &[&Ident], + inner: Vec, + docs: Vec, +) -> proc_macro2::TokenStream { + let inserts = field_idents + .iter() + .zip(inner) + .zip(docs) + .zip(field_ty) + .map(|(((ident, inner_thing), documentation), ty)| { + let doc = if inner_thing { + quote!{ <#ty as iroha_config_base::Configurable>::get_docs().into() } + } else { + quote!{ #documentation.into() } + }; + + quote! { map.insert(stringify!(#ident).to_owned(), #doc); } + }) + // XXX: Workaround + //Decription of issue is here https://stackoverflow.com/a/65353489 + .fold(quote! {}, |acc, new| quote! { #acc #new }); + + quote! { + fn get_docs() -> serde_json::Value { + let mut map = serde_json::Map::new(); + #inserts + map.into() + } + } +} + +fn impl_get_recursive( + field_idents: &[&Ident], + inner: &[bool], + option: &[bool], + lvalue: &[proc_macro2::TokenStream], +) -> proc_macro2::TokenStream { + if field_idents.is_empty() { + return quote! { + fn get_recursive<'a, T>( + &self, + inner_field: T, + ) -> iroha_config_base::BoxedFuture<'a, core::result::Result> + where + T: AsRef<[&'a str]> + Send + 'a, + { + Err(iroha_config_base::derive::Error::UnknownField( + inner_field.as_ref().iter().map(ToString::to_string).collect() + )) + } + }; + } + let variants = field_idents + .iter() + .zip(inner) + .zip(option) + .zip(lvalue.iter()) + .map(|(((ident, &inner_thing), &is_option), l_value)| { + let inner_thing2 = if inner_thing { + quote! { + [stringify!(#ident), rest @ ..] => { + #l_value.get_recursive(rest)? + }, + } + } else { + quote! {} + }; + quote! { + [stringify!(#ident)] => { + serde_json::to_value(&#l_value) + .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? + } + #inner_thing2 + } + }) + // XXX: Workaround + //Decription of issue is here https://stackoverflow.com/a/65353489 + .fold(quote! {}, |acc, new| quote! { #acc #new }); + + quote! { + fn get_recursive<'a, T>( + &self, + inner_field: T, + ) -> core::result::Result + where + T: AsRef<[&'a str]> + Send + 'a, + { + let inner_field = inner_field.as_ref(); + let value = match inner_field { + #variants + field => return Err(iroha_config_base::derive::Error::UnknownField( + field.iter().map(ToString::to_string).collect() + )), + }; + Ok(value) + } + } +} + +#[allow(clippy::too_many_lines, clippy::str_to_string)] +pub(super) fn impl_configurable(ast: &DeriveInput) -> TokenStream { + let name = &ast.ident; + let prefix = ast + .attrs + .iter() + .find_map(|attr| attr.parse_args::().ok()) + .map(|pref| pref.prefix.value()) + .unwrap_or_default(); + + let fields = if let Data::Struct(DataStruct { + fields: Fields::Named(fields), + .. 
+ }) = &ast.data + { + &fields.named + } else { + abort!(ast, "Only structs are supported") + }; + let field_idents = fields + .iter() + .map(|field| { + #[allow(clippy::expect_used)] + field + .ident + .as_ref() + .expect("Should always be set for named structures") + }) + .collect::>(); + let field_attrs = fields.iter().map(|field| &field.attrs).collect::>(); + let field_ty = fields + .iter() + .map(|field| field.ty.clone()) + .collect::>(); + + let inner = field_attrs + .iter() + .map(|attrs| attrs.iter().any(|attr| attr.parse_args::().is_ok())) + .collect::>(); + + let option = field_ty + .iter() + .map(utils::is_option_type) + .collect::>(); + + let as_str = field_attrs + .iter() + .map(|attrs| { + attrs + .iter() + .any(|attr| attr.parse_args::().is_ok()) + }) + .collect::>(); + + let field_environment = field_idents + .iter() + .map(|ident| prefix.clone() + &ident.to_string().to_uppercase()) + .collect::>(); + let docs = field_attrs + .iter() + .zip(field_environment.iter()) + .zip(field_ty.iter()) + .map(|((attrs, env), field_type)| { + let real_doc = attrs + .iter() + .filter_map(|attr| attr.parse_meta().ok()) + .find_map(|metadata| { + if let Meta::NameValue(meta) = metadata { + if meta.path.is_ident("doc") { + if let Lit::Str(s) = meta.lit { + return Some(s); + } + } + } + None + }); + let real_doc = real_doc.map(|doc| doc.value() + "\n\n").unwrap_or_default(); + let docs = format!( + "{}Has type `{}`. Can be configured via environment variable `{}`", + real_doc, + quote! { #field_type }.to_string().replace(' ', ""), + env + ); + LitStr::new(&docs, Span::mixed_site()) + }) + .collect::>(); + let lvalue = field_ty + .iter() + .map(utils::is_arc_rwlock) + .zip(field_ty.iter().map(utils::is_option_type)) + .zip(field_idents.iter()); + let lvalue_read = lvalue + .clone() + .map(|((is_arc_rwlock, _), ident)| { + if is_arc_rwlock { + quote! { self.#ident.read().await } + } else { + quote! { self.#ident } + } + }) + .collect::>(); + let lvalue_write = lvalue + .clone() + .map(|((is_arc_rwlock, is_option), ident)| { + if is_arc_rwlock { + quote! { self.#ident.write().await } + } else { + quote! { self.#ident } + } + }) + .collect::>(); + + let load_environment = impl_load_env( + &field_idents, + &inner, + &option, + &lvalue_write, + &as_str, + &field_ty, + &field_environment, + ); + let get_recursive = impl_get_recursive(&field_idents, &inner, &option, &lvalue_read); + let get_doc_recursive = + impl_get_doc_recursive(&field_ty, &field_idents, inner.clone(), docs.clone()); + let get_inner_docs = impl_get_inner_docs(&field_ty, &field_idents, inner.clone(), docs.clone()); + let get_docs = impl_get_docs(&field_ty, &field_idents, inner, docs); + + let out = quote! { + impl iroha_config_base::Configurable for #name { + type Error = iroha_config_base::derive::Error; + + #get_recursive + #get_doc_recursive + #get_docs + #get_inner_docs + #load_environment + } + }; + out.into() +} diff --git a/config/base/derive/src/lib.rs b/config/base/derive/src/lib.rs index 0d8dd02c7e8..401c97ac380 100644 --- a/config/base/derive/src/lib.rs +++ b/config/base/derive/src/lib.rs @@ -1,102 +1,23 @@ //! Contains the `#[derive(Configurable)]` macro definition. 
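For orientation, a minimal sketch of how a struct is expected to use this derive (the struct, fields, and prefix below are illustrative and not part of this change). The generated `impl iroha_config_base::Configurable` provides `load_environment`, which reads `PREFIX_FIELDNAME` variables: `String` fields take the variable verbatim, other fields are parsed with `serde_json`, fields marked `#[config(serde_as_str)]` are deserialized from the variable treated as a JSON string, and `#[config(inner)]` recurses into nested configurations.

    use iroha_config_base::{derive::Configurable, Configurable as _};
    use serde::{Deserialize, Serialize};

    #[derive(Debug, Default, Serialize, Deserialize, Configurable)]
    #[config(env_prefix = "EXAMPLE_")]
    struct ExampleConfig {
        /// Address this service listens on
        address: String,    // taken verbatim from EXAMPLE_ADDRESS
        /// Maximum payload size in bytes
        max_payload: u64,   // parsed from EXAMPLE_MAX_PAYLOAD via serde_json
    }

    fn load() -> Result<ExampleConfig, iroha_config_base::derive::Error> {
        let mut cfg = ExampleConfig::default();
        cfg.load_environment()?; // apply environment overrides, if any
        Ok(cfg)
    }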
use proc_macro::TokenStream; -use proc_macro2::Span; -use proc_macro_error::{abort, abort_call_site}; -use quote::{format_ident, quote, ToTokens}; -use syn::{ - parse::{Parse, ParseStream}, - Attribute, Data, DataStruct, DeriveInput, Field, Fields, GenericArgument, Ident, Lit, LitStr, - Meta, NestedMeta, PathArguments, Token, Type, TypePath, -}; +use proc_macro_error::abort_call_site; +use quote::quote; -struct EnvPrefix { - _ident: Ident, - _eq: Token![=], - prefix: LitStr, -} - -mod attrs { - pub const ENV_PREFIX: &str = "env_prefix"; - pub const SERDE_AS_STR: &str = "serde_as_str"; - pub const INNER: &str = "inner"; -} +pub(crate) mod config; +pub(crate) mod proxy; +pub(crate) mod utils; +pub(crate) mod view; -fn get_type_argument<'sl, 'tl>(s: &'sl str, ty: &'tl Type) -> Option<&'tl GenericArgument> { - let path = if let Type::Path(r#type) = ty { - r#type - } else { - return None; - }; - let segments = &path.path.segments; - if segments.len() != 1 || segments[0].ident != s { - return None; - } - - if let PathArguments::AngleBracketed(bracketed_arguments) = &segments[0].arguments { - if bracketed_arguments.args.len() == 1 { - return Some(&bracketed_arguments.args[0]); +#[proc_macro_derive(Proxy)] +pub fn proxy_derive(input: TokenStream) -> TokenStream { + let ast = match syn::parse(input) { + Ok(ast) => ast, + Err(err) => { + abort_call_site!("Failed to parse input Token Stream: {}", err) } - } - None -} - -fn is_arc_rwlock(ty: &Type) -> bool { - #[allow(clippy::shadow_unrelated)] - let dearced_ty = get_type_argument("Arc", ty) - .and_then(|ty| { - if let GenericArgument::Type(r#type) = ty { - Some(r#type) - } else { - None - } - }) - .unwrap_or(ty); - get_type_argument("RwLock", dearced_ty).is_some() -} - -// TODO: make it const generic type once it will be stabilized -fn parse_const_ident(input: ParseStream, ident: &'static str) -> syn::Result { - let parse_ident: Ident = input.parse()?; - if parse_ident == ident { - Ok(parse_ident) - } else { - Err(syn::Error::new_spanned(parse_ident, "Unknown ident")) - } -} - -impl Parse for EnvPrefix { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - _ident: parse_const_ident(input, attrs::ENV_PREFIX)?, - _eq: input.parse()?, - prefix: input.parse()?, - }) - } -} - -struct Inner { - _ident: Ident, -} - -impl Parse for Inner { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - _ident: parse_const_ident(input, attrs::INNER)?, - }) - } -} - -struct SerdeAsStr { - _ident: Ident, -} - -impl Parse for SerdeAsStr { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - _ident: parse_const_ident(input, attrs::SERDE_AS_STR)?, - }) - } + }; + proxy::impl_proxy(&ast) } /// Derive for config. More details in `iroha_config_base` reexport @@ -108,728 +29,27 @@ pub fn configurable_derive(input: TokenStream) -> TokenStream { abort_call_site!("Failed to parse input Token Stream: {}", err) } }; - impl_configurable(&ast) -} - -fn impl_load_env( - field_idents: &[&Ident], - inner: &[bool], - lvalue: &[proc_macro2::TokenStream], - as_str: &[bool], - field_ty: &[Type], - field_environment: &[String], -) -> proc_macro2::TokenStream { - let set_field = field_ty - .iter() - .zip(field_idents.iter()) - .zip(as_str.iter()) - .zip(lvalue.iter()) - .map(|(((ty, ident), &as_str_attr), l_value)| { - let is_string = if let Type::Path(TypePath { path, .. }) = ty { - path.is_ident("String") - } else { - false - }; - let set_field = if is_string { - quote! { #l_value = var } - } else if as_str_attr { - quote! 
{ - #l_value = serde_json::from_value(var.into()) - .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? - } - } else { - quote! { - #l_value = serde_json::from_str(&var) - .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? - } - }; - (set_field, l_value) - }) - .zip(field_environment.iter()) - .zip(inner.iter()) - .map(|(((set_field, l_value), field_env), &inner_thing)| { - let inner_thing2 = if inner_thing { - quote! { - #l_value.load_environment()?; - } - } else { - quote! {} - }; - quote! { - if let Ok(var) = std::env::var(#field_env) { - #set_field; - } - #inner_thing2 - } - }); - - quote! { - fn load_environment( - &'_ mut self - ) -> core::result::Result<(), iroha_config_base::derive::Error> { - #(#set_field)* - Ok(()) - } - } -} - -fn impl_get_doc_recursive( - field_ty: &[Type], - field_idents: &[&Ident], - inner: Vec, - docs: Vec, -) -> proc_macro2::TokenStream { - if field_idents.is_empty() { - return quote! { - fn get_doc_recursive<'a>( - inner_field: impl AsRef<[&'a str]>, - ) -> core::result::Result, iroha_config_base::derive::Error> - { - Err(iroha_config_base::derive::Error::UnknownField( - inner_field.as_ref().iter().map(ToString::to_string).collect() - )) - } - }; - } - let variants = field_idents - .iter() - .zip(inner) - .zip(docs) - .zip(field_ty) - .map(|(((ident, inner_thing), documentation), ty)| { - if inner_thing { - quote! { - [stringify!(#ident)] => { - let curr_doc = #documentation; - let inner_docs = <#ty as iroha_config_base::Configurable>::get_inner_docs(); - let total_docs = format!("{}\n\nHas following fields:\n\n{}\n", curr_doc, inner_docs); - Some(total_docs) - }, - [stringify!(#ident), rest @ ..] => <#ty as iroha_config_base::Configurable>::get_doc_recursive(rest)?, - } - } else { - quote! { [stringify!(#ident)] => Some(#documentation.to_owned()), } - } - }) - // XXX: Workaround - //Decription of issue is here https://stackoverflow.com/a/65353489 - .fold(quote! {}, |acc, new| quote! { #acc #new }); - - quote! { - fn get_doc_recursive<'a>( - inner_field: impl AsRef<[&'a str]>, - ) -> core::result::Result, iroha_config_base::derive::Error> - { - let inner_field = inner_field.as_ref(); - let doc = match inner_field { - #variants - field => return Err(iroha_config_base::derive::Error::UnknownField( - field.iter().map(ToString::to_string).collect() - )), - }; - Ok(doc) - } - } -} - -fn impl_get_inner_docs( - field_ty: &[Type], - field_idents: &[&Ident], - inner: Vec, - docs: Vec, -) -> proc_macro2::TokenStream { - let inserts = field_idents - .iter() - .zip(inner) - .zip(docs) - .zip(field_ty) - .map(|(((ident, inner_thing), documentation), ty)| { - let doc = if inner_thing { - quote!{ <#ty as iroha_config_base::Configurable>::get_inner_docs().as_str() } - } else { - quote!{ #documentation.into() } - }; - - quote! { - inner_docs.push_str(stringify!(#ident)); - inner_docs.push_str(": "); - inner_docs.push_str(#doc); - inner_docs.push_str("\n\n"); - } - }) - // XXX: Workaround - //Description of issue is here https://stackoverflow.com/a/65353489 - .fold(quote! {}, |acc, new| quote! { #acc #new }); - - quote! 
{ - fn get_inner_docs() -> String { - let mut inner_docs = String::new(); - #inserts - inner_docs - } - } -} - -fn impl_get_docs( - field_ty: &[Type], - field_idents: &[&Ident], - inner: Vec, - docs: Vec, -) -> proc_macro2::TokenStream { - let inserts = field_idents - .iter() - .zip(inner) - .zip(docs) - .zip(field_ty) - .map(|(((ident, inner_thing), documentation), ty)| { - let doc = if inner_thing { - quote!{ <#ty as iroha_config_base::Configurable>::get_docs().into() } - } else { - quote!{ #documentation.into() } - }; - - quote! { map.insert(stringify!(#ident).to_owned(), #doc); } - }) - // XXX: Workaround - //Decription of issue is here https://stackoverflow.com/a/65353489 - .fold(quote! {}, |acc, new| quote! { #acc #new }); - - quote! { - fn get_docs() -> serde_json::Value { - let mut map = serde_json::Map::new(); - #inserts - map.into() - } - } -} - -fn impl_get_recursive( - field_idents: &[&Ident], - inner: Vec, - lvalue: &[proc_macro2::TokenStream], -) -> proc_macro2::TokenStream { - if field_idents.is_empty() { - return quote! { - fn get_recursive<'a, T>( - &self, - inner_field: T, - ) -> iroha_config_base::BoxedFuture<'a, core::result::Result> - where - T: AsRef<[&'a str]> + Send + 'a, - { - Err(iroha_config_base::derive::Error::UnknownField( - inner_field.as_ref().iter().map(ToString::to_string).collect() - )) - } - }; - } - let variants = field_idents - .iter() - .zip(inner) - .zip(lvalue.iter()) - .map(|((ident, inner_thing), l_value)| { - let inner_thing2 = if inner_thing { - quote! { - [stringify!(#ident), rest @ ..] => { - #l_value.get_recursive(rest)? - }, - } - } else { - quote! {} - }; - quote! { - [stringify!(#ident)] => { - serde_json::to_value(&#l_value) - .map_err(|e| iroha_config_base::derive::Error::field_error(stringify!(#ident), e))? - } - #inner_thing2 - } - }) - // XXX: Workaround - //Decription of issue is here https://stackoverflow.com/a/65353489 - .fold(quote! {}, |acc, new| quote! { #acc #new }); - - quote! { - fn get_recursive<'a, T>( - &self, - inner_field: T, - ) -> core::result::Result - where - T: AsRef<[&'a str]> + Send + 'a, - { - let inner_field = inner_field.as_ref(); - let value = match inner_field { - #variants - field => return Err(iroha_config_base::derive::Error::UnknownField( - field.iter().map(ToString::to_string).collect() - )), - }; - Ok(value) - } - } -} - -#[allow(clippy::too_many_lines, clippy::str_to_string)] -fn impl_configurable(ast: &DeriveInput) -> TokenStream { - let name = &ast.ident; - let prefix = ast - .attrs - .iter() - .find_map(|attr| attr.parse_args::().ok()) - .map(|pref| pref.prefix.value()) - .unwrap_or_default(); - - let fields = if let Data::Struct(DataStruct { - fields: Fields::Named(fields), - .. 
- }) = &ast.data - { - &fields.named - } else { - abort!(ast, "Only structs are supported") - }; - let field_idents = fields - .iter() - .map(|field| { - #[allow(clippy::expect_used)] - field - .ident - .as_ref() - .expect("Should always be set for named structures") - }) - .collect::>(); - let field_attrs = fields.iter().map(|field| &field.attrs).collect::>(); - let field_ty = fields - .iter() - .map(|field| field.ty.clone()) - .collect::>(); - - let inner = field_attrs - .iter() - .map(|attrs| attrs.iter().any(|attr| attr.parse_args::().is_ok())) - .collect::>(); - - let as_str = field_attrs - .iter() - .map(|attrs| { - attrs - .iter() - .any(|attr| attr.parse_args::().is_ok()) - }) - .collect::>(); - - let field_environment = field_idents - .iter() - .into_iter() - .map(|ident| prefix.clone() + &ident.to_string().to_uppercase()) - .collect::>(); - let docs = field_attrs - .iter() - .zip(field_environment.iter()) - .zip(field_ty.iter()) - .map(|((attrs, env), field_type)| { - let real_doc = attrs - .iter() - .filter_map(|attr| attr.parse_meta().ok()) - .find_map(|metadata| { - if let Meta::NameValue(meta) = metadata { - if meta.path.is_ident("doc") { - if let Lit::Str(s) = meta.lit { - return Some(s); - } - } - } - None - }); - let real_doc = real_doc.map(|doc| doc.value() + "\n\n").unwrap_or_default(); - let docs = format!( - "{}Has type `{}`. Can be configured via environment variable `{}`", - real_doc, - quote! { #field_type }.to_string().replace(' ', ""), - env - ); - LitStr::new(&docs, Span::mixed_site()) - }) - .collect::>(); - let lvalue = field_ty.iter().map(is_arc_rwlock).zip(field_idents.iter()); - let lvalue_read = lvalue - .clone() - .map(|(is_arc_rwlock, ident)| { - if is_arc_rwlock { - quote! { self.#ident.read().await } - } else { - quote! { self.#ident } - } - }) - .collect::>(); - let lvalue_write = lvalue - .clone() - .map(|(is_arc_rwlock, ident)| { - if is_arc_rwlock { - quote! { self.#ident.write().await } - } else { - quote! { self.#ident } - } - }) - .collect::>(); - - let load_environment = impl_load_env( - &field_idents, - &inner, - &lvalue_write, - &as_str, - &field_ty, - &field_environment, - ); - let get_recursive = impl_get_recursive(&field_idents, inner.clone(), &lvalue_read); - let get_doc_recursive = - impl_get_doc_recursive(&field_ty, &field_idents, inner.clone(), docs.clone()); - let get_inner_docs = impl_get_inner_docs(&field_ty, &field_idents, inner.clone(), docs.clone()); - let get_docs = impl_get_docs(&field_ty, &field_idents, inner, docs); - - let out = quote! { - impl iroha_config_base::Configurable for #name { - type Error = iroha_config_base::derive::Error; - - #get_recursive - #get_doc_recursive - #get_docs - #get_inner_docs - #load_environment - } - }; - out.into() -} - -// Take struct with named fields as input -#[derive(Debug, Clone)] -struct ViewInput { - attrs: Vec, - vis: syn::Visibility, - _struct_token: Token![struct], - ident: Ident, - generics: syn::Generics, - fields: Vec, - _semi_token: Option, -} - -impl Parse for ViewInput { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - attrs: input.call(Attribute::parse_outer)?, - vis: input.parse()?, - _struct_token: input.parse()?, - ident: input.parse()?, - generics: input.parse()?, - fields: input - .parse::()? - .named - .into_iter() - .collect(), - _semi_token: input.parse()?, - }) - } -} - -// Recreate struct -impl ToTokens for ViewInput { - fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - let ViewInput { - attrs, - vis, - ident, - generics, - fields, - .. 
- } = self; - let stream = quote! { - #(#attrs)* - #vis struct #ident #generics { - #(#fields),* - } - }; - tokens.extend(stream); - } -} - -/// Keywords used inside `#[view(...)]` -mod kw { - syn::custom_keyword!(ignore); - syn::custom_keyword!(into); -} - -/// Structure to parse `#[view(...)]` attributes -/// [`Inner`] is responsible for parsing attribute arguments -struct View(std::marker::PhantomData); - -impl View { - fn parse(attr: &Attribute) -> syn::Result { - attr.path - .is_ident("view") - .then(|| attr.parse_args::()) - .map_or_else( - || { - Err(syn::Error::new_spanned( - attr, - "Attribute must be in form #[view...]", - )) - }, - |inner| inner, - ) - } -} - -struct ViewIgnore { - _kw: kw::ignore, -} - -impl Parse for ViewIgnore { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - _kw: input.parse()?, - }) - } -} - -struct ViewFieldType { - _kw: kw::into, - _eq: Token![=], - ty: Type, -} - -impl Parse for ViewFieldType { - fn parse(input: ParseStream) -> syn::Result { - Ok(Self { - _kw: input.parse()?, - _eq: input.parse()?, - ty: input.parse()?, - }) - } -} - -impl From for Type { - fn from(value: ViewFieldType) -> Self { - value.ty - } + config::impl_configurable(&ast) } /// Generate view for given struct and convert from type to its view. /// More details in `iroha_config_base` reexport. #[proc_macro] pub fn view(input: TokenStream) -> TokenStream { - let ast = syn::parse_macro_input!(input as ViewInput); - let original = gen_original_struct(ast.clone()); - let view = gen_view_struct(ast); - let impl_from = gen_impl_from(&original, &view); - let impl_default = gen_impl_default(&original, &view); - let impl_has_view = gen_impl_has_view(&original); - let assertions = gen_assertions(&view); + let ast = syn::parse_macro_input!(input as view::ViewInput); + let original = view::gen_original_struct(ast.clone()); + let view = view::gen_view_struct(ast); + let impl_from = view::gen_impl_from(&original, &view); + // let impl_default = gen_impl_default(&original, &view); + let impl_has_view = view::gen_impl_has_view(&original); + let assertions = view::gen_assertions(&view); let out = quote! 
{ #original #impl_has_view #view #impl_from - #impl_default + // #impl_default #assertions }; out.into() } - -fn gen_original_struct(mut ast: ViewInput) -> ViewInput { - remove_attr_struct(&mut ast, "view"); - ast -} - -#[allow(clippy::str_to_string, clippy::expect_used)] -fn gen_view_struct(mut ast: ViewInput) -> ViewInput { - // Remove fields with #[view(ignore)] - ast.fields.retain(is_view_field_ignored); - // Change field type to `Type` if it has attribute #[view(into = Type)] - ast.fields.iter_mut().for_each(view_field_change_type); - // Replace doc-string for view - remove_attr(&mut ast.attrs, "doc"); - let view_doc = format!("View for {}", ast.ident); - ast.attrs.push(syn::parse_quote!( - #[doc = #view_doc] - )); - // Remove `Default` from #[derive(..., Default, ...)] or #[derive(Default)] because we implement `Default` inside macro - ast.attrs - .iter_mut() - .filter(|attr| attr.path.is_ident("derive")) - .for_each(|attr| { - let meta = attr - .parse_meta() - .expect("derive macro must be in one of the meta forms"); - match meta { - Meta::List(list) => { - let items: Vec = list - .nested - .into_iter() - .filter(|nested| { - if let NestedMeta::Meta(Meta::Path(path)) = nested { - if path.is_ident("Default") { - return false; - } - } - true - }) - .collect(); - *attr = syn::parse_quote!( - #[derive(#(#items),*)] - ) - } - Meta::Path(path) if path.is_ident("Default") => { - *attr = syn::parse_quote!( - #[derive()] - ) - } - _ => {} - } - }); - remove_attr_struct(&mut ast, "view"); - ast.ident = format_ident!("{}View", ast.ident); - ast -} - -fn gen_impl_from(original: &ViewInput, view: &ViewInput) -> proc_macro2::TokenStream { - let ViewInput { - ident: original_ident, - .. - } = original; - let ViewInput { - generics, - ident: view_ident, - fields, - .. - } = view; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let field_idents = extract_field_idents(fields); - - quote! { - impl #impl_generics core::convert::From<#original_ident> for #view_ident #ty_generics #where_clause { - fn from(config: #original_ident) -> Self { - let #original_ident { - #( - #field_idents, - )* - .. - } = config; - Self { - #( - #field_idents: core::convert::From::<_>::from(#field_idents), - )* - } - } - } - } -} - -fn gen_impl_default(original: &ViewInput, view: &ViewInput) -> proc_macro2::TokenStream { - let ViewInput { - ident: original_ident, - .. - } = original; - let ViewInput { - generics, - ident: view_ident, - .. - } = view; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - - quote! { - impl #impl_generics core::default::Default for #view_ident #ty_generics #where_clause { - fn default() -> Self { - core::convert::From::<_>::from(<#original_ident as core::default::Default>::default()) - } - } - } -} - -fn gen_impl_has_view(original: &ViewInput) -> proc_macro2::TokenStream { - let ViewInput { - generics, - ident: view_ident, - .. - } = original; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - - quote! { - impl #impl_generics iroha_config_base::view::HasView for #view_ident #ty_generics #where_clause {} - } -} - -fn gen_assertions(view: &ViewInput) -> proc_macro2::TokenStream { - let ViewInput { fields, .. } = view; - let field_types = extract_field_types(fields); - let messages: Vec = extract_field_idents(fields) - .iter() - .map(|ident| { - format!("Field `{ident}` has it's own view, consider adding attribute #[view(into = ViewType)]") - }) - .collect(); - quote! 
{ - /// Assert that every field of 'View' doesn't implement `HasView` trait - const _: () = { - use iroha_config_base::view::NoView; - #( - const _: () = assert!(!iroha_config_base::view::IsHasView::<#field_types>::IS_HAS_VIEW, #messages); - )* - }; - } -} - -/// Change [`Field`] type to `Type` if `#[view(type = Type)]` is present -fn view_field_change_type(field: &mut Field) { - if let Some(ty) = field - .attrs - .iter() - .map(View::::parse) - .find_map(Result::ok) - .map(ViewFieldType::into) - { - field.ty = ty; - } -} - -/// Check if [`Field`] has `#[view(ignore)]` -fn is_view_field_ignored(field: &Field) -> bool { - field - .attrs - .iter() - .map(View::::parse) - .find_map(Result::ok) - .is_none() -} - -/// Remove attributes with ident [`attr_ident`] from struct attributes and field attributes -fn remove_attr_struct(ast: &mut ViewInput, attr_ident: &str) { - let ViewInput { attrs, fields, .. } = ast; - for field in fields { - remove_attr(&mut field.attrs, attr_ident) - } - remove_attr(attrs, attr_ident); -} - -/// Remove attributes with ident [`attr_ident`] from attributes -fn remove_attr(attrs: &mut Vec, attr_ident: &str) { - attrs.retain(|attr| !attr.path.is_ident(attr_ident)); -} - -/// Return [`Vec`] of fields idents -fn extract_field_idents(fields: &[Field]) -> Vec<&Ident> { - fields - .iter() - .map(|field| { - #[allow(clippy::expect_used)] - field - .ident - .as_ref() - .expect("Should always be set for named structures") - }) - .collect::>() -} - -/// Return [`Vec`] of fields types -fn extract_field_types(fields: &[Field]) -> Vec<&Type> { - fields.iter().map(|field| &field.ty).collect::>() -} diff --git a/config/base/derive/src/proxy.rs b/config/base/derive/src/proxy.rs new file mode 100644 index 00000000000..b16af007f87 --- /dev/null +++ b/config/base/derive/src/proxy.rs @@ -0,0 +1,217 @@ +use proc_macro::TokenStream; +use proc_macro_error::abort; +use quote::{format_ident, quote}; +use syn::{ + parse::{Parse, ParseStream}, + parse_quote, Attribute, Data, DataStruct, DeriveInput, Field, Fields, Ident, Token, Type, +}; + +use super::{config, utils}; + +// Take struct with named fields as input +#[derive(Debug, Clone)] +struct ProxyInput { + attrs: Vec, + vis: syn::Visibility, + _struct_token: Token![struct], + ident: Ident, + generics: syn::Generics, + fields: Vec, + _semi_token: Option, +} + +impl Parse for ProxyInput { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + _struct_token: input.parse()?, + ident: input.parse()?, + generics: input.parse()?, + fields: input + .parse::()? + .named + .into_iter() + .collect(), + _semi_token: input.parse()?, + }) + } +} +impl ProxyInput {} + +pub(super) fn impl_proxy(ast: &DeriveInput) -> TokenStream { + let DeriveInput { + attrs, + vis, + ident: parent_name, + generics, + data, + } = ast; + + let prefix = ast + .attrs + .iter() + .find_map(|attr| attr.parse_args::().ok()) + .map(|pref| pref.prefix.value()) + .unwrap_or_default(); + + let proxy_name = format_ident!("{}Proxy", parent_name); + + let fields = if let Data::Struct(DataStruct { + fields: Fields::Named(fields), + .. 
+ }) = &data + { + &fields.named + } else { + abort!(ast, "Only structs are supported") + }; + + let field_idents = fields + .iter() + .map(|field| { + #[allow(clippy::expect_used)] + field + .ident + .as_ref() + .expect("Should always be set for named structures") + }) + .collect::>(); + // dbg!(&field_idents); + let field_attrs = fields + .iter() + .map(|field| field.attrs.as_slice()) + .collect::>(); + let field_ty = fields + .iter() + .map(|field| field.ty.clone()) + .collect::>(); + + let inner = utils::field_has_inner_attr(&field_attrs); + + let field_environment = field_idents + .iter() + .map(|ident| prefix.clone() + &ident.to_string().to_uppercase()) + .collect::>(); + + let lvalue = field_ty + .iter() + .map(utils::is_arc_rwlock) + .zip(field_idents.iter()); + + let lvalue_write = lvalue + .clone() + .map(|(is_arc_rwlock, ident)| { + if is_arc_rwlock { + quote! { self.#ident.write().await } + } else { + quote! { self.#ident } + } + }) + .collect::>(); + + let as_str = field_attrs + .iter() + .map(|attrs| { + attrs + .iter() + .any(|attr| attr.parse_args::().is_ok()) + }) + .collect::>(); + + println!("GETTING PROXY LOADENV"); + // dbg!(&field_ty); + // dbg!(&field_idents); + let proxy_ty = field_ty + .iter() + .map(|ty| { + let new_ty: Type = parse_quote! { + // #(#attrs)* + Option<#ty> + }; + new_ty + }) + .collect::>(); + + // let load_env_fn = config::impl_load_env( + // &field_idents, + // &inner, + // &lvalue_write, + // &as_str, + // &proxy_ty, + // &field_environment, + // ); + + let proxy_fields = gen_proxy_struct_fields(&field_idents, &field_attrs, &field_ty); + let build_fn = impl_build(&field_idents, &field_attrs); + + quote! { + // #[derive(Debug, Clone, Serialize, Deserialize)] + // // #(#attrs)* + // #vis struct #proxy_name #generics { + // #proxy_fields + // } + // impl iroha_config_base::proxy::Combine for #proxy_name { + // type Target = #parent_name; + // #load_env_fn + // #build_fn + + // } + } + .into() +} + +fn impl_build(field_idents: &[&Ident], field_attrs: &[&[Attribute]]) -> proc_macro2::TokenStream { + let checked_fields = gen_none_fields_check(field_idents, field_attrs); + + quote! { + fn build(self) -> Result { + Ok(Self::Target { + #checked_fields + }) + } + } +} + +fn gen_proxy_struct_fields( + field_idents: &[&Ident], + field_attrs: &[&[Attribute]], + field_ty: &[Type], +) -> proc_macro2::TokenStream { + let combined_fields = field_idents + .iter() + .zip(field_attrs.iter()) + .zip(field_ty.iter()) + .map(|((ident, attrs), ty)| { + if utils::is_option_type(ty) { + quote! { #ident: #ty } + } else { + quote! { + // #(#attrs)* + #ident: Option<#ty> + } + } + }); + quote! { + #(#combined_fields),* + } +} + +/// Helper function for checking inner +fn gen_none_fields_check( + field_idents: &[&Ident], + field_attrs: &[&[Attribute]], +) -> proc_macro2::TokenStream { + let checked_fields = field_idents + .iter() + .zip(field_attrs.iter()) + .map(|(ident, attrs)| { + quote! { + // #(#attrs)* + #ident: self.#ident.ok_or(iroha_config_base::derive::Error::ProxyError(ident.to_string()))? + } + }); + quote! 
{ + #(#checked_fields),* + } +} diff --git a/config/base/derive/src/utils.rs b/config/base/derive/src/utils.rs new file mode 100644 index 00000000000..81ce6c82414 --- /dev/null +++ b/config/base/derive/src/utils.rs @@ -0,0 +1,98 @@ +use syn::{ + parse::{Parse, ParseStream}, + Attribute, Field, GenericArgument, Ident, PathArguments, Type, +}; + +use super::config; + +/// Remove attributes with ident [`attr_ident`] from attributes +pub(crate) fn remove_attr(attrs: &mut Vec, attr_ident: &str) { + attrs.retain(|attr| !attr.path.is_ident(attr_ident)); +} + +/// Return [`Vec`] of fields idents +pub(crate) fn extract_field_idents(fields: &[Field]) -> Vec<&Ident> { + fields + .iter() + .map(|field| { + #[allow(clippy::expect_used)] + field + .ident + .as_ref() + .expect("Should always be set for named structures") + }) + .collect::>() +} + +/// Return [`Vec`] of fields types +pub(crate) fn extract_field_types(fields: &[Field]) -> Vec<&Type> { + fields.iter().map(|field| &field.ty).collect::>() +} + +pub(crate) fn get_type_argument<'sl, 'tl>( + s: &'sl str, + ty: &'tl Type, +) -> Option<&'tl GenericArgument> { + let path = if let Type::Path(r#type) = ty { + r#type + } else { + return None; + }; + let segments = &path.path.segments; + if segments.len() != 1 || segments[0].ident != s { + return None; + } + + if let PathArguments::AngleBracketed(bracketed_arguments) = &segments[0].arguments { + if bracketed_arguments.args.len() == 1 { + return Some(&bracketed_arguments.args[0]); + } + } + None +} + +pub(crate) fn get_inner_type<'tl, 'sl>(outer_ty_ident: &'sl str, ty: &'tl Type) -> &'tl Type { + #[allow(clippy::shadow_unrelated)] + get_type_argument(outer_ty_ident, ty) + .and_then(|ty| { + if let GenericArgument::Type(r#type) = ty { + Some(r#type) + } else { + None + } + }) + .unwrap_or(ty) +} + +pub(crate) fn is_arc_rwlock(ty: &Type) -> bool { + let dearced_ty = get_inner_type("Arc", ty); + get_type_argument("RwLock", dearced_ty).is_some() +} + +// TODO: make it const generic type once it will be stabilized +pub(crate) fn parse_const_ident(input: ParseStream, ident: &'static str) -> syn::Result { + let parse_ident: Ident = input.parse()?; + if parse_ident == ident { + Ok(parse_ident) + } else { + Err(syn::Error::new_spanned(parse_ident, "Unknown ident")) + } +} + +// TODO: complete doc +/// Receives all the attrs on fields, returns a vec ... 
+pub(crate) fn field_has_inner_attr(field_attrs: &[&[Attribute]]) -> Vec { + field_attrs + .iter() + .map(|attrs| { + attrs + .iter() + .any(|attr| attr.parse_args::().is_ok()) + }) + .collect::>() +} + +/// Check if the provided type is of the form [`Option<..>`] +pub(crate) fn is_option_type(ty: &Type) -> bool { + get_type_argument("Option", ty).is_some() +} diff --git a/config/base/derive/src/view.rs b/config/base/derive/src/view.rs new file mode 100644 index 00000000000..af583b12d30 --- /dev/null +++ b/config/base/derive/src/view.rs @@ -0,0 +1,295 @@ +use quote::{format_ident, quote, ToTokens}; +use syn::{ + parse::{Parse, ParseStream}, + Attribute, Field, Ident, Meta, NestedMeta, Token, Type, +}; + +use super::utils; + +// Take struct with named fields as input +#[derive(Debug, Clone)] +pub(crate) struct ViewInput { + attrs: Vec, + vis: syn::Visibility, + _struct_token: Token![struct], + ident: Ident, + generics: syn::Generics, + fields: Vec, + _semi_token: Option, +} + +impl Parse for ViewInput { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + _struct_token: input.parse()?, + ident: input.parse()?, + generics: input.parse()?, + fields: input + .parse::()? + .named + .into_iter() + .collect(), + _semi_token: input.parse()?, + }) + } +} + +// Recreate struct +impl ToTokens for ViewInput { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + let ViewInput { + attrs, + vis, + ident, + generics, + fields, + .. + } = self; + let stream = quote! { + #(#attrs)* + #vis struct #ident #generics { + #(#fields),* + } + }; + tokens.extend(stream); + } +} + +/// Keywords used inside `#[view(...)]` +mod kw { + syn::custom_keyword!(ignore); + syn::custom_keyword!(into); +} + +/// Structure to parse `#[view(...)]` attributes +/// [`Inner`] is responsible for parsing attribute arguments +pub(crate) struct View(std::marker::PhantomData); + +impl View { + fn parse(attr: &Attribute) -> syn::Result { + attr.path + .is_ident("view") + .then(|| attr.parse_args::()) + .map_or_else( + || { + Err(syn::Error::new_spanned( + attr, + "Attribute must be in form #[view...]", + )) + }, + |inner| inner, + ) + } +} + +pub(crate) struct ViewIgnore { + _kw: kw::ignore, +} + +impl Parse for ViewIgnore { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + _kw: input.parse()?, + }) + } +} + +pub(crate) struct ViewFieldType { + _kw: kw::into, + _eq: Token![=], + ty: Type, +} + +impl Parse for ViewFieldType { + fn parse(input: ParseStream) -> syn::Result { + Ok(Self { + _kw: input.parse()?, + _eq: input.parse()?, + ty: input.parse()?, + }) + } +} + +impl From for Type { + fn from(value: ViewFieldType) -> Self { + value.ty + } +} + +pub(crate) fn gen_original_struct(mut ast: ViewInput) -> ViewInput { + remove_attr_struct(&mut ast, "view"); + ast +} + +#[allow(clippy::str_to_string, clippy::expect_used)] +pub(crate) fn gen_view_struct(mut ast: ViewInput) -> ViewInput { + // Remove fields with #[view(ignore)] + ast.fields.retain(is_view_field_ignored); + // Change field type to `Type` if it has attribute #[view(into = Type)] + ast.fields.iter_mut().for_each(view_field_change_type); + // Replace doc-string for view + utils::remove_attr(&mut ast.attrs, "doc"); + let view_doc = format!("View for {}", ast.ident); + ast.attrs.push(syn::parse_quote!( + #[doc = #view_doc] + )); + // Remove `Default` from #[derive(..., Default, ...)] or #[derive(Default)] because we implement `Default` inside macro + // TODO: also add info with 
remove proxy + ast.attrs + .iter_mut() + .filter(|attr| attr.path.is_ident("derive")) + .for_each(|attr| { + let meta = attr + .parse_meta() + .expect("derive macro must be in one of the meta forms"); + match meta { + Meta::List(list) => { + let items: Vec = list + .nested + .into_iter() + .filter(|nested| { + if let NestedMeta::Meta(Meta::Path(path)) = nested { + if path.is_ident("Default") || path.is_ident("Proxy") { + return false; + } + } + true + }) + .collect(); + *attr = syn::parse_quote!( + #[derive(#(#items),*)] + ) + } + Meta::Path(path) if path.is_ident("Default") => { + *attr = syn::parse_quote!( + #[derive()] + ) + } + _ => {} + } + }); + remove_attr_struct(&mut ast, "view"); + ast.ident = format_ident!("{}View", ast.ident); + ast +} + +pub(crate) fn gen_impl_from(original: &ViewInput, view: &ViewInput) -> proc_macro2::TokenStream { + let ViewInput { + ident: original_ident, + .. + } = original; + let ViewInput { + generics, + ident: view_ident, + fields, + .. + } = view; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let field_idents = utils::extract_field_idents(fields); + + quote! { + impl #impl_generics core::convert::From<#original_ident> for #view_ident #ty_generics #where_clause { + fn from(config: #original_ident) -> Self { + let #original_ident { + #( + #field_idents, + )* + .. + } = config; + Self { + #( + #field_idents: core::convert::From::<_>::from(#field_idents), + )* + } + } + } + } +} + +pub(crate) fn gen_impl_default(original: &ViewInput, view: &ViewInput) -> proc_macro2::TokenStream { + let ViewInput { + ident: original_ident, + .. + } = original; + let ViewInput { + generics, + ident: view_ident, + .. + } = view; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + quote! { + impl #impl_generics core::default::Default for #view_ident #ty_generics #where_clause { + fn default() -> Self { + core::convert::From::<_>::from(<#original_ident as core::default::Default>::default()) + } + } + } +} + +pub(crate) fn gen_impl_has_view(original: &ViewInput) -> proc_macro2::TokenStream { + let ViewInput { + generics, + ident: view_ident, + .. + } = original; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + quote! { + impl #impl_generics iroha_config_base::view::HasView for #view_ident #ty_generics #where_clause {} + } +} + +pub(crate) fn gen_assertions(view: &ViewInput) -> proc_macro2::TokenStream { + let ViewInput { fields, .. } = view; + let field_types = utils::extract_field_types(fields); + let messages: Vec = utils::extract_field_idents(fields) + .iter() + .map(|ident| { + format!("Field `{ident}` has it's own view, consider adding attribute #[view(into = ViewType)]") + }) + .collect(); + quote! { + /// Assert that every field of 'View' doesn't implement `HasView` trait + const _: () = { + use iroha_config_base::view::NoView; + #( + const _: () = assert!(!iroha_config_base::view::IsHasView::<#field_types>::IS_HAS_VIEW, #messages); + )* + }; + } +} + +/// Check if [`Field`] has `#[view(ignore)]` +pub(crate) fn is_view_field_ignored(field: &Field) -> bool { + field + .attrs + .iter() + .map(View::::parse) + .find_map(Result::ok) + .is_none() +} + +/// Remove attributes with ident [`attr_ident`] from struct attributes and field attributes +pub(crate) fn remove_attr_struct(ast: &mut ViewInput, attr_ident: &str) { + let ViewInput { attrs, fields, .. 
} = ast; + for field in fields { + utils::remove_attr(&mut field.attrs, attr_ident) + } + utils::remove_attr(attrs, attr_ident); +} + +/// Change [`Field`] type to `Type` if `#[view(type = Type)]` is present +pub(crate) fn view_field_change_type(field: &mut Field) { + if let Some(ty) = field + .attrs + .iter() + .map(View::::parse) + .find_map(Result::ok) + .map(ViewFieldType::into) + { + field.ty = ty; + } +} diff --git a/config/base/src/lib.rs b/config/base/src/lib.rs index 3336a2c400d..e6c9be375fa 100644 --- a/config/base/src/lib.rs +++ b/config/base/src/lib.rs @@ -1,7 +1,9 @@ //! Package for managing iroha configuration +use eyre::WrapErr; use serde::{de::DeserializeOwned, Serialize}; use serde_json::Value; +use std::{fmt::Debug, fs::File, io::BufReader, path::Path}; pub mod derive { //! Modules for `Configurable` entities @@ -13,6 +15,7 @@ pub mod derive { /// View contains a subset of the fields that the type has. /// /// Works only with structs. + // TODO: alter as won't be true after yeeting [`Default`] /// Type must implement [`Default`]. /// /// ## Container attributes @@ -122,6 +125,8 @@ pub mod derive { /// assert_eq!(ip.ip, Ipv4Addr::new(127, 0, 0, 1)); /// ``` pub use iroha_config_derive::Configurable; + // TODO: write doc for new macro + pub use iroha_config_derive::Proxy; /// Error related to deserializing specific field #[derive(Debug, Display)] @@ -139,13 +144,16 @@ pub mod derive { } } - /// Derive `Configurable` error + /// Derive `Configurable` and `Proxy` error #[derive(Debug)] + #[allow(clippy::enum_variant_names)] pub enum Error { /// Got unknown field UnknownField(Vec), /// Failed to deserialize or serialize a field FieldError(FieldError), + /// Some of the proxy fields were [`None`] at build stage + ProxyError(String), } impl fmt::Display for Error { @@ -163,6 +171,13 @@ pub mod derive { write!(f, "Failed to deserialize: Unknown field {}", field) } Self::FieldError(_) => write!(f, "Failed to deserialize"), + Self::ProxyError(field) => { + write!( + f, + "Proxy structure had at least one uninitialized field: {}", + field + ) + } } } } @@ -256,3 +271,59 @@ pub mod view { pub const IS_HAS_VIEW: bool = true; } } + +pub mod proxy { + //! Module for configuration proxies' traits + + use super::*; + + /// Pseudo-default trait only used for doc generation + pub trait DocsDefault { + fn default() -> Self; + } + + /// Trait used to convert configs from file and env + pub trait Combine: Sized + Serialize + DeserializeOwned { + /// Which type of [`Configuration`] it builds into + type Target; + + /// Build the config, do the necessary checks + fn build(self) -> Result; + + /// If any of the fields in [`other`] are filled, they + /// override the values of the fields in [`self`]. + fn combine(self, other: Self) -> eyre::Result; + + /// Construct [`Self`] from a path-like object. + /// + /// # Errors + /// - File not found. + /// - File found, but peer configuration parsing failed. + /// - The length of the array in raw JSON representation is different + /// from the length of the array in + /// [`self.sumeragi.trusted_peers.peers`], most likely due to two + /// (or more) peers having the same public key. 
+ fn from_path + Debug + Clone>(path: P) -> eyre::Result { + let file = + File::open(&path).wrap_err(format!("Failed to open the config file {:?}", path))?; + let reader = BufReader::new(file); + serde_json::from_reader(reader) + .wrap_err(format!("Failed to deserialize json {:?} from reader", path)) + } + + // fn finalize(&mut self) -> Result<()> { + // self.sumeragi.key_pair = + // KeyPair::new(self.public_key.clone(), self.private_key.clone())?; + // self.sumeragi.peer_id = + // iroha_data_model::peer::Id::new(&self.torii.p2p_addr, &self.public_key.clone()); + + // Ok(()) + // } + + /// Load configuration from the environment + /// + /// # Errors + /// Fails if Configuration deserialization fails (e.g. if `TrustedPeers` contains entries with duplicate public keys) + fn load_environment(&mut self) -> core::result::Result<(), derive::Error>; + } +} diff --git a/config/src/client.rs b/config/src/client.rs index a26e3a96be5..37ce9facaaf 100644 --- a/config/src/client.rs +++ b/config/src/client.rs @@ -3,7 +3,7 @@ use std::{fmt, fs::File, io::BufReader, path::Path, str::FromStr}; use derive_more::Display; use eyre::{eyre, Result, WrapErr}; -use iroha_config_base::derive::Configurable; +use iroha_config_base::derive::{Configurable, Proxy}; use iroha_crypto::prelude::*; use iroha_data_model::{prelude::*, transaction}; use iroha_primitives::small::SmallStr; @@ -137,6 +137,7 @@ impl Configuration { AccountId::from_str("alice@wonderland").expect("Account ID not valid") } + // TODO: Delete this? /// This method will build `Configuration` from a json *pretty* formatted file (without `:` in /// key names). /// diff --git a/config/src/genesis.rs b/config/src/genesis.rs index 535acdbd0fb..3ee203a06c5 100644 --- a/config/src/genesis.rs +++ b/config/src/genesis.rs @@ -1,5 +1,8 @@ //! Module with genesis configuration logic. -use iroha_config_base::derive::{view, Configurable}; +use iroha_config_base::{ + derive::{view, Configurable, Proxy}, + proxy::DocsDefault, +}; use iroha_crypto::{KeyPair, PrivateKey, PublicKey}; use serde::{Deserialize, Serialize}; @@ -11,7 +14,6 @@ const DEFAULT_GENESIS_SUBMISSION_DELAY_MS: u64 = 1000; view! { /// Configuration of the genesis block and the process of its submission. #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Configurable)] - #[serde(default)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "IROHA_GENESIS_")] pub struct Configuration { @@ -34,7 +36,7 @@ view! { } } -impl Default for Configuration { +impl DocsDefault for Configuration { fn default() -> Self { let (public_key, private_key) = Self::placeholder_keypair().into(); diff --git a/config/src/iroha.rs b/config/src/iroha.rs index 0c8aedaa9b8..c65a45634fc 100644 --- a/config/src/iroha.rs +++ b/config/src/iroha.rs @@ -2,7 +2,8 @@ use std::{fmt::Debug, fs::File, io::BufReader, path::Path}; use eyre::{Result, WrapErr}; -use iroha_config_base::derive::{view, Configurable}; +use iroha_config_base::derive::{view, Configurable, Proxy}; +use iroha_config_base::proxy::DocsDefault; use iroha_crypto::prelude::*; use serde::{Deserialize, Serialize}; @@ -11,8 +12,7 @@ use super::*; // Generate `ConfigurationView` without the private key view! { /// Configuration parameters for a peer - #[derive(Debug, Clone, Deserialize, Serialize, Configurable)] - #[serde(default)] + #[derive(Debug, Clone, Deserialize, Serialize, Configurable, Proxy)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "IROHA_")] pub struct Configuration { @@ -23,11 +23,14 @@ view! 
{ #[view(ignore)] pub private_key: PrivateKey, /// Disable coloring of the backtrace and error report on panic + #[serde(default), config(inner)] pub disable_panic_terminal_colors: bool, /// Iroha will shutdown on any panic if this option is set to `true`. + #[serde(default)] pub shutdown_on_panic: bool, /// `Kura` configuration #[config(inner)] + #[serde(default)] pub kura: kura::Configuration, /// `Sumeragi` configuration #[config(inner)] @@ -38,12 +41,15 @@ view! { pub torii: torii::Configuration, /// `BlockSynchronizer` configuration #[config(inner)] + #[serde(default)] pub block_sync: block_sync::Configuration, /// `Queue` configuration #[config(inner)] + #[serde(default)] pub queue: queue::Configuration, /// `Logger` configuration #[config(inner)] + #[serde(default)] pub logger: logger::Configuration, /// `GenesisBlock` configuration #[config(inner)] @@ -51,17 +57,20 @@ view! { pub genesis: genesis::Configuration, /// `WorldStateView` configuration #[config(inner)] + #[serde(default)] pub wsv: wsv::Configuration, /// Network configuration #[config(inner)] + #[serde(default)] pub network: network::Configuration, /// Telemetry configuration #[config(inner)] + #[serde(default)] pub telemetry: telemetry::Configuration, } } -impl Default for Configuration { +impl DocsDefault for Configuration { fn default() -> Self { let sumeragi_configuration = sumeragi::Configuration::default(); let (public_key, private_key) = sumeragi_configuration.key_pair.clone().into(); diff --git a/config/src/sumeragi.rs b/config/src/sumeragi.rs index ebb9d8897cb..7a45db75ff8 100644 --- a/config/src/sumeragi.rs +++ b/config/src/sumeragi.rs @@ -2,7 +2,10 @@ use std::{collections::HashSet, fmt::Debug, fs::File, io::BufReader, path::Path}; use eyre::{Result, WrapErr}; -use iroha_config_base::derive::{view, Configurable}; +use iroha_config_base::{ + derive::{view, Configurable, Proxy}, + proxy::DocsDefault, +}; use iroha_crypto::prelude::*; use iroha_data_model::{prelude::*, transaction}; use serde::{Deserialize, Serialize}; @@ -24,12 +27,10 @@ view! { /// [`Configuration`] provides an ability to define parameters such as `BLOCK_TIME_MS` /// and a list of `TRUSTED_PEERS`. #[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Configurable)] - #[serde(default)] #[serde(rename_all = "UPPERCASE")] #[config(env_prefix = "SUMERAGI_")] pub struct Configuration { /// The key pair consisting of a private and a public key. - #[serde(skip)] #[view(ignore)] pub key_pair: KeyPair, /// Current Peer Identification. @@ -53,7 +54,7 @@ view! { } } -impl Default for Configuration { +impl DocsDefault for Configuration { fn default() -> Self { Self { key_pair: Self::placeholder_keypair(), diff --git a/tools/kagami/src/main.rs b/tools/kagami/src/main.rs index eb0bce7ba44..a7f23c4dbea 100644 --- a/tools/kagami/src/main.rs +++ b/tools/kagami/src/main.rs @@ -6,6 +6,7 @@ use std::io::{stdout, BufWriter, Write}; use clap::{ArgGroup, StructOpt}; use color_eyre::eyre::WrapErr as _; +use iroha_config::base::proxy::DocsDefault; use iroha_config::iroha::Configuration; pub type Outcome = color_eyre::Result<()>; @@ -209,7 +210,7 @@ mod docs { use super::*; - impl + Send + Sync + Default> PrintDocs for C {} + impl + Send + Sync + DocsDefault> PrintDocs for C {} #[derive(StructOpt, Debug, Clone, Copy)] pub struct Args; @@ -220,7 +221,7 @@ mod docs { } } - pub trait PrintDocs: Configurable + Send + Sync + Default + pub trait PrintDocs: Configurable + Send + Sync + DocsDefault where Self::Error: Debug, {
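The intended end-to-end flow for the new configuration proxies, sketched below. This assumes `ConfigurationProxy` eventually implements `Combine`; the generated impl is still commented out in this revision and several signatures above look truncated, so treat this as a usage sketch rather than working code. `from_path` fills the proxy from a JSON file, `load_environment` layers `IROHA_*` variables on top, and `build` produces the real `Configuration`, returning `Error::ProxyError` for any field left unset. `combine` merges two proxies the same way, preferring filled fields from the second.

    use eyre::{eyre, Result};
    use iroha_config::{
        base::proxy::Combine,
        iroha::{Configuration, ConfigurationProxy},
    };

    fn assemble(path: &str) -> Result<Configuration> {
        // Every proxy field is an Option<..>; keys missing from the file stay None.
        let mut proxy = ConfigurationProxy::from_path(path)?;
        // Environment variables override whatever the file provided.
        proxy.load_environment().map_err(|e| eyre!("{e}"))?;
        // Any field still left as None becomes Error::ProxyError here.
        proxy.build().map_err(|e| eyre!("{e}"))
    }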
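For comparison, a rough sketch of what `view!` produces for the structs above (the types and fields here are illustrative): it re-emits the original struct with the `#[view(...)]` attributes stripped, emits a `...View` twin in which `#[view(ignore)]` fields are removed and `#[view(into = Type)]` fields are re-typed, generates `From<Original>` for the view and a `HasView` marker impl for the original, and asserts at compile time that no field type of the view itself implements `HasView`. With this change the view's `Default` impl is no longer generated, matching the removal of `Default` from the configurations themselves.

    use iroha_config_base::derive::view;
    use serde::{Deserialize, Serialize};

    view! {
        /// Inner section (illustrative)
        #[derive(Debug, Clone, Serialize, Deserialize)]
        pub struct Inner {
            pub level: String,
        }
    }

    view! {
        /// Outer configuration (illustrative)
        #[derive(Debug, Clone, Serialize, Deserialize)]
        pub struct Outer {
            pub public_key: String,
            #[view(ignore)]            // dropped from OuterView
            pub private_key: String,
            #[view(into = InnerView)]  // Inner has a view of its own, so it must be re-typed
            pub inner: Inner,
        }
    }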