
refactor(contract): derive procedural macros #2152

Merged · 11 commits · Feb 14, 2023
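Reviewer context: the core of this refactor is that each `derive_*` implementation now returns `Result<TokenStream, Error>`, so the `Err`-to-`compile_error!` conversion happens once at the proc-macro boundary instead of via scattered `to_compile_error()` calls. A minimal sketch of that pattern (the derive and helper names below are illustrative, not this crate's exact entry points):

use proc_macro::TokenStream;
use syn::{parse_macro_input, DeriveInput};

// Illustrative entry point: parse, delegate, and convert any error into
// a spanned `compile_error!` exactly once, at the boundary.
#[proc_macro_derive(MyDerive)]
pub fn my_derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    match derive_impl(&input) {
        Ok(tokens) => tokens.into(),
        Err(err) => err.to_compile_error().into(),
    }
}

// The inner logic is now free to use `?` on syn::Result everywhere.
fn derive_impl(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
    Err(syn::Error::new_spanned(input, "illustrative error"))
}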
1 change: 1 addition & 0 deletions Cargo.lock


6 changes: 4 additions & 2 deletions ethers-contract/ethers-contract-derive/Cargo.toml
@@ -20,12 +20,14 @@ proc-macro = true
 ethers-core = { version = "^1.0.0", path = "../../ethers-core" }
 ethers-contract-abigen = { version = "^1.0.0", path = "../ethers-contract-abigen", default-features = false }

-serde_json = "1.0.53"
-hex = { version = "0.4.3", default-features = false, features = ["std"] }
 proc-macro2 = "1.0"
 quote = "1.0"
 syn = "1.0.12"

+serde_json = "1.0.53"
+hex = { version = "0.4.3", default-features = false, features = ["std"] }
+eyre = "0.6"
+
 [package.metadata.docs.rs]
 all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
164 changes: 78 additions & 86 deletions ethers-contract/ethers-contract-derive/src/abi_ty.rs
@@ -4,42 +4,32 @@ use crate::utils;
 use ethers_core::macros::ethers_core_crate;
 use proc_macro2::{Ident, Literal, TokenStream};
 use quote::{quote, quote_spanned};
-use syn::{parse::Error, spanned::Spanned as _, Data, DeriveInput, Fields, Variant};
+use syn::{parse::Error, spanned::Spanned, Data, DeriveInput, Fields, Variant};

 /// Generates the tokenize implementation
-pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
-    let core_crate = ethers_core_crate();
+pub fn derive_tokenizeable_impl(input: &DeriveInput) -> Result<TokenStream, Error> {
+    let ethers_core = ethers_core_crate();
     let name = &input.ident;
-    let generic_params = input.generics.params.iter().map(|p| quote! { #p });
-    let generic_params = quote! { #(#generic_params,)* };
-
-    let generic_args = input.generics.type_params().map(|p| {
-        let name = &p.ident;
-        quote_spanned! { p.ident.span() => #name }
-    });
-
-    let generic_args = quote! { #(#generic_args,)* };
-
-    let generic_predicates = match input.generics.where_clause {
-        Some(ref clause) => {
-            let predicates = clause.predicates.iter().map(|p| quote! { #p });
-            quote! { #(#predicates,)* }
-        }
-        None => quote! {},
-    };
+    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+    let generic_predicates = where_clause.map(|c| &c.predicates);

     let (tokenize_predicates, params_len, init_struct_impl, into_token_impl) = match input.data {
         Data::Struct(ref data) => match data.fields {
             Fields::Named(ref fields) => {
                 let tokenize_predicates = fields.named.iter().map(|f| {
                     let ty = &f.ty;
-                    quote_spanned! { f.span() => #ty: #core_crate::abi::Tokenize }
+                    quote_spanned! { f.span() => #ty: #ethers_core::abi::Tokenize }
                 });
                 let tokenize_predicates = quote! { #(#tokenize_predicates,)* };

                 let assignments = fields.named.iter().map(|f| {
                     let name = f.ident.as_ref().expect("Named fields have names");
-                    quote_spanned! { f.span() => #name: #core_crate::abi::Tokenizable::from_token(iter.next().expect("The iter is guaranteed to be something due to the size check"))? }
+                    quote_spanned! { f.span() =>
+                        #name: #ethers_core::abi::Tokenizable::from_token(
+                            iter.next().expect("The iter is guaranteed to be something due to the size check")
+                        )?
+                    }
                 });
                 let init_struct_impl = quote! { Self { #(#assignments,)* } };
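Aside on the `split_for_impl` change above: it replaces the hand-rolled `generic_params`/`generic_args`/`generic_predicates` plumbing with syn's canonical helper. A self-contained sketch of what the three pieces look like (`Pair` and `MyTrait` are illustrative):

use quote::quote;
use syn::{parse_quote, DeriveInput};

fn main() {
    let input: DeriveInput = parse_quote! {
        struct Pair<T: Clone, U> where U: Default { a: T, b: U }
    };
    // Yields `<T: Clone, U>`, `<T, U>`, and `Some(where U: Default)`,
    // ready to interpolate straight into an impl block.
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    let name = &input.ident;
    let tokens = quote! {
        impl #impl_generics MyTrait for #name #ty_generics #where_clause {}
    };
    println!("{tokens}");
}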

@@ -54,12 +44,16 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
             Fields::Unnamed(ref fields) => {
                 let tokenize_predicates = fields.unnamed.iter().map(|f| {
                     let ty = &f.ty;
-                    quote_spanned! { f.span() => #ty: #core_crate::abi::Tokenize }
+                    quote_spanned! { f.span() => #ty: #ethers_core::abi::Tokenize }
                 });
                 let tokenize_predicates = quote! { #(#tokenize_predicates,)* };

                 let assignments = fields.unnamed.iter().map(|f| {
-                    quote_spanned! { f.span() => #core_crate::abi::Tokenizable::from_token(iter.next().expect("The iter is guaranteed to be something due to the size check"))? }
+                    quote_spanned! { f.span() =>
+                        #ethers_core::abi::Tokenizable::from_token(
+                            iter.next().expect("The iter is guaranteed to be something due to the size check")
+                        )?
+                    }
                 });
                 let init_struct_impl = quote! { Self(#(#assignments,)* ) };

@@ -71,17 +65,11 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream

                 (tokenize_predicates, fields.unnamed.len(), init_struct_impl, into_token_impl)
             }
-            Fields::Unit => return tokenize_unit_type(&input.ident),
+            Fields::Unit => return Ok(tokenize_unit_type(&input.ident)),
         },
-        Data::Enum(ref data) => {
-            return match tokenize_enum(name, data.variants.iter()) {
-                Ok(tokens) => tokens,
-                Err(err) => err.to_compile_error(),
-            }
-        }
+        Data::Enum(ref data) => return tokenize_enum(name, data.variants.iter()),
         Data::Union(_) => {
-            return Error::new(input.span(), "EthAbiType cannot be derived for unions")
-                .to_compile_error()
+            return Err(Error::new(input.span(), "EthAbiType cannot be derived for unions"))
         }
     };
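For context on what this dispatch serves: `EthAbiType` is derivable for structs (named, unnamed, or unit) and enums, and is rejected for unions with the error above. A usage sketch of the struct case (the type itself is illustrative):

use ethers_contract::EthAbiType;
use ethers_core::abi::Tokenizable;
use ethers_core::types::{Address, U256};

// The derive generates AbiType, AbiArrayType, Tokenizable and
// TokenizableItem impls that treat the struct as a Solidity tuple.
#[derive(Clone, Debug, PartialEq, EthAbiType)]
struct ValueChanged {
    old_author: Address,
    new_author: Address,
    value: U256,
}

fn main() {
    let v = ValueChanged {
        old_author: Address::zero(),
        new_author: Address::zero(),
        value: U256::from(1u64),
    };
    // Round-trip: struct -> Token::Tuple -> struct.
    let token = v.clone().into_token();
    assert_eq!(ValueChanged::from_token(token).unwrap(), v);
}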

@@ -95,14 +83,14 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
                 // can't encode an empty struct
                 // TODO: panic instead?
                 quote! {
-                    #core_crate::abi::Token::Tuple(Vec::new())
+                    #ethers_core::abi::Token::Tuple(Vec::new())
                 },
             ),
         _ => {
             let from_token = quote! {
-                if let #core_crate::abi::Token::Tuple(tokens) = token {
+                if let #ethers_core::abi::Token::Tuple(tokens) = token {
                     if tokens.len() != #params_len {
-                        return Err(#core_crate::abi::InvalidOutputType(::std::format!(
+                        return Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                             "Expected {} tokens, got {}: {:?}",
                             #params_len,
                             tokens.len(),
@@ -114,15 +102,15 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream

                     Ok(#init_struct_impl)
                 } else {
-                    Err(#core_crate::abi::InvalidOutputType(::std::format!(
+                    Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                         "Expected Tuple, got {:?}",
                         token
                     )))
                 }
             };

             let into_token = quote! {
-                #core_crate::abi::Token::Tuple(
+                #ethers_core::abi::Token::Tuple(
                     ::std::vec![
                         #into_token_impl
                     ]
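Putting these hunks together: for a two-field struct, the generated `from_token`/`into_token` pair expands to roughly the following (a hand-expanded sketch with the crate-path indirection removed; `Point` is illustrative):

use ethers_core::abi::{InvalidOutputType, Token, Tokenizable};

struct Point {
    x: u64,
    y: u64,
}

// Hand-written equivalent of what the derive emits for `Point`.
impl Tokenizable for Point {
    fn from_token(token: Token) -> Result<Self, InvalidOutputType> {
        if let Token::Tuple(tokens) = token {
            // The length check guarantees `iter.next()` succeeds below.
            if tokens.len() != 2 {
                return Err(InvalidOutputType(format!(
                    "Expected {} tokens, got {}: {:?}",
                    2,
                    tokens.len(),
                    tokens
                )));
            }
            let mut iter = tokens.into_iter();
            Ok(Point {
                x: Tokenizable::from_token(iter.next().expect("size checked"))?,
                y: Tokenizable::from_token(iter.next().expect("size checked"))?,
            })
        } else {
            Err(InvalidOutputType(format!("Expected Tuple, got {:?}", token)))
        }
    }

    fn into_token(self) -> Token {
        Token::Tuple(vec![self.x.into_token(), self.y.into_token()])
    }
}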
@@ -132,56 +120,57 @@ pub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream
         }
     };

-    let params = match utils::derive_param_type_with_abi_type(input, "EthAbiType") {
-        Ok(params) => params,
-        Err(err) => return err.to_compile_error(),
-    };
-    quote! {
+    let params = utils::derive_param_type_with_abi_type(input, "EthAbiType")?;

-        impl<#generic_params> #core_crate::abi::AbiType for #name<#generic_args> {
-            fn param_type() -> #core_crate::abi::ParamType {
+    Ok(quote! {
+        impl #impl_generics #ethers_core::abi::AbiType for #name #ty_generics #where_clause {
+            fn param_type() -> #ethers_core::abi::ParamType {
                 #params
             }
         }

-        impl<#generic_params> #core_crate::abi::AbiArrayType for #name<#generic_args> {}
-
-        impl<#generic_params> #core_crate::abi::Tokenizable for #name<#generic_args>
-        where
-            #generic_predicates
-            #tokenize_predicates
-        {
+        impl #impl_generics #ethers_core::abi::AbiArrayType for #name #ty_generics #where_clause {}

-            fn from_token(token: #core_crate::abi::Token) -> ::std::result::Result<Self, #core_crate::abi::InvalidOutputType> where
-                Self: Sized {
+        impl #impl_generics #ethers_core::abi::Tokenizable for #name #ty_generics
+        where
+            #generic_predicates
+            #tokenize_predicates
+        {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 #from_token_impl
             }

-            fn into_token(self) -> #core_crate::abi::Token {
+            fn into_token(self) -> #ethers_core::abi::Token {
                 #into_token_impl
             }
         }

-        impl<#generic_params> #core_crate::abi::TokenizableItem for #name<#generic_args>
-        where
-            #generic_predicates
-            #tokenize_predicates
-        { }
-    }
+        impl #impl_generics #ethers_core::abi::TokenizableItem for #name #ty_generics
+        where
+            #generic_predicates
+            #tokenize_predicates
+        {}
+    })
 }

 fn tokenize_unit_type(name: &Ident) -> TokenStream {
     let ethers_core = ethers_core_crate();

     quote! {
         impl #ethers_core::abi::Tokenizable for #name {
-            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType> where
-                Self: Sized {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 if let #ethers_core::abi::Token::Tuple(tokens) = token {
                     if !tokens.is_empty() {
                         Err(#ethers_core::abi::InvalidOutputType(::std::format!(
                             "Expected empty tuple, got {:?}",
                             tokens
                         )))
                     } else {
                         Ok(#name{})
                     }
@@ -194,10 +183,11 @@ fn tokenize_unit_type(name: &Ident) -> TokenStream {
                 }
             }

             fn into_token(self) -> #ethers_core::abi::Token {
                 #ethers_core::abi::Token::Tuple(::std::vec::Vec::new())
             }
         }
-        impl #ethers_core::abi::TokenizableItem for #name { }
+
+        impl #ethers_core::abi::TokenizableItem for #name {}
     }
 }
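The unit-type path maps a fieldless struct to an empty tuple in both directions. A behaviour sketch (`Nothing` is illustrative):

use ethers_contract::EthAbiType;
use ethers_core::abi::{Token, Tokenizable};

#[derive(Clone, Debug, PartialEq, EthAbiType)]
struct Nothing;

fn main() {
    // Encodes as an empty tuple...
    assert_eq!(Nothing.into_token(), Token::Tuple(vec![]));
    // ...and only an empty tuple decodes back.
    assert!(Nothing::from_token(Token::Tuple(vec![])).is_ok());
    assert!(Nothing::from_token(Token::Bool(true)).is_err());
}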

@@ -210,7 +200,7 @@ fn tokenize_unit_type(name: &Ident) -> TokenStream {
 fn tokenize_enum<'a>(
     enum_name: &Ident,
     variants: impl Iterator<Item = &'a Variant> + 'a,
-) -> ::std::result::Result<TokenStream, Error> {
+) -> Result<TokenStream, Error> {
     let ethers_core = ethers_core_crate();

     let mut into_tokens = TokenStream::new();
@@ -225,12 +215,12 @@ fn tokenize_enum<'a>(
         } else if variant.fields.is_empty() {
             let value = Literal::u8_unsuffixed(idx as u8);
             from_tokens.extend(quote! {
                 if let Ok(#value) = u8::from_token(token.clone()) {
                     return Ok(#enum_name::#var_ident)
                 }
             });
             into_tokens.extend(quote! {
                 #enum_name::#var_ident => #value.into_token(),
             });
         } else if let Some(field) = variant.fields.iter().next() {
             let ty = &field.ty;
@@ -240,30 +230,32 @@ fn tokenize_enum<'a>(
                 }
             });
             into_tokens.extend(quote! {
                 #enum_name::#var_ident(element) => element.into_token(),
             });
         } else {
             into_tokens.extend(quote! {
                 #enum_name::#var_ident(element) => #ethers_core::abi::Token::Tuple(::std::vec::Vec::new()),
             });
         }
     }

     Ok(quote! {
         impl #ethers_core::abi::Tokenizable for #enum_name {
-
-            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType> where
-                Self: Sized {
+            fn from_token(token: #ethers_core::abi::Token) -> ::std::result::Result<Self, #ethers_core::abi::InvalidOutputType>
+            where
+                Self: Sized,
+            {
                 #from_tokens
                 Err(#ethers_core::abi::InvalidOutputType("Failed to decode all type variants".to_string()))
             }

             fn into_token(self) -> #ethers_core::abi::Token {
                 match self {
                     #into_tokens
                 }
             }
         }
-        impl #ethers_core::abi::TokenizableItem for #enum_name { }
+
+        impl #ethers_core::abi::TokenizableItem for #enum_name {}
     })
 }
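The decoding strategy generated here is first-match-wins: each unit variant is tried against its `u8` index, each single-field variant against its inner type, and only if every attempt fails does the trailing "Failed to decode all type variants" error surface. A behaviour sketch (`Action` is illustrative):

use ethers_contract::EthAbiType;
use ethers_core::abi::{Token, Tokenizable};
use ethers_core::types::U256;

#[derive(Clone, Debug, PartialEq, EthAbiType)]
enum Action {
    Stop,           // unit variant 0: matches a numeric token equal to 0
    Go,             // unit variant 1: matches a numeric token equal to 1
    SetSpeed(U256), // data variant: delegates to U256's Tokenizable impl
}

fn main() {
    // Round-trip a unit variant and a data variant.
    let go = Action::Go.into_token();
    assert_eq!(Action::from_token(go).unwrap(), Action::Go);

    let set = Action::SetSpeed(U256::from(42u64)).into_token();
    assert_eq!(Action::from_token(set).unwrap(), Action::SetSpeed(U256::from(42u64)));
}

Note the ambiguity inherent in first-match decoding: `SetSpeed(U256::from(1))` encodes to the same token as `Go`, so round-trips are only faithful when variant encodings don't overlap.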