use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use syn::{punctuated::Punctuated, Token};
use crate::attributes::{CrateAttribute, RenamingRule};
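/// Creates a `syn::Error` with the given span and message.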
macro_rules! err_spanned {
($span:expr => $msg:expr) => {
syn::Error::new($span, $msg)
};
}
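/// Returns early from the enclosing function with a spanned `syn::Error`.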
macro_rules! bail_spanned {
($span:expr => $msg:expr) => {
return Err(err_spanned!($span => $msg))
};
}
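/// Bails with a spanned error unless the condition holds.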
macro_rules! ensure_spanned {
($condition:expr, $span:expr => $msg:expr) => {
if !($condition) {
bail_spanned!($span => $msg);
}
}
}
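/// Returns `true` if `ty` (after unwrapping any groups) is a path type whose last
/// segment is `Python`, e.g. `Python<'py>`.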
pub fn is_python(ty: &syn::Type) -> bool {
match unwrap_ty_group(ty) {
syn::Type::Path(typath) => typath
.path
.segments
.last()
.map(|seg| seg.ident == "Python")
.unwrap_or(false),
_ => false,
}
}
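/// If `ty` is `Option<T>` (judged by its last path segment), returns the inner type `T`.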
pub fn option_type_argument(ty: &syn::Type) -> Option<&syn::Type> {
if let syn::Type::Path(syn::TypePath { path, .. }) = ty {
let seg = path.segments.last().filter(|s| s.ident == "Option")?;
if let syn::PathArguments::AngleBracketed(params) = &seg.arguments {
if let syn::GenericArgument::Type(ty) = params.args.first()? {
return Some(ty);
}
}
}
None
}
#[derive(Clone)]
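/// The docstring as an expression: either a plain string literal or a `concat!(...)`
/// invocation, always terminated by a nul byte.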
pub struct PythonDoc(TokenStream);
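/// Collects the `#[doc = ...]` attributes (plus the optional text signature) into a
/// single nul-terminated docstring expression.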
pub fn get_doc(attrs: &[syn::Attribute], mut text_signature: Option<String>) -> PythonDoc {
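    // The text signature goes first, separated from the doc body by a `--` divider
    // line (the `__text_signature__` convention).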
if let Some(text_signature) = &mut text_signature {
text_signature.push_str("\n--\n\n");
}
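    // Literal doc lines accumulate in `current_part`; non-literal pieces become
    // separate `concat!` arguments collected in `parts`.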
let mut parts = Punctuated::<TokenStream, Token![,]>::new();
let mut first = true;
let mut current_part = text_signature.unwrap_or_default();
for attr in attrs {
if attr.path().is_ident("doc") {
if let Ok(nv) = attr.meta.require_name_value() {
if !first {
current_part.push('\n');
} else {
first = false;
}
if let syn::Expr::Lit(syn::ExprLit {
lit: syn::Lit::Str(lit_str),
..
}) = &nv.value
{
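                    // `/// docs` expands to `#[doc = " docs"]`, so strip one leading space.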
let doc_line = lit_str.value();
current_part.push_str(doc_line.strip_prefix(' ').unwrap_or(&doc_line));
} else {
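                    // Not a string literal (e.g. `#[doc = include_str!(...)]`): flush the
                    // buffered text and splice the expression in as its own part.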
parts.push(current_part.to_token_stream());
current_part.clear();
parts.push(nv.value.to_token_stream());
}
}
}
}
if !parts.is_empty() {
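        // At least one non-literal piece: emit `concat!(<parts>, "\0")`.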
if !current_part.is_empty() {
parts.push(current_part.to_token_stream());
}
let mut tokens = TokenStream::new();
syn::Ident::new("concat", Span::call_site()).to_tokens(&mut tokens);
syn::token::Not(Span::call_site()).to_tokens(&mut tokens);
syn::token::Bracket(Span::call_site()).surround(&mut tokens, |tokens| {
parts.to_tokens(tokens);
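            // Append the trailing nul terminator as the final `concat!` argument.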
syn::token::Comma(Span::call_site()).to_tokens(tokens);
syn::LitStr::new("\0", Span::call_site()).to_tokens(tokens);
});
PythonDoc(tokens)
} else {
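        // Only literal doc lines: a plain nul-terminated string literal suffices.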
current_part.push('\0');
PythonDoc(current_part.to_token_stream())
}
}
impl quote::ToTokens for PythonDoc {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.0.to_tokens(tokens)
}
}
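/// Strips any `Type::Group` wrappers (invisible delimiters introduced by macro
/// expansion) to reach the underlying type.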
pub fn unwrap_ty_group(mut ty: &syn::Type) -> &syn::Type {
while let syn::Type::Group(g) = ty {
ty = &*g.elem;
}
ty
}
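/// Shared context for macro expansion; holds the path used to refer to the pyo3 crate
/// in generated code.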
pub struct Ctx {
pub pyo3_path: PyO3CratePath,
}
impl Ctx {
pub(crate) fn new(attr: &Option<CrateAttribute>) -> Self {
let pyo3_path = match attr {
Some(attr) => PyO3CratePath::Given(attr.value.0.clone()),
None => PyO3CratePath::Default,
};
Self { pyo3_path }
}
}
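/// The path by which generated code refers to the pyo3 crate: either a path supplied
/// via a `crate` attribute, or the default `::pyo3`.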
pub enum PyO3CratePath {
Given(syn::Path),
Default,
}
impl PyO3CratePath {
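    /// Like the `ToTokens` impl, but attaches `span` to the emitted path so that errors
    /// in the generated code point at a more useful location.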
pub fn to_tokens_spanned(&self, span: Span) -> TokenStream {
match self {
Self::Given(path) => quote::quote_spanned! { span => #path },
Self::Default => quote::quote_spanned! { span => ::pyo3 },
}
}
}
impl quote::ToTokens for PyO3CratePath {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
Self::Given(path) => path.to_tokens(tokens),
Self::Default => quote::quote! { ::pyo3 }.to_tokens(tokens),
}
}
}
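/// Converts `name` to the case convention selected by `rule`, using `heck`;
/// e.g. `RenamingRule::SnakeCase` maps `"FooBar"` to `"foo_bar"`.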
pub fn apply_renaming_rule(rule: RenamingRule, name: &str) -> String {
use heck::*;
match rule {
RenamingRule::CamelCase => name.to_lower_camel_case(),
RenamingRule::KebabCase => name.to_kebab_case(),
RenamingRule::Lowercase => name.to_lowercase(),
RenamingRule::PascalCase => name.to_upper_camel_case(),
RenamingRule::ScreamingKebabCase => name.to_shouty_kebab_case(),
RenamingRule::ScreamingSnakeCase => name.to_shouty_snake_case(),
RenamingRule::SnakeCase => name.to_snake_case(),
RenamingRule::Uppercase => name.to_uppercase(),
}
}
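/// Returns whether the current build targets the limited Python API (abi3).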
pub(crate) fn is_abi3() -> bool {
pyo3_build_config::get().abi3
}