diff --git a/CHANGELOG.md b/CHANGELOG.md index 535d3e837c..7b2ed9dc8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 0.6.42 + - Fix `QueryBuilder` for Microsoft SQL Server: https://github.com/sqlpage/sqlx-oldapi/issues/11 + - Add support for Microsoft SQL Server DateTime columns in sqlx macros: https://github.com/sqlpage/sqlx-oldapi/issues/16 + ## 0.6.41 - Upgrade rustls to 0.23 - Provide detailed error messages on TLS connection issues diff --git a/Cargo.lock b/Cargo.lock index e0e32f0547..b2dbace575 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3701,7 +3701,7 @@ dependencies = [ "sha2", "sqlx-core-oldapi", "sqlx-rt-oldapi", - "syn 1.0.109", + "syn 2.0.101", "url", ] diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 4d2c23e71b..3bc2be8d9a 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -85,7 +85,7 @@ url = { version = "2.2.2", default-features = false } [dependencies.syn] # This is basically default features plus "full" but if they add more defaults later then we don't need to enable those. 
-version = "1.0.109" +version = "2.0.101" default-features = false features = ["full", "parsing", "printing", "derive", "clone-impls", "proc-macro"] diff --git a/sqlx-macros/src/common.rs b/sqlx-macros/src/common.rs index fab09b7cae..84b50b937c 100644 --- a/sqlx-macros/src/common.rs +++ b/sqlx-macros/src/common.rs @@ -14,11 +14,7 @@ pub(crate) fn resolve_path(path: impl AsRef, err_span: Span) -> syn::Resul // requires `proc_macro::SourceFile::path()` to be stable // https://github.com/rust-lang/rust/issues/54725 - if path.is_relative() - && !path - .parent() - .map_or(false, |parent| !parent.as_os_str().is_empty()) - { + if path.is_relative() && path.parent().is_none_or(|p| p.as_os_str().is_empty()) { return Err(syn::Error::new( err_span, "paths relative to the current file's directory are not currently supported", diff --git a/sqlx-macros/src/derives/attributes.rs b/sqlx-macros/src/derives/attributes.rs index 8ee8c6d738..d0c8c4ed31 100644 --- a/sqlx-macros/src/derives/attributes.rs +++ b/sqlx-macros/src/derives/attributes.rs @@ -3,7 +3,9 @@ use quote::{quote, quote_spanned}; use syn::punctuated::Punctuated; use syn::spanned::Spanned; use syn::token::Comma; -use syn::{Attribute, DeriveInput, Field, Lit, Meta, MetaNameValue, NestedMeta, Variant}; +use syn::{ + Attribute, DeriveInput, Expr, Field, Lit, LitStr, Meta, MetaNameValue, Path, Token, Variant, +}; macro_rules! 
assert_attribute { ($e:expr, $err:expr, $input:expr) => { @@ -83,89 +85,94 @@ pub fn parse_container_attributes(input: &[Attribute]) -> syn::Result { - for value in list.nested.iter() { - match value { - NestedMeta::Meta(meta) => match meta { - Meta::Path(p) if p.is_ident("transparent") => { - try_set!(transparent, true, value) + let nested_metas = + list.parse_args_with(Punctuated::::parse_terminated)?; + for meta_item in nested_metas { + match meta_item { + Meta::Path(p) if p.is_ident("transparent") => { + try_set!(transparent, true, p) + } + Meta::NameValue(mnv) if mnv.path.is_ident("rename_all") => { + if let Expr::Lit(expr_lit) = &mnv.value { + if let Lit::Str(val_str) = &expr_lit.lit { + let val = match &*val_str.value() { + "lowercase" => RenameAll::LowerCase, + "snake_case" => RenameAll::SnakeCase, + "UPPERCASE" => RenameAll::UpperCase, + "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase, + "kebab-case" => RenameAll::KebabCase, + "camelCase" => RenameAll::CamelCase, + "PascalCase" => RenameAll::PascalCase, + _ => fail!(val_str, "unexpected value for rename_all"), + }; + try_set!(rename_all, val, &mnv.path) + } else { + fail!(expr_lit, "expected string literal for rename_all") + } + } else { + fail!(&mnv.value, "expected literal expression for rename_all") } - - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. 
- }) if path.is_ident("rename_all") => { - let val = match &*val.value() { - "lowercase" => RenameAll::LowerCase, - "snake_case" => RenameAll::SnakeCase, - "UPPERCASE" => RenameAll::UpperCase, - "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase, - "kebab-case" => RenameAll::KebabCase, - "camelCase" => RenameAll::CamelCase, - "PascalCase" => RenameAll::PascalCase, - _ => fail!(meta, "unexpected value for rename_all"), - }; - - try_set!(rename_all, val, value) + } + Meta::NameValue(mnv) if mnv.path.is_ident("type_name") => { + if let Expr::Lit(expr_lit) = &mnv.value { + if let Lit::Str(val_str) = &expr_lit.lit { + try_set!( + type_name, + TypeName { + val: val_str.value(), + span: val_str.span(), + deprecated_rename: false + }, + &mnv.path + ) + } else { + fail!(expr_lit, "expected string literal for type_name") + } + } else { + fail!(&mnv.value, "expected literal expression for type_name") } - - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. - }) if path.is_ident("type_name") => { - try_set!( - type_name, - TypeName { - val: val.value(), - span: value.span(), - deprecated_rename: false - }, - value - ) + } + Meta::NameValue(mnv) if mnv.path.is_ident("rename") => { + if let Expr::Lit(expr_lit) = &mnv.value { + if let Lit::Str(val_str) = &expr_lit.lit { + try_set!( + type_name, + TypeName { + val: val_str.value(), + span: val_str.span(), + deprecated_rename: true + }, + &mnv.path + ) + } else { + fail!(expr_lit, "expected string literal for rename") + } + } else { + fail!(&mnv.value, "expected literal expression for rename") } - - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. 
- }) if path.is_ident("rename") => { - try_set!( - type_name, - TypeName { - val: val.value(), - span: value.span(), - deprecated_rename: true - }, - value - ) - } - - u => fail!(u, "unexpected attribute"), - }, - u => fail!(u, "unexpected attribute"), + } + u => fail!(u, "unexpected attribute inside sqlx(...)"), } } } Meta::List(list) if list.path.is_ident("repr") => { - if list.nested.len() != 1 { - fail!(&list.nested, "expected one value") + let nested_metas = + list.parse_args_with(Punctuated::::parse_terminated)?; + if nested_metas.len() != 1 { + fail!(&list.path, "expected one value for repr") } - match list.nested.first().unwrap() { - NestedMeta::Meta(Meta::Path(p)) if p.get_ident().is_some() => { - try_set!(repr, p.get_ident().unwrap().clone(), list); + match nested_metas.first().unwrap() { + Meta::Path(p) if p.get_ident().is_some() => { + try_set!(repr, p.get_ident().unwrap().clone(), &list.path); } - u => fail!(u, "unexpected value"), + u => fail!(u, "unexpected value for repr"), } } - _ => {} + _ => { /* Not an attribute we are interested in, or not a list */ } } } @@ -183,30 +190,37 @@ pub fn parse_child_attributes(input: &[Attribute]) -> syn::Result match meta { - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. - }) if path.is_ident("rename") => try_set!(rename, val.value(), value), - Meta::NameValue(MetaNameValue { - path, - lit: Lit::Str(val), - .. 
- }) if path.is_ident("try_from") => try_set!(try_from, val.parse()?, value), - Meta::Path(path) if path.is_ident("default") => default = true, - Meta::Path(path) if path.is_ident("flatten") => flatten = true, - u => fail!(u, "unexpected attribute"), - }, - u => fail!(u, "unexpected attribute"), + for attr in input.iter().filter(|a| a.path().is_ident("sqlx")) { + if let Meta::List(list) = &attr.meta { + let nested_metas = + list.parse_args_with(Punctuated::::parse_terminated)?; + for meta_item in nested_metas { + match meta_item { + Meta::NameValue(mnv) if mnv.path.is_ident("rename") => { + if let Expr::Lit(expr_lit) = &mnv.value { + if let Lit::Str(val_str) = &expr_lit.lit { + try_set!(rename, val_str.value(), &mnv.path) + } else { + fail!(expr_lit, "expected string literal for rename") + } + } else { + fail!(&mnv.value, "expected literal expression for rename") + } + } + Meta::NameValue(mnv) if mnv.path.is_ident("try_from") => { + if let Expr::Lit(expr_lit) = &mnv.value { + if let Lit::Str(val_str) = &expr_lit.lit { + try_set!(try_from, val_str.parse()?, &mnv.path) + } else { + fail!(expr_lit, "expected string literal for try_from") + } + } else { + fail!(&mnv.value, "expected literal expression for try_from") + } + } + Meta::Path(path) if path.is_ident("default") => default = true, + Meta::Path(path) if path.is_ident("flatten") => flatten = true, + u => fail!(u, "unexpected attribute inside sqlx(...)"), } } } diff --git a/sqlx-macros/src/derives/encode.rs b/sqlx-macros/src/derives/encode.rs index 37303e42bb..23885e6962 100644 --- a/sqlx-macros/src/derives/encode.rs +++ b/sqlx-macros/src/derives/encode.rs @@ -9,7 +9,7 @@ use syn::punctuated::Punctuated; use syn::token::Comma; use syn::{ parse_quote, Data, DataEnum, DataStruct, DeriveInput, Expr, Field, Fields, FieldsNamed, - FieldsUnnamed, Lifetime, LifetimeDef, Stmt, Variant, + FieldsUnnamed, Lifetime, LifetimeParam, Stmt, Variant, }; pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result { @@ -66,7 
+66,7 @@ fn expand_derive_encode_transparent( let mut generics = generics.clone(); generics .params - .insert(0, LifetimeDef::new(lifetime.clone()).into()); + .insert(0, LifetimeParam::new(lifetime.clone()).into()); generics .params diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs index c858b204dd..67911c50ac 100644 --- a/sqlx-macros/src/lib.rs +++ b/sqlx-macros/src/lib.rs @@ -1,3 +1,4 @@ +#![allow(clippy::large_enum_variant)] #![cfg_attr( not(any(feature = "postgres", feature = "mysql", feature = "offline")), allow(dead_code, unused_macros, unused_imports) @@ -12,6 +13,10 @@ use proc_macro::TokenStream; use quote::quote; +use syn::parse::{Parse, ParseStream}; +use syn::punctuated::Punctuated; +use syn::{parse_macro_input, DeriveInput, ItemFn, LitStr, Meta, Token}; + type Error = Box; type Result = std::result::Result; @@ -27,6 +32,16 @@ mod test_attr; #[cfg(feature = "migrate")] mod migrate; +struct ArgsParser(Punctuated); + +impl Parse for ArgsParser { + fn parse(input: ParseStream) -> syn::Result { + Ok(ArgsParser(Punctuated::::parse_terminated( + input, + )?)) + } +} + #[proc_macro] pub fn expand_query(input: TokenStream) -> TokenStream { let input = syn::parse_macro_input!(input as query::QueryMacroInput); @@ -101,19 +116,12 @@ pub fn migrate(input: TokenStream) -> TokenStream { } #[proc_macro_attribute] -pub fn test(args: TokenStream, input: TokenStream) -> TokenStream { - let args = syn::parse_macro_input!(args as syn::AttributeArgs); - let input = syn::parse_macro_input!(input as syn::ItemFn); - - match test_attr::expand(args, input) { - Ok(ts) => ts.into(), - Err(e) => { - if let Some(parse_err) = e.downcast_ref::() { - parse_err.to_compile_error().into() - } else { - let msg = e.to_string(); - quote!(::std::compile_error!(#msg)).into() - } - } - } +pub fn test( + args: proc_macro::TokenStream, + input: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + let args = parse_macro_input!(args as ArgsParser).0; + let input = 
parse_macro_input!(input as ItemFn); + + test_attr::expand(args, input) } diff --git a/sqlx-macros/src/migrate.rs b/sqlx-macros/src/migrate.rs index a776902ed5..392da864e4 100644 --- a/sqlx-macros/src/migrate.rs +++ b/sqlx-macros/src/migrate.rs @@ -19,7 +19,7 @@ impl ToTokens for QuotedMigrationType { quote! { ::sqlx_oldapi::migrate::MigrationType::ReversibleDown } } }; - tokens.append_all(ts.into_iter()); + tokens.append_all(ts); } } @@ -101,7 +101,7 @@ pub(crate) fn expand_migrator(path: &Path) -> crate::Result { .replace('_', " ") .to_owned(); - let sql = fs::read_to_string(&entry.path())?; + let sql = fs::read_to_string(entry.path())?; let checksum = Vec::from(Sha384::digest(sql.as_bytes()).as_slice()); diff --git a/sqlx-macros/src/query/args.rs b/sqlx-macros/src/query/args.rs index fd363288d9..708187945d 100644 --- a/sqlx-macros/src/query/args.rs +++ b/sqlx-macros/src/query/args.rs @@ -5,7 +5,7 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote, quote_spanned}; use sqlx_core::describe::Describe; use syn::spanned::Spanned; -use syn::{Expr, ExprCast, ExprGroup, ExprType, Type}; +use syn::{Expr, ExprCast, ExprGroup, Type}; /// Returns a tokenstream which typechecks the arguments passed to the macro /// and binds them to `DB::Arguments` with the ident `query_args`. @@ -118,7 +118,6 @@ fn get_type_override(expr: &Expr) -> Option<&Type> { match expr { Expr::Group(group) => get_type_override(&group.expr), Expr::Cast(cast) => Some(&cast.ty), - Expr::Type(ascription) => Some(&ascription.ty), _ => None, } } @@ -135,7 +134,8 @@ fn strip_wildcard(expr: Expr) -> Expr { expr: Box::new(strip_wildcard(*expr)), }), // type ascription syntax is experimental so we always strip it - Expr::Type(ExprType { expr, .. }) => *expr, + // In syn v2, Expr::Type and ExprType are removed. + // Expr::Type(ExprType { expr, .. 
}) => *expr, // we want to retain casts if they semantically matter Expr::Cast(ExprCast { attrs, diff --git a/sqlx-macros/src/query/mod.rs b/sqlx-macros/src/query/mod.rs index c85ce56144..7ba3bd2cc4 100644 --- a/sqlx-macros/src/query/mod.rs +++ b/sqlx-macros/src/query/mod.rs @@ -125,6 +125,7 @@ static METADATA: Lazy = Lazy::new(|| { } }); +#[allow(unused_variables)] pub fn expand_input(input: QueryMacroInput) -> crate::Result { match &*METADATA { #[cfg(not(any( @@ -135,7 +136,7 @@ pub fn expand_input(input: QueryMacroInput) -> crate::Result { )))] Metadata { offline: false, - database_url: Some(db_url), + database_url: Some(_db_url), .. } => Err( "At least one of the features ['postgres', 'mysql', 'mssql', 'sqlite'] must be enabled \ @@ -359,13 +360,9 @@ where } } - let record_fields = columns.iter().map( - |&output::RustColumn { - ref ident, - ref type_, - .. - }| quote!(#ident: #type_,), - ); + let record_fields = columns + .iter() + .map(|output::RustColumn { ident, type_, .. }| quote!(#ident: #type_,)); let mut record_tokens = quote! 
{ #[derive(Debug)] diff --git a/sqlx-macros/src/test_attr.rs b/sqlx-macros/src/test_attr.rs index 23454130de..d56e86ea42 100644 --- a/sqlx-macros/src/test_attr.rs +++ b/sqlx-macros/src/test_attr.rs @@ -1,6 +1,7 @@ use proc_macro2::{Span, TokenStream}; use quote::quote; -use syn::LitStr; +use syn::punctuated::Punctuated; +use syn::{Expr, LitBool, LitStr, Meta, MetaList, MetaNameValue, Path, Token}; struct Args { fixtures: Vec, @@ -14,54 +15,76 @@ enum MigrationsOpt { Disabled, } -pub fn expand(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result { - if input.sig.inputs.is_empty() { +pub fn expand( + args: Punctuated, + input: syn::ItemFn, +) -> proc_macro::TokenStream { + let result: crate::Result = if input.sig.inputs.is_empty() { if !args.is_empty() { if cfg!(feature = "migrate") { - return Err(syn::Error::new_spanned( + Err(syn::Error::new_spanned( args.first().unwrap(), "control attributes are not allowed unless \ the `migrate` feature is enabled and \ automatic test DB management is used; see docs", ) - .into()); + .into()) + } else { + Err(syn::Error::new_spanned( + args.first().unwrap(), + "control attributes are not allowed unless \ + automatic test DB management is used; see docs", + ) + .into()) } + } else { + expand_simple(input) + } + } else { + #[cfg(feature = "migrate")] + { + expand_advanced(args, input) + } - return Err(syn::Error::new_spanned( - args.first().unwrap(), - "control attributes are not allowed unless \ - automatic test DB management is used; see docs", - ) - .into()); + #[cfg(not(feature = "migrate"))] + { + Err(syn::Error::new_spanned(&input, "`migrate` feature required").into()) } + }; - return Ok(expand_simple(input)); + match result { + Ok(ts) => ts.into(), + Err(e) => { + if let Some(parse_err) = e.downcast_ref::() { + parse_err.to_compile_error().into() + } else { + let msg = e.to_string(); + quote!(::std::compile_error!(#msg)).into() + } + } } - - #[cfg(feature = "migrate")] - return expand_advanced(args, input); - - 
#[cfg(not(feature = "migrate"))] - return Err(syn::Error::new_spanned(input, "`migrate` feature required").into()); } -fn expand_simple(input: syn::ItemFn) -> TokenStream { +fn expand_simple(input: syn::ItemFn) -> crate::Result { let ret = &input.sig.output; let name = &input.sig.ident; let body = &input.block; let attrs = &input.attrs; - quote! { + Ok(quote! { #[::core::prelude::v1::test] #(#attrs)* fn #name() #ret { ::sqlx_oldapi::test_block_on(async { #body }) } - } + }) } #[cfg(feature = "migrate")] -fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result { +fn expand_advanced( + args: Punctuated, + input: syn::ItemFn, +) -> crate::Result { let ret = &input.sig.output; let name = &input.sig.ident; let inputs = &input.sig.inputs; @@ -127,27 +150,23 @@ fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Resul } #[cfg(feature = "migrate")] -fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result { +fn parse_args(attr_args: Punctuated) -> syn::Result { let mut fixtures = vec![]; let mut migrations = MigrationsOpt::InferredPath; for arg in attr_args { match arg { - syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("fixtures") => { + syn::Meta::List(list) if list.path.is_ident("fixtures") => { if !fixtures.is_empty() { return Err(syn::Error::new_spanned(list, "duplicate `fixtures` arg")); } - for nested in list.nested { - match nested { - syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => fixtures.push(litstr), - other => { - return Err(syn::Error::new_spanned(other, "expected string literal")) - } - } + let parsed_fixtures = list.parse_args_with(Punctuated::::parse_terminated)?; + for litstr in parsed_fixtures { + fixtures.push(litstr); } } - syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) + syn::Meta::NameValue(namevalue) if namevalue.path.is_ident("migrations") => { if !matches!(migrations, MigrationsOpt::InferredPath) { @@ -157,30 +176,38 @@ fn parse_args(attr_args: syn::AttributeArgs) -> 
syn::Result { )); } - migrations = match namevalue.lit { - syn::Lit::Bool(litbool) => { - if !litbool.value { - // migrations = false - MigrationsOpt::Disabled - } else { - // migrations = true + migrations = match &namevalue.value { + syn::Expr::Lit(ref expr_lit) => match &expr_lit.lit { + syn::Lit::Bool(litbool) => { + if !litbool.value { + // migrations = false + MigrationsOpt::Disabled + } else { + // migrations = true + return Err(syn::Error::new_spanned( + expr_lit, + "`migrations = true` is redundant", + )); + } + } + // migrations = "" + syn::Lit::Str(litstr) => MigrationsOpt::ExplicitPath(litstr.clone()), + _ => { return Err(syn::Error::new_spanned( - litbool, - "`migrations = true` is redundant", - )); + &namevalue.value, + "expected string or `false` for migrations value", + )) } } - // migrations = "" - syn::Lit::Str(litstr) => MigrationsOpt::ExplicitPath(litstr), _ => { return Err(syn::Error::new_spanned( - namevalue, - "expected string or `false`", + &namevalue.value, + "expected literal for migrations value", )) } }; } - syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) + syn::Meta::NameValue(namevalue) if namevalue.path.is_ident("migrator") => { if !matches!(migrations, MigrationsOpt::InferredPath) { @@ -190,13 +217,21 @@ fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result { )); } - migrations = match namevalue.lit { + migrations = match &namevalue.value { // migrator = "" - syn::Lit::Str(litstr) => MigrationsOpt::ExplicitMigrator(litstr.parse()?), + syn::Expr::Lit(ref expr_lit) => match &expr_lit.lit { + syn::Lit::Str(litstr) => MigrationsOpt::ExplicitMigrator(litstr.parse()?), + _ => { + return Err(syn::Error::new_spanned( + &namevalue.value, + "expected string for migrator path", + )) + } + }, _ => { return Err(syn::Error::new_spanned( - namevalue, - "expected string", + &namevalue.value, + "expected string literal for migrator path", )) } }; @@ -204,7 +239,7 @@ fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result { other => { 
return Err(syn::Error::new_spanned( other, - "expected `fixtures(\"\", ...)` or `migrations = \"\" | false` or `migrator = \"\"`", + "expected `fixtures(\"\", ...)` or `migrations = \"\" | false` or `migrator = \"\"`" )) } }