diff --git a/ci/scripts/rust_fmt.sh b/ci/scripts/rust_fmt.sh
index 9d8325877aad..f7db0baf5a47 100755
--- a/ci/scripts/rust_fmt.sh
+++ b/ci/scripts/rust_fmt.sh
@@ -18,4 +18,10 @@
 # under the License.
 
 set -ex
-cargo fmt --all -- --check
+
+# Install nightly toolchain (skips if already installed)
+rustup toolchain install nightly --component rustfmt
+
+# Use nightly rustfmt to check formatting including doc comments
+# This requires nightly because format_code_in_doc_comments is an unstable feature
+cargo +nightly fmt --all -- --check --config format_code_in_doc_comments=true
diff --git a/datafusion-examples/examples/data_io/remote_catalog.rs b/datafusion-examples/examples/data_io/remote_catalog.rs
index 10ec26b1d5c0..b338efddba99 100644
--- a/datafusion-examples/examples/data_io/remote_catalog.rs
+++ b/datafusion-examples/examples/data_io/remote_catalog.rs
@@ -16,7 +16,6 @@
 // under the License.
 
 //! See `main.rs` for how to run it.
-//!
 /// This example shows how to implement the DataFusion [`CatalogProvider`] API
 /// for catalogs that are remote (require network access) and/or offer only
 /// asynchronous APIs such as [Polaris], [Unity], and [Hive].
diff --git a/datafusion-examples/examples/udf/simple_udaf.rs b/datafusion-examples/examples/udf/simple_udaf.rs
index 42ea0054b759..736c5b7476fd 100644
--- a/datafusion-examples/examples/udf/simple_udaf.rs
+++ b/datafusion-examples/examples/udf/simple_udaf.rs
@@ -16,7 +16,6 @@
 // under the License.
 
 //! See `main.rs` for how to run it.
-//!
 /// In this example we will declare a single-type, single return type UDAF that computes the geometric mean.
 /// The geometric mean is described here: https://en.wikipedia.org/wiki/Geometric_mean
 use datafusion::arrow::{
diff --git a/datafusion/common/src/scalar/mod.rs b/datafusion/common/src/scalar/mod.rs
index 2eb7e970df25..b2256977c152 100644
--- a/datafusion/common/src/scalar/mod.rs
+++ b/datafusion/common/src/scalar/mod.rs
@@ -2790,8 +2790,8 @@ impl ScalarValue {
     /// ```
     /// use arrow::array::{Int32Array, ListArray};
     /// use arrow::datatypes::{DataType, Int32Type};
-    /// use datafusion_common::cast::as_list_array;
     /// use datafusion_common::ScalarValue;
+    /// use datafusion_common::cast::as_list_array;
     ///
     /// let scalars = vec![
     ///     ScalarValue::Int32(Some(1)),
@@ -2851,8 +2851,8 @@ impl ScalarValue {
     /// ```
     /// use arrow::array::{Int32Array, ListArray};
     /// use arrow::datatypes::{DataType, Int32Type};
-    /// use datafusion_common::cast::as_list_array;
     /// use datafusion_common::ScalarValue;
+    /// use datafusion_common::cast::as_list_array;
     ///
     /// let scalars = vec![
     ///     ScalarValue::Int32(Some(1)),
@@ -2895,8 +2895,8 @@ impl ScalarValue {
     /// ```
     /// use arrow::array::{Int32Array, LargeListArray};
     /// use arrow::datatypes::{DataType, Int32Type};
-    /// use datafusion_common::cast::as_large_list_array;
     /// use datafusion_common::ScalarValue;
+    /// use datafusion_common::cast::as_large_list_array;
     ///
     /// let scalars = vec![
     ///     ScalarValue::Int32(Some(1)),
@@ -3345,8 +3345,8 @@ impl ScalarValue {
     /// ```
     /// use arrow::array::ListArray;
     /// use arrow::datatypes::{DataType, Int32Type};
-    /// use datafusion_common::utils::SingleRowListArrayBuilder;
     /// use datafusion_common::ScalarValue;
+    /// use datafusion_common::utils::SingleRowListArrayBuilder;
     /// use std::sync::Arc;
     ///
     /// let list_arr = ListArray::from_iter_primitive::<Int32Type, _, _>(vec![
diff --git a/datafusion/core/src/dataframe/mod.rs b/datafusion/core/src/dataframe/mod.rs
index 0d060db3bf14..9d2738d8a0c1 100644
--- a/datafusion/core/src/dataframe/mod.rs
+++ b/datafusion/core/src/dataframe/mod.rs
@@ -1380,6 +1380,7 @@ impl DataFrame {
     ///     .read_csv("tests/data/example.csv", CsvReadOptions::new())
     ///     .await?;
     /// let count = df.count().await?; // 1
+    ///
     /// # assert_eq!(count, 1);
     /// # Ok(())
     /// # }
diff --git a/datafusion/core/src/execution/context/mod.rs b/datafusion/core/src/execution/context/mod.rs
index a769bb01b435..3c2d1e8dfb1d 100644
--- a/datafusion/core/src/execution/context/mod.rs
+++ b/datafusion/core/src/execution/context/mod.rs
@@ -443,7 +443,7 @@ impl SessionContext {
     /// # use datafusion::execution::SessionStateBuilder;
     /// # use datafusion_optimizer::push_down_filter::PushDownFilter;
     /// let my_rule = PushDownFilter {}; // pretend it is a new rule
-    ///     // Create a new builder with a custom optimizer rule
+    /// // Create a new builder with a custom optimizer rule
     /// let context: SessionContext = SessionStateBuilder::new()
     ///     .with_optimizer_rule(Arc::new(my_rule))
     ///     .build()
@@ -635,9 +635,10 @@ impl SessionContext {
     ///     .sql_with_options("CREATE TABLE foo (x INTEGER)", options)
     ///     .await
     ///     .unwrap_err();
-    /// assert!(err
-    ///     .to_string()
-    ///     .starts_with("Error during planning: DDL not supported: CreateMemoryTable"));
+    /// assert!(
+    ///     err.to_string()
+    ///         .starts_with("Error during planning: DDL not supported: CreateMemoryTable")
+    /// );
     /// # Ok(())
     /// # }
     /// ```
diff --git a/datafusion/core/src/lib.rs b/datafusion/core/src/lib.rs
index e83934a8e281..cb39c2e08376 100644
--- a/datafusion/core/src/lib.rs
+++ b/datafusion/core/src/lib.rs
@@ -491,7 +491,7 @@
 //! consumes it immediately as well.
 //!
 //! ```text
-//! 
+//!
 //! Step 3: FilterExec calls next() Step 2: ProjectionExec calls
 //! on input Stream next() on input Stream
 //! ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┐
diff --git a/datafusion/datasource-parquet/src/access_plan.rs b/datafusion/datasource-parquet/src/access_plan.rs
index 570792d40e5b..5359c45ce72d 100644
--- a/datafusion/datasource-parquet/src/access_plan.rs
+++ b/datafusion/datasource-parquet/src/access_plan.rs
@@ -42,11 +42,11 @@ use parquet::file::metadata::RowGroupMetaData;
 /// // Use parquet reader RowSelector to specify scanning rows 100-200 and 350-400
 /// // in a row group that has 1000 rows
 /// let row_selection = RowSelection::from(vec![
-///   RowSelector::skip(100),
-///   RowSelector::select(100),
-///   RowSelector::skip(150),
-///   RowSelector::select(50),
-///   RowSelector::skip(600), // skip last 600 rows
+///     RowSelector::skip(100),
+///     RowSelector::select(100),
+///     RowSelector::skip(150),
+///     RowSelector::select(50),
+///     RowSelector::skip(600), // skip last 600 rows
 /// ]);
 /// access_plan.scan_selection(1, row_selection);
 /// access_plan.skip(2); // skip row group 2
diff --git a/datafusion/doc/src/lib.rs b/datafusion/doc/src/lib.rs
index 836cb9345b51..46b4594ceb40 100644
--- a/datafusion/doc/src/lib.rs
+++ b/datafusion/doc/src/lib.rs
@@ -196,7 +196,7 @@ impl Default for DocSection {
 /// Example:
 ///
 /// ```rust
-/// 
+///
 /// # fn main() {
 /// use datafusion_doc::{DocSection, Documentation};
 /// let doc_section = DocSection {
diff --git a/datafusion/expr-common/src/interval_arithmetic.rs b/datafusion/expr-common/src/interval_arithmetic.rs
index f93ef3b79595..5eb1e54ff5bf 100644
--- a/datafusion/expr-common/src/interval_arithmetic.rs
+++ b/datafusion/expr-common/src/interval_arithmetic.rs
@@ -1317,7 +1317,7 @@ fn min_of_bounds(first: &ScalarValue, second: &ScalarValue) -> ScalarValue {
 /// Example usage:
 /// ```
 /// use datafusion_common::DataFusionError;
-/// use datafusion_expr_common::interval_arithmetic::{satisfy_greater, Interval};
+/// use datafusion_expr_common::interval_arithmetic::{Interval, satisfy_greater};
 ///
 /// let left = Interval::make(Some(-1000.0_f32), Some(1000.0_f32))?;
 /// let right = Interval::make(Some(500.0_f32), Some(2000.0_f32))?;
diff --git a/datafusion/expr-common/src/signature.rs b/datafusion/expr-common/src/signature.rs
index 90bd1415003c..e970d808eb93 100644
--- a/datafusion/expr-common/src/signature.rs
+++ b/datafusion/expr-common/src/signature.rs
@@ -899,7 +899,7 @@ fn get_data_types(native_type: &NativeType) -> Vec<DataType> {
 /// # Examples
 ///
 /// ```
-/// use datafusion_common::types::{logical_binary, logical_string, NativeType};
+/// use datafusion_common::types::{NativeType, logical_binary, logical_string};
 /// use datafusion_expr_common::signature::{Coercion, TypeSignatureClass};
 ///
 /// // Exact coercion that only accepts timestamp types
diff --git a/datafusion/expr/src/expr.rs b/datafusion/expr/src/expr.rs
index c7d825ce1d52..a4dc578eee51 100644
--- a/datafusion/expr/src/expr.rs
+++ b/datafusion/expr/src/expr.rs
@@ -1851,7 +1851,7 @@ impl Expr {
     /// Example
     /// ```
     /// # use datafusion_common::Column;
-    /// use datafusion_expr::{col, Expr};
+    /// use datafusion_expr::{Expr, col};
     /// let expr = col("foo");
     /// assert_eq!(expr.try_as_col(), Some(&Column::from("foo")));
     ///
diff --git a/datafusion/expr/src/logical_plan/ddl.rs b/datafusion/expr/src/logical_plan/ddl.rs
index 8a46e842a861..ae15e9cb1ef4 100644
--- a/datafusion/expr/src/logical_plan/ddl.rs
+++ b/datafusion/expr/src/logical_plan/ddl.rs
@@ -255,8 +255,9 @@ impl CreateExternalTable {
     ///     TableReference::bare("my_table"),
     ///     "/path/to/data",
     ///     "parquet",
-    ///     Arc::new(DFSchema::empty())
-    /// ).build();
+    ///     Arc::new(DFSchema::empty()),
+    /// )
+    /// .build();
     /// ```
     pub fn builder(
         name: impl Into<TableReference>,
diff --git a/datafusion/expr/src/logical_plan/plan.rs b/datafusion/expr/src/logical_plan/plan.rs
index 4219c24bfc9c..7a69ca0de3e0 100644
--- a/datafusion/expr/src/logical_plan/plan.rs
+++ b/datafusion/expr/src/logical_plan/plan.rs
@@ -1587,7 +1587,7 @@ impl LogicalPlan {
     ///
     /// ```
     /// use arrow::datatypes::{DataType, Field, Schema};
-    /// use datafusion_expr::{col, lit, logical_plan::table_scan, LogicalPlanBuilder};
+    /// use datafusion_expr::{LogicalPlanBuilder, col, lit, logical_plan::table_scan};
     /// let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
     /// let plan = table_scan(Some("t1"), &schema, None)
     ///     .unwrap()
@@ -1629,7 +1629,7 @@ impl LogicalPlan {
     ///
     /// ```
     /// use arrow::datatypes::{DataType, Field, Schema};
-    /// use datafusion_expr::{col, lit, logical_plan::table_scan, LogicalPlanBuilder};
+    /// use datafusion_expr::{LogicalPlanBuilder, col, lit, logical_plan::table_scan};
     /// let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
     /// let plan = table_scan(Some("t1"), &schema, None)
     ///     .unwrap()
@@ -1694,7 +1694,7 @@ impl LogicalPlan {
     ///
     /// ```
     /// use arrow::datatypes::{DataType, Field, Schema};
-    /// use datafusion_expr::{col, lit, logical_plan::table_scan, LogicalPlanBuilder};
+    /// use datafusion_expr::{LogicalPlanBuilder, col, lit, logical_plan::table_scan};
     /// let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
     /// let plan = table_scan(Some("t1"), &schema, None)
     ///     .unwrap()
@@ -1752,7 +1752,7 @@ impl LogicalPlan {
     /// ```
     /// ```
     /// use arrow::datatypes::{DataType, Field, Schema};
-    /// use datafusion_expr::{col, lit, logical_plan::table_scan, LogicalPlanBuilder};
+    /// use datafusion_expr::{LogicalPlanBuilder, col, lit, logical_plan::table_scan};
     /// let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
     /// let plan = table_scan(Some("t1"), &schema, None)
     ///     .unwrap()
diff --git a/datafusion/expr/src/predicate_bounds.rs b/datafusion/expr/src/predicate_bounds.rs
index 992d9f88bb14..92342bde03b4 100644
--- a/datafusion/expr/src/predicate_bounds.rs
+++ b/datafusion/expr/src/predicate_bounds.rs
@@ -45,7 +45,6 @@ use datafusion_expr_common::operator::Operator;
 ///
 /// The function returns a [NullableInterval] that describes the possible boolean values the
 /// predicate can evaluate to.
-///
 pub(super) fn evaluate_bounds(
     predicate: &Expr,
     certainly_null_expr: Option<&Expr>,
diff --git a/datafusion/expr/src/udf.rs b/datafusion/expr/src/udf.rs
index 26d7fc99cb17..794e0d1b7948 100644
--- a/datafusion/expr/src/udf.rs
+++ b/datafusion/expr/src/udf.rs
@@ -142,7 +142,7 @@ impl ScalarUDF {
     ///
     /// # Example
     /// ```no_run
-    /// use datafusion_expr::{col, lit, ScalarUDF};
+    /// use datafusion_expr::{ScalarUDF, col, lit};
     /// # fn my_udf() -> ScalarUDF { unimplemented!() }
     /// let my_func: ScalarUDF = my_udf();
     /// // Create an expr for `my_func(a, 12.3)`
diff --git a/datafusion/functions-aggregate-common/src/tdigest.rs b/datafusion/functions-aggregate-common/src/tdigest.rs
index 225c61b71939..523d845b58e0 100644
--- a/datafusion/functions-aggregate-common/src/tdigest.rs
+++ b/datafusion/functions-aggregate-common/src/tdigest.rs
@@ -523,7 +523,7 @@ impl TDigest {
     /// [`ScalarValue::Float64`]:
     ///
     /// ```text
-    /// 
+    ///
     /// ┌────────┬────────┬────────┬───────┬────────┬────────┐
     /// │max_size│ sum │ count │ max │ min │centroid│
     /// └────────┴────────┴────────┴───────┴────────┴────────┘
diff --git a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs
index 01de44cee1f6..7b82710416e9 100644
--- a/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs
+++ b/datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs
@@ -148,7 +148,7 @@ impl ExprSimplifier {
     /// use datafusion_expr::execution_props::ExecutionProps;
     /// use datafusion_expr::simplify::SimplifyContext;
     /// use datafusion_expr::simplify::SimplifyInfo;
-    /// use datafusion_expr::{col, lit, Expr};
+    /// use datafusion_expr::{Expr, col, lit};
     /// use datafusion_optimizer::simplify_expressions::ExprSimplifier;
     /// use std::sync::Arc;
     ///
@@ -290,7 +290,7 @@ impl ExprSimplifier {
     /// use datafusion_expr::execution_props::ExecutionProps;
     /// use datafusion_expr::interval_arithmetic::{Interval, NullableInterval};
     /// use datafusion_expr::simplify::SimplifyContext;
-    /// use datafusion_expr::{col, lit, Expr};
+    /// use datafusion_expr::{Expr, col, lit};
     /// use datafusion_optimizer::simplify_expressions::ExprSimplifier;
     ///
     /// let schema = Schema::new(vec![
@@ -352,7 +352,7 @@ impl ExprSimplifier {
     /// use datafusion_expr::execution_props::ExecutionProps;
     /// use datafusion_expr::interval_arithmetic::{Interval, NullableInterval};
     /// use datafusion_expr::simplify::SimplifyContext;
-    /// use datafusion_expr::{col, lit, Expr};
+    /// use datafusion_expr::{Expr, col, lit};
     /// use datafusion_optimizer::simplify_expressions::ExprSimplifier;
     ///
     /// let schema = Schema::new(vec![
diff --git a/datafusion/physical-expr/src/expressions/case/literal_lookup_table/mod.rs b/datafusion/physical-expr/src/expressions/case/literal_lookup_table/mod.rs
index 67b045f9988f..25db6f8bb1a1 100644
--- a/datafusion/physical-expr/src/expressions/case/literal_lookup_table/mod.rs
+++ b/datafusion/physical-expr/src/expressions/case/literal_lookup_table/mod.rs
@@ -63,7 +63,6 @@ use std::fmt::Debug;
/// ELSE
/// END
/// ```
-///
 #[derive(Debug)]
 pub(in super::super) struct LiteralLookupTable {
     /// The lookup table to use for evaluating the CASE expression
@@ -252,7 +251,6 @@ pub(super) trait WhenLiteralIndexMap: Debug + Send + Sync {
     ///
     /// the returned vector will be:
     /// - `[0, 2, else_index, 1, 0]`
-    ///
     fn map_to_when_indices(
         &self,
         array: &ArrayRef,
diff --git a/datafusion/physical-expr/src/intervals/cp_solver.rs b/datafusion/physical-expr/src/intervals/cp_solver.rs
index e5e9304ab1d9..8f4adb8311e4 100644
--- a/datafusion/physical-expr/src/intervals/cp_solver.rs
+++ b/datafusion/physical-expr/src/intervals/cp_solver.rs
@@ -562,11 +562,11 @@ impl ExprIntervalGraph {
     /// use arrow::datatypes::Field;
     /// use arrow::datatypes::Schema;
     /// use datafusion_common::ScalarValue;
-    /// use datafusion_expr::interval_arithmetic::Interval;
     /// use datafusion_expr::Operator;
+    /// use datafusion_expr::interval_arithmetic::Interval;
+    /// use datafusion_physical_expr::PhysicalExpr;
     /// use datafusion_physical_expr::expressions::{BinaryExpr, Column, Literal};
     /// use datafusion_physical_expr::intervals::cp_solver::ExprIntervalGraph;
-    /// use datafusion_physical_expr::PhysicalExpr;
     /// use std::sync::Arc;
     ///
     /// let expr = Arc::new(BinaryExpr::new(
diff --git a/datafusion/physical-expr/src/projection.rs b/datafusion/physical-expr/src/projection.rs
index c46df87fd8b3..986034809f4a 100644
--- a/datafusion/physical-expr/src/projection.rs
+++ b/datafusion/physical-expr/src/projection.rs
@@ -257,12 +257,12 @@ impl ProjectionExprs {
     /// # Example
     ///
     /// ```rust
-    /// use std::sync::Arc;
     /// use arrow::datatypes::{DataType, Field, Schema};
     /// use datafusion_common::Result;
+    /// use datafusion_physical_expr::PhysicalExpr;
     /// use datafusion_physical_expr::expressions::Column;
     /// use datafusion_physical_expr::projection::ProjectionExprs;
-    /// use datafusion_physical_expr::PhysicalExpr;
+    /// use std::sync::Arc;
     ///
     /// // Create a schema and projection
     /// let schema = Arc::new(Schema::new(vec![
@@ -492,10 +492,10 @@ impl ProjectionExprs {
     ///
     /// ```rust
     /// use arrow::datatypes::{DataType, Field, Schema};
-    /// use datafusion_common::stats::{ColumnStatistics, Precision, Statistics};
-    /// use datafusion_physical_expr::projection::ProjectionExprs;
     /// use datafusion_common::Result;
     /// use datafusion_common::ScalarValue;
+    /// use datafusion_common::stats::{ColumnStatistics, Precision, Statistics};
+    /// use datafusion_physical_expr::projection::ProjectionExprs;
     /// use std::sync::Arc;
     ///
     /// fn main() -> Result<()> {
diff --git a/datafusion/physical-plan/src/aggregates/group_values/multi_group_by/mod.rs b/datafusion/physical-plan/src/aggregates/group_values/multi_group_by/mod.rs
index 4c9e376fc400..5a403821b233 100644
--- a/datafusion/physical-plan/src/aggregates/group_values/multi_group_by/mod.rs
+++ b/datafusion/physical-plan/src/aggregates/group_values/multi_group_by/mod.rs
@@ -180,7 +180,6 @@ pub struct GroupValuesColumn {
     /// We don't really store the actual `group values` in `hashtable`,
     /// instead we store the `group indices` pointing to values in `GroupValues`.
     /// And we use [`GroupIndexView`] to represent such `group indices` in table.
-    ///
     map: HashTable<(u64, GroupIndexView)>,
 
     /// The size of `map` in bytes
diff --git a/datafusion/physical-plan/src/aggregates/row_hash.rs b/datafusion/physical-plan/src/aggregates/row_hash.rs
index cb22fbf9a06a..40112d35416e 100644
--- a/datafusion/physical-plan/src/aggregates/row_hash.rs
+++ b/datafusion/physical-plan/src/aggregates/row_hash.rs
@@ -234,7 +234,7 @@ enum OutOfMemoryMode {
 /// # Architecture
 ///
 /// ```text
-/// 
+///
 /// Assigns a consecutive group internally stores aggregate values
 /// index for each unique set for all groups
 /// of group values
diff --git a/datafusion/physical-plan/src/joins/hash_join/exec.rs b/datafusion/physical-plan/src/joins/hash_join/exec.rs
index bd92cf496426..c04357f09e82 100644
--- a/datafusion/physical-plan/src/joins/hash_join/exec.rs
+++ b/datafusion/physical-plan/src/joins/hash_join/exec.rs
@@ -211,7 +211,7 @@ impl JoinLeftData {
 ///
 ///
 /// ```text
-/// 
+///
 /// Original build-side data Inserting build-side values into hashmap Concatenated build-side batch
 /// ┌───────────────────────────┐
 /// hashmap.insert(row-hash, row-idx + offset) │ idx │
diff --git a/datafusion/physical-plan/src/joins/hash_join/stream.rs b/datafusion/physical-plan/src/joins/hash_join/stream.rs
index e6735675125b..a8d9b8e5e9e5 100644
--- a/datafusion/physical-plan/src/joins/hash_join/stream.rs
+++ b/datafusion/physical-plan/src/joins/hash_join/stream.rs
@@ -110,7 +110,7 @@ impl BuildSide {
 /// Expected state transitions performed by HashJoinStream are:
 ///
 /// ```text
-/// 
+///
 /// WaitBuildSide
 /// │
 /// ▼
diff --git a/datafusion/physical-plan/src/sort_pushdown.rs b/datafusion/physical-plan/src/sort_pushdown.rs
index 8432fd5dabee..d184469d798c 100644
--- a/datafusion/physical-plan/src/sort_pushdown.rs
+++ b/datafusion/physical-plan/src/sort_pushdown.rs
@@ -100,15 +100,24 @@ impl SortOrderPushdownResult {
     /// # use datafusion_physical_plan::SortOrderPushdownResult;
     /// let exact = SortOrderPushdownResult::Exact { inner: 42 };
     /// let inexact = exact.into_inexact();
-    /// assert!(matches!(inexact, SortOrderPushdownResult::Inexact { inner: 42 }));
+    /// assert!(matches!(
+    ///     inexact,
+    ///     SortOrderPushdownResult::Inexact { inner: 42 }
+    /// ));
     ///
     /// let already_inexact = SortOrderPushdownResult::Inexact { inner: 42 };
     /// let still_inexact = already_inexact.into_inexact();
-    /// assert!(matches!(still_inexact, SortOrderPushdownResult::Inexact { inner: 42 }));
+    /// assert!(matches!(
+    ///     still_inexact,
+    ///     SortOrderPushdownResult::Inexact { inner: 42 }
+    /// ));
     ///
     /// let unsupported = SortOrderPushdownResult::<i32>::Unsupported;
     /// let still_unsupported = unsupported.into_inexact();
-    /// assert!(matches!(still_unsupported, SortOrderPushdownResult::Unsupported));
+    /// assert!(matches!(
+    ///     still_unsupported,
+    ///     SortOrderPushdownResult::Unsupported
+    /// ));
     /// ```
     pub fn into_inexact(self) -> Self {
         match self {
diff --git a/datafusion/physical-plan/src/sorts/cursor.rs b/datafusion/physical-plan/src/sorts/cursor.rs
index de3ec2e7a91e..f1bea6ce41f0 100644
--- a/datafusion/physical-plan/src/sorts/cursor.rs
+++ b/datafusion/physical-plan/src/sorts/cursor.rs
@@ -52,7 +52,7 @@ pub trait CursorValues {
 /// [`CursorValues`]
 ///
 /// ```text
-/// 
+///
 /// ┌───────────────────────┐
 /// │ │ ┌──────────────────────┐
 /// │ ┌─────────┐ ┌─────┐ │ ─ ─ ─ ─│ Cursor │
diff --git a/datafusion/physical-plan/src/unnest.rs b/datafusion/physical-plan/src/unnest.rs
index 5fef754e8078..e9a69e178ed8 100644
--- a/datafusion/physical-plan/src/unnest.rs
+++ b/datafusion/physical-plan/src/unnest.rs
@@ -591,7 +591,7 @@ struct UnnestingResult {
 /// ```
 /// Result:
 /// ```text
-/// 
+///
 /// ┌────────────────────────────────────────────────┬────────────────────────────────────────────────┐
 /// │ unnest(i, "recursive" := CAST('t' AS BOOLEAN)) │ unnest(j, "recursive" := CAST('t' AS BOOLEAN)) │
 /// │ int32 │ int32 │
diff --git a/datafusion/proto/src/bytes/mod.rs b/datafusion/proto/src/bytes/mod.rs
index d95bdd388699..12d8afefca63 100644
--- a/datafusion/proto/src/bytes/mod.rs
+++ b/datafusion/proto/src/bytes/mod.rs
@@ -47,7 +47,7 @@ mod registry;
 /// bytes.
 ///
 /// ```
-/// use datafusion_expr::{col, lit, Expr};
+/// use datafusion_expr::{Expr, col, lit};
 /// use datafusion_proto::bytes::Serializeable;
 ///
 /// // Create a new `Expr` a < 32
diff --git a/datafusion/spark/src/function/math/abs.rs b/datafusion/spark/src/function/math/abs.rs
index 97703937f39f..b0bae8f71227 100644
--- a/datafusion/spark/src/function/math/abs.rs
+++ b/datafusion/spark/src/function/math/abs.rs
@@ -36,7 +36,6 @@ use std::sync::Arc;
 /// TODOs:
 /// - Spark's ANSI-compliant dialect, when off (i.e. `spark.sql.ansi.enabled=false`), taking absolute value on the minimal value of a signed integer returns the value as is. DataFusion's abs throws "DataFusion error: Arrow error: Compute error" on arithmetic overflow
 /// - Spark's abs also supports ANSI interval types: YearMonthIntervalType and DayTimeIntervalType. DataFusion's abs doesn't.
-///
 #[derive(Debug, PartialEq, Eq, Hash)]
 pub struct SparkAbs {
     signature: Signature,
diff --git a/datafusion/spark/src/function/url/url_decode.rs b/datafusion/spark/src/function/url/url_decode.rs
index e4a9cf6acd3e..08282143f1c7 100644
--- a/datafusion/spark/src/function/url/url_decode.rs
+++ b/datafusion/spark/src/function/url/url_decode.rs
@@ -64,7 +64,6 @@ impl UrlDecode {
     ///
     /// * `Ok(String)` - The decoded string
     /// * `Err(DataFusionError)` - If the input is malformed or contains invalid UTF-8
-    ///
     fn decode(value: &str) -> Result<String> {
         // Check if the string has valid percent encoding
         Self::validate_percent_encoding(value)?;
@@ -170,7 +169,6 @@ impl ScalarUDFImpl for UrlDecode {
 ///
 /// * `Ok(ArrayRef)` - A new array of the same type containing decoded strings
 /// * `Err(DataFusionError)` - If validation fails or invalid arguments are provided
-///
 fn spark_url_decode(args: &[ArrayRef]) -> Result<ArrayRef> {
     spark_handled_url_decode(args, |x| x)
 }
diff --git a/datafusion/spark/src/function/url/url_encode.rs b/datafusion/spark/src/function/url/url_encode.rs
index 7292eb530a6a..712561ab9292 100644
--- a/datafusion/spark/src/function/url/url_encode.rs
+++ b/datafusion/spark/src/function/url/url_encode.rs
@@ -57,7 +57,6 @@ impl UrlEncode {
     /// # Returns
     ///
     /// * `Ok(String)` - The encoded string
-    ///
     fn encode(value: &str) -> Result<String> {
         Ok(byte_serialize(value.as_bytes()).collect::<String>())
     }
@@ -104,7 +103,6 @@ impl ScalarUDFImpl for UrlEncode {
 ///
 /// * `Ok(ArrayRef)` - A new array of the same type containing encoded strings
 /// * `Err(DataFusionError)` - If invalid arguments are provided
-///
 fn spark_url_encode(args: &[ArrayRef]) -> Result<ArrayRef> {
     if args.len() != 1 {
         return exec_err!("`url_encode` expects 1 argument");
diff --git a/datafusion/sql/src/unparser/mod.rs b/datafusion/sql/src/unparser/mod.rs
index 05b472dc92a9..2aeffdaa4d45 100644
--- a/datafusion/sql/src/unparser/mod.rs
+++ b/datafusion/sql/src/unparser/mod.rs
@@ -44,7 +44,7 @@ pub mod extension_unparser;
 /// use datafusion_sql::unparser::Unparser;
 /// let expr = col("a").gt(lit(4)); // form an expression `a > 4`
 /// let unparser = Unparser::default();
-/// let sql = unparser.expr_to_sql(&expr).unwrap();// convert to AST
+/// let sql = unparser.expr_to_sql(&expr).unwrap(); // convert to AST
 /// // use the Display impl to convert to SQL text
 /// assert_eq!(sql.to_string(), "(a > 4)");
 /// // now convert to pretty sql