Skip to content

Commit 5cfd81d

Browse files
Fix up doc tests
1 parent 66eac9f commit 5cfd81d

File tree

3 files changed

+22
-12
lines changed

3 files changed

+22
-12
lines changed

datafusion/datasource-parquet/src/source.rs

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -98,18 +98,20 @@ use object_store::ObjectStore;
9898
/// # use datafusion_physical_expr::expressions::lit;
9999
/// # use datafusion_datasource::source::DataSourceExec;
100100
/// # use datafusion_common::config::TableParquetOptions;
101+
/// use datafusion_datasource::file::FileSource;
101102
///
102103
/// # let file_schema = Arc::new(Schema::empty());
103104
/// # let object_store_url = ObjectStoreUrl::local_filesystem();
104105
/// # let predicate = lit(true);
105-
/// let source = Arc::new(
106-
/// ParquetSource::new(TableParquetOptions::default(), config)
107-
/// .with_predicate(predicate)
108-
/// );
109106
/// // Create a DataSourceExec for reading `file1.parquet` with a file size of 100MB
110-
/// let config = FileScanConfigBuilder::new(object_store_url, file_schema, source)
107+
/// let config = FileScanConfigBuilder::new(object_store_url, file_schema)
111108
/// .with_file(PartitionedFile::new("file1.parquet", 100*1024*1024)).build();
112-
/// let exec =DataSourceExec::from_data_source(source);
109+
///
110+
/// let source = ParquetSource::new(TableParquetOptions::default(), config)
111+
/// .with_predicate(predicate)
112+
/// .as_data_source();
113+
///
114+
/// let exec = Arc::new(DataSourceExec::new(source));
113115
/// ```
114116
///
115117
/// # Features
@@ -176,7 +178,7 @@ use object_store::ObjectStore;
176178
/// # use datafusion_datasource::file::FileSource;
177179
/// # use datafusion_datasource::file_scan_config::{FileScanConfig, FileScanConfigBuilder};
178180
/// # use datafusion_datasource::PartitionedFile;
179-
/// # use datafusion_datasource::source::DataSourceExec;///
181+
/// # use datafusion_datasource::source::DataSourceExec;
180182
/// # use datafusion_datasource_parquet::source::ParquetSource;
181183
///
182184
/// fn parquet_exec() -> DataSourceExec { unimplemented!() }
@@ -194,7 +196,9 @@ use object_store::ObjectStore;
194196
/// .with_file_groups(vec![file_group.clone()])
195197
/// .build();
196198
///
197-
/// Arc::new(DataSourceExec::new(parquet_source.with_config(new_config)))
199+
/// let source = parquet_source.with_config(new_config).as_data_source();
200+
///
201+
/// Arc::new(DataSourceExec::new(source))
198202
/// })
199203
/// .collect::<Vec<_>>();
200204
/// ```
@@ -215,6 +219,7 @@ use object_store::ObjectStore;
215219
/// ```
216220
/// # use std::sync::Arc;
217221
/// # use arrow::datatypes::{Schema, SchemaRef};
222+
/// # use datafusion_common::config::TableParquetOptions;
218223
/// # use datafusion_datasource::PartitionedFile;
219224
/// # use datafusion_datasource_parquet::ParquetAccessPlan;
220225
/// # use datafusion_datasource::file_scan_config::{FileScanConfig, FileScanConfigBuilder};
@@ -233,11 +238,14 @@ use object_store::ObjectStore;
233238
/// let partitioned_file = PartitionedFile::new("my_file.parquet", 1234)
234239
/// .with_extensions(Arc::new(access_plan));
235240
/// // create a FileScanConfig to scan this file
236-
/// let config = FileScanConfigBuilder::new(ObjectStoreUrl::local_filesystem(), schema(), Arc::new(ParquetSource::new(TableParquetOptions::default(), config)))
241+
/// let config = FileScanConfigBuilder::new(ObjectStoreUrl::local_filesystem(), schema())
237242
/// .with_file(partitioned_file).build();
243+
///
244+
/// let source = ParquetSource::new(TableParquetOptions::default(), config);
245+
///
238246
/// // this parquet DataSourceExec will not even try to read row groups 2 and 4. Additional
239247
/// // pruning based on predicates may also happen
240-
/// let exec =DataSourceExec::from_data_source(source);
248+
/// let exec = DataSourceExec::from_data_source(source);
241249
/// ```
242250
///
243251
/// For a complete example, see the [`advanced_parquet_index` example]).

datafusion/datasource/src/file_scan_config.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,8 @@ use log::{debug, warn};
6060
/// Owned by file sources that implement [`DataSource`]. Use [`DataSourceExec::from_data_source`]
6161
/// to create a [`DataSourceExec`] from a data source that owns this config.
6262
///
63+
/// [`DataSourceExec::from_data_source`]: crate::source::DataSourceExec::from_data_source
64+
/// [`DataSourceExec`]: crate::source::DataSourceExec
6365
/// [`DataSource`]: crate::source::DataSource
6466
///
6567
/// # Example
@@ -237,7 +239,7 @@ pub struct FileScanConfigBuilder {
237239
/// This schema is used to read the files, but is **not** necessarily the
238240
/// schema of the physical files. Rather this is the schema that the
239241
/// physical file schema will be mapped onto, and the schema that the
240-
/// [`DataSourceExec`] will return.
242+
/// [`crate::source::DataSourceExec`] will return.
241243
///
242244
/// This is usually the same as the table schema as specified by the `TableProvider` minus any partition columns.
243245
///

datafusion/datasource/src/source.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -468,7 +468,7 @@ impl DataSourceExec {
468468
///
469469
/// Returns `None` if
470470
/// 1. the datasource is not scanning files (`FileScanConfig`)
471-
/// 2. The [`FileScanConfig::file_source`] is not of type `T`
471+
/// 2. the file source is not of type `T`
472472
pub fn downcast_to_file_source<T: FileSource + 'static>(&self) -> Option<&T> {
473473
self.data_source().as_any().downcast_ref::<T>()
474474
}

0 commit comments

Comments (0)