Skip to main content
Version: 0.16.8

SparkFilesystemDatasource

class great_expectations.datasource.fluent.SparkFilesystemDatasource(*, type: Literal['spark_filesystem'] = 'spark_filesystem', name: str, id: Optional[uuid.UUID] = None, assets: List[great_expectations.datasource.fluent.file_path_data_asset._FilePathDataAsset] = [], base_directory: pathlib.Path, data_context_root_directory: Optional[pathlib.Path] = None)#
add_csv_asset(name: str, *, id: Optional[uuid.UUID] = None, order_by: List[great_expectations.datasource.fluent.interfaces.Sorter] = None, batch_metadata: Dict[str, Any] = None, batching_regex: Pattern = re.compile('.*'), connect_options: Mapping = None, header: bool = False, inferSchema: bool = False) pydantic.BaseModel#