package datasource
- Alphabetic
- Public
- All
Type Members
- abstract class CassandraBaseInJoinReader extends PartitionReader[InternalRow] with Logging
- case class CassandraBulkWrite(session: SparkSession, connector: CassandraConnector, tableDef: TableDef, writeConf: WriteConf, inputSchema: StructType, consolidatedConf: SparkConf) extends BatchWrite with StreamingWrite with Product with Serializable
- class CassandraCatalog extends CatalogPlugin with TableCatalog with SupportsNamespaces with Logging
A Spark Sql Catalog for inter-operation with Cassandra.
Namespaces naturally map to C* Keyspaces, but they are always only a single element deep.
- class CassandraCatalogException extends IllegalArgumentException
- case class CassandraCommitMessage() extends WriterCommitMessage with Product with Serializable
- case class CassandraCountPartitionReader(connector: CassandraConnector, tableDef: TableDef, schema: StructType, readConf: ReadConf, queryParts: CqlQueryParts, partition: CassandraPartition[Any, _ <: Token[Any]]) extends CassandraPartitionReaderBase with Product with Serializable
Runs a COUNT(*) query instead of a request for actual rows. Takes the results and returns that many empty internal rows.
- case class CassandraDriverDataWriter(connector: CassandraConnector, tableDef: TableDef, inputSchema: StructType, writeConf: WriteConf) extends DataWriter[InternalRow] with Product with Serializable
- case class CassandraDriverDataWriterFactory(connector: CassandraConnector, tableDef: TableDef, inputSchema: StructType, writeConf: WriteConf) extends DataWriterFactory with StreamingDataWriterFactory with Product with Serializable
- case class CassandraInJoin(session: SparkSession, connector: CassandraConnector, tableDef: TableDef, inClauses: Seq[In], cqlQueryParts: CqlQueryParts, readSchema: StructType, readConf: ReadConf, consolidatedConf: SparkConf) extends Scan with Batch with SupportsReportPartitioning with Product with Serializable
- case class CassandraInJoinCountReader(connector: CassandraConnector, tableDef: TableDef, inClauses: Seq[In], readConf: ReadConf, schema: StructType, cqlQueryParts: CqlQueryParts, partition: InputPartition) extends CassandraBaseInJoinReader with Product with Serializable
- case class CassandraInJoinReader(connector: CassandraConnector, tableDef: TableDef, inClauses: Seq[In], readConf: ReadConf, schema: StructType, cqlQueryParts: CqlQueryParts, partition: InputPartition) extends CassandraBaseInJoinReader with Product with Serializable
- case class CassandraInJoinReaderFactory(connector: CassandraConnector, tableDef: TableDef, inClauses: Seq[In], readConf: ReadConf, schema: StructType, cqlQueryParts: CqlQueryParts) extends PartitionReaderFactory with Product with Serializable
- abstract class CassandraPartitionReaderBase extends PartitionReader[InternalRow] with SupportsReportStatistics with Logging
- class CassandraPartitioning extends KeyGroupedPartitioning
- case class CassandraScan(session: SparkSession, connector: CassandraConnector, tableDef: TableDef, cqlQueryParts: CqlQueryParts, readSchema: StructType, readConf: ReadConf, consolidatedConf: SparkConf) extends Scan with Batch with SupportsReportPartitioning with Product with Serializable
- case class CassandraScanBuilder(session: SparkSession, tableDef: TableDef, catalogName: String, options: CaseInsensitiveStringMap) extends ScanBuilder with SupportsPushDownFilters with SupportsPushDownRequiredColumns with Logging with Product with Serializable
- case class CassandraScanPartitionReader(connector: CassandraConnector, tableDef: TableDef, schema: StructType, readConf: ReadConf, queryParts: CqlQueryParts, partition: CassandraPartition[Any, _ <: Token[Any]]) extends CassandraPartitionReaderBase with Product with Serializable
Physical Scan Reader of Cassandra.
- connector
Connection to Cassandra to use for Reading
- tableDef
Table Definition Information for the table being scanned
- schema
Output Schema to be produced from this read
- readConf
Options relating to how the read should be performed
- queryParts
Additional query elements to add to the TokenRange Scan query
- partition
The Token Range to Query with Localization Info
- case class CassandraScanPartitionReaderFactory(connector: CassandraConnector, tableDef: TableDef, schema: StructType, readConf: ReadConf, queryParts: CqlQueryParts) extends PartitionReaderFactory with Product with Serializable
- case class CassandraTable(session: SparkSession, catalogConf: CaseInsensitiveStringMap, connector: CassandraConnector, catalogName: String, metadata: RelationMetadata, optionalSchema: Option[StructType] = None) extends Table with SupportsRead with SupportsWrite with Product with Serializable
- case class CassandraWriteBuilder(session: SparkSession, tableDef: TableDef, catalogName: String, options: CaseInsensitiveStringMap, inputSchema: StructType) extends WriteBuilder with SupportsTruncate with Product with Serializable
- class InternalRowWriter extends RowWriter[InternalRow]
A RowWriter that can write SparkSQL InternalRows. The schema defines the structure of the InternalRows that will be processed by this writer.
- class InternalRowWriterFactory extends RowWriterFactory[InternalRow]
- case class NumberedInputPartition(index: Int, total: Int) extends InputPartition with Product with Serializable
- class UnsafeRowReader extends RowReader[UnsafeRow]
- class UnsafeRowReaderFactory extends RowReaderFactory[UnsafeRow]
- class UnsafeRowWriter extends RowWriter[UnsafeRow]
A RowWriter that can write SparkSQL UnsafeRow objects. The expressions need to be a sequence of BoundReferences already bound to the incoming UnsafeRows.
- class UnsafeRowWriterFactory extends RowWriterFactory[UnsafeRow]
Value Members
- object CassandraCatalog
- object CassandraScanBuilder extends Serializable
- object CassandraSourceUtil extends Logging
- object InClauseKeyGenerator
- object JoinHelper extends Logging
- object ScanHelper extends Logging
- object UdtProjectionDecoder
Helper for decoding sub selections of Cassandra UDTs (Cassandra always responds with the full UDT, however Spark expects only the selected fields).
The conversion is done on the level of Scala Data types, after conversion from Cassandra and before conversion to Catalyst.