Documentation
¶
Index ¶
- Constants
- func ConvertColumnType(colType *sql.ColumnType, typeConverter TypeConverter) (arrow.DataType, bool, arrow.Metadata, error)
- func ExecuteBatchedBulkIngest(ctx context.Context, conn *LoggingConn, options *driverbase.BulkIngestOptions, ...) (totalRowsInserted int64, err error)
- func ExecutePartialBatch(ctx context.Context, conn *LoggingConn, quotedTableName string, ...) (rowsInserted int64, err error)
- type BulkIngester
- type ColumnType
- type ConnectionFactory
- type ConnectionImpl
- type ConnectionImplBase
- func (c *ConnectionImplBase) ClearPending() error
- func (c *ConnectionImplBase) Close() error
- func (c *ConnectionImplBase) Commit(ctx context.Context) error
- func (c *ConnectionImplBase) GetOption(key string) (string, error)
- func (c *ConnectionImplBase) NewStatement() (adbc.Statement, error)
- func (c *ConnectionImplBase) OfferPending(pending io.Closer) error
- func (c *ConnectionImplBase) Rollback(ctx context.Context) error
- func (c *ConnectionImplBase) SetOption(key, value string) error
- func (c *ConnectionImplBase) SetTypeConverter(converter TypeConverter)
- type DBFactory
- type DecimalBuilder
- type DefaultTypeConverter
- func (d DefaultTypeConverter) ConvertArrowToGo(arrowArray arrow.Array, index int, field *arrow.Field) (any, error)
- func (d DefaultTypeConverter) ConvertRawColumnType(colType ColumnType) (arrow.DataType, bool, arrow.Metadata, error)
- func (d DefaultTypeConverter) CreateInserter(field *arrow.Field, builder array.Builder) (Inserter, error)
- type Driver
- func (d *Driver) NewDatabase(opts map[string]string) (adbc.Database, error)
- func (d *Driver) NewDatabaseWithContext(ctx context.Context, opts map[string]string) (adbc.Database, error)
- func (d *Driver) WithConnectionFactory(factory ConnectionFactory) *Driver
- func (d *Driver) WithErrorInspector(inspector driverbase.ErrorInspector) *Driver
- type Inserter
- type LoggingConn
- func (tc *LoggingConn) Close() error
- func (tc *LoggingConn) ExecContext(ctx context.Context, query string, args ...any) (sql.Result, error)
- func (tc *LoggingConn) PingContext(ctx context.Context) error
- func (tc *LoggingConn) PrepareContext(ctx context.Context, query string) (*LoggingStmt, error)
- func (tc *LoggingConn) QueryContext(ctx context.Context, query string, args ...any) (*LoggingRows, error)
- func (tc *LoggingConn) QueryRowContext(ctx context.Context, query string, args ...any) *sql.Row
- type LoggingRows
- func (lr *LoggingRows) Close() error
- func (lr *LoggingRows) ColumnTypes() ([]*sql.ColumnType, error)
- func (lr *LoggingRows) Columns() ([]string, error)
- func (lr *LoggingRows) Err() error
- func (lr *LoggingRows) Next() bool
- func (lr *LoggingRows) NextResultSet() bool
- func (lr *LoggingRows) Scan(dest ...any) error
- type LoggingStmt
- type RowBufferIterator
- type TypeConverter
Constants ¶
const (
	MetaKeyDatabaseTypeName           = "sql.database_type_name"
	MetaKeyColumnName                 = "sql.column_name"
	MetaKeyPrecision                  = "sql.precision"
	MetaKeyScale                      = "sql.scale"
	MetaKeyFractionalSecondsPrecision = "sql.fractional_seconds_precision"
	MetaKeyLength                     = "sql.length"
)
const (
// OptionKeyBatchSize controls how many Arrow records to accumulate in a record batch
OptionKeyBatchSize = "adbc.statement.batch_size"
)
Custom option keys for the sqlwrapper
Variables ¶
This section is empty.
Functions ¶
func ConvertColumnType ¶
func ConvertColumnType(colType *sql.ColumnType, typeConverter TypeConverter) (arrow.DataType, bool, arrow.Metadata, error)
func ExecuteBatchedBulkIngest ¶
func ExecuteBatchedBulkIngest(
	ctx context.Context,
	conn *LoggingConn,
	options *driverbase.BulkIngestOptions,
	stream array.RecordReader,
	typeConverter TypeConverter,
	ingester BulkIngester,
	errorHelper *driverbase.ErrorHelper,
) (totalRowsInserted int64, err error)
ExecuteBatchedBulkIngest provides a generic batched INSERT implementation for SQL databases. This is used by connections that don't have a database-specific bulk loading mechanism.
It generates multi-row INSERT statements like: INSERT INTO table VALUES (row1), (row2), ..., (rowN)
Parameters:
- ingester: provides QuoteIdentifier and GetPlaceholder helpers
Batching behavior:
- uses options.IngestBatchSize (defaults to 1000 if <= 0).
func ExecutePartialBatch ¶
func ExecutePartialBatch(
	ctx context.Context,
	conn *LoggingConn,
	quotedTableName string,
	schema *arrow.Schema,
	buffer []any,
	rowCount int,
	ingester BulkIngester,
	errorHelper *driverbase.ErrorHelper,
) (rowsInserted int64, err error)
ExecutePartialBatch executes a multi-row INSERT with a dynamic number of rows. Builds a multi-row INSERT statement for the exact batch size and executes with parameters.
Types ¶
type BulkIngester ¶
type BulkIngester interface {
ExecuteBulkIngest(ctx context.Context, conn *LoggingConn, options *driverbase.BulkIngestOptions, stream array.RecordReader) (int64, error)
// QuoteIdentifier quotes a table/column identifier for SQL
QuoteIdentifier(name string) string
// GetPlaceholder returns the SQL placeholder for a field at the given parameter index (0-based)
// Examples: "?" for MySQL/Trino, "$1" for PostgreSQL (index+1), "CAST(? AS REAL)" for special types
// For databases using positional placeholders (e.g., PostgreSQL $1, $2, ...), the index indicates
// the parameter position in the overall statement
GetPlaceholder(field *arrow.Field, index int) string
}
BulkIngester interface allows drivers to implement database-specific bulk ingest functionality
type ColumnType ¶
type ConnectionFactory ¶
type ConnectionFactory interface {
// CreateConnection creates a custom connection implementation.
// It receives a pre-built sqlwrapper ConnectionImpl and should return a connection
// that embeds or wraps it to add database-specific functionality.
CreateConnection(
ctx context.Context,
conn *ConnectionImplBase,
) (ConnectionImpl, error)
}
ConnectionFactory allows custom connection implementations to be injected into sqlwrapper. Implementations can provide database-specific functionality like DbObjectsEnumerator.
type ConnectionImpl ¶
type ConnectionImpl interface {
driverbase.ConnectionImpl
// Track a pending operation that blocks other pending operations
// (generally, a query with a result set, which needs to be cancelled
// before running other queries on this connection).
OfferPending(io.Closer) error
// Cancel any other running queries.
ClearPending() error
}
type ConnectionImplBase ¶
type ConnectionImplBase struct {
driverbase.ConnectionImplBase
Derived ConnectionImpl
// Conn is the dedicated SQL connection for this ADBC session
Conn *LoggingConn
// TypeConverter handles SQL-to-Arrow type conversion
TypeConverter TypeConverter
// db is the underlying database for metadata operations
Db *sql.DB
Pending io.Closer
}
ConnectionImplBase implements the ADBC Connection interface on top of database/sql.
func (*ConnectionImplBase) ClearPending ¶
func (c *ConnectionImplBase) ClearPending() error
func (*ConnectionImplBase) Close ¶
func (c *ConnectionImplBase) Close() error
Close closes the underlying SQL connection
func (*ConnectionImplBase) Commit ¶
func (c *ConnectionImplBase) Commit(ctx context.Context) error
Commit is a no-op under auto-commit mode. TODO (https://github.com/adbc-drivers/driverbase-go/issues/28): we'll likely want to utilize https://pkg.go.dev/database/sql#Tx to manage this here.
func (*ConnectionImplBase) GetOption ¶
func (c *ConnectionImplBase) GetOption(key string) (string, error)
func (*ConnectionImplBase) NewStatement ¶
func (c *ConnectionImplBase) NewStatement() (adbc.Statement, error)
NewStatement satisfies adbc.Connection
func (*ConnectionImplBase) OfferPending ¶
func (c *ConnectionImplBase) OfferPending(pending io.Closer) error
func (*ConnectionImplBase) Rollback ¶
func (c *ConnectionImplBase) Rollback(ctx context.Context) error
Rollback is a no-op under auto-commit mode. TODO (https://github.com/adbc-drivers/driverbase-go/issues/28): we'll likely want to utilize https://pkg.go.dev/database/sql#Tx to manage this here.
func (*ConnectionImplBase) SetOption ¶
func (c *ConnectionImplBase) SetOption(key, value string) error
SetOption sets a string option on this connection
func (*ConnectionImplBase) SetTypeConverter ¶
func (c *ConnectionImplBase) SetTypeConverter(converter TypeConverter)
SetTypeConverter allows higher-level drivers to customize type conversion
type DBFactory ¶
type DBFactory interface {
CreateDB(ctx context.Context, driverName string, opts map[string]string, logger *slog.Logger) (*sql.DB, error)
}
DBFactory handles creation of *sql.DB from connection options. Each driver is expected to implement this interface to provide database-specific DSN construction and connection logic for their particular database format.
type DecimalBuilder ¶
DecimalBuilder interface defines the methods needed for decimal builders
type DefaultTypeConverter ¶
type DefaultTypeConverter struct {
VendorName string
}
DefaultTypeConverter provides the default SQL-to-Arrow type conversion
func (DefaultTypeConverter) ConvertArrowToGo ¶
func (d DefaultTypeConverter) ConvertArrowToGo(arrowArray arrow.Array, index int, field *arrow.Field) (any, error)
ConvertArrowToGo implements the default Arrow value to Go value conversion
func (DefaultTypeConverter) ConvertRawColumnType ¶
func (d DefaultTypeConverter) ConvertRawColumnType(colType ColumnType) (arrow.DataType, bool, arrow.Metadata, error)
ConvertRawColumnType implements TypeConverter interface with the default conversion logic
func (DefaultTypeConverter) CreateInserter ¶
func (d DefaultTypeConverter) CreateInserter(field *arrow.Field, builder array.Builder) (Inserter, error)
CreateInserter implements TypeConverter.CreateInserter for DefaultTypeConverter
type Driver ¶
type Driver struct {
driverbase.DriverImplBase
// contains filtered or unexported fields
}
Driver provides an ADBC driver implementation that wraps database/sql drivers. It uses a configurable TypeConverter for SQL-to-Arrow type mapping and conversion.
func NewDriver ¶
func NewDriver(alloc memory.Allocator, driverName, vendorName string, dbFactory DBFactory, converter TypeConverter) *Driver
NewDriver creates a new sqlwrapper Driver with driver name, required DBFactory, and optional type converter. If converter is nil, uses DefaultTypeConverter.
func (*Driver) NewDatabase ¶
NewDatabase is the main entrypoint for driver-agnostic ADBC database creation. It uses the driver name provided to NewDriver and expects opts[adbc.OptionKeyURI] to be the DSN/URI.
func (*Driver) NewDatabaseWithContext ¶
func (d *Driver) NewDatabaseWithContext(ctx context.Context, opts map[string]string) (adbc.Database, error)
NewDatabaseWithContext is the same as NewDatabase, but lets you pass in a context.
func (*Driver) WithConnectionFactory ¶
func (d *Driver) WithConnectionFactory(factory ConnectionFactory) *Driver
WithConnectionFactory sets a custom connection factory for this driver. This allows database-specific drivers to provide custom connection implementations with additional functionality like DbObjectsEnumerator.
func (*Driver) WithErrorInspector ¶
func (d *Driver) WithErrorInspector(inspector driverbase.ErrorInspector) *Driver
WithErrorInspector sets a custom error inspector for extracting database error metadata. This allows drivers to map database-specific errors to ADBC status codes and extract SQLSTATE, vendor codes, and other error information.
type Inserter ¶
type Inserter interface {
// AppendValue converts a SQL value and appends it directly to the pre-bound Arrow builder
AppendValue(sqlValue any) error
}
Inserter handles SQL-to-Arrow value conversion and builder appending for a specific Arrow type The inserter is bound to a specific Arrow builder during creation to eliminate per-value type switching
type LoggingConn ¶
func (*LoggingConn) Close ¶
func (tc *LoggingConn) Close() error
func (*LoggingConn) ExecContext ¶
func (*LoggingConn) PingContext ¶
func (tc *LoggingConn) PingContext(ctx context.Context) error
func (*LoggingConn) PrepareContext ¶
func (tc *LoggingConn) PrepareContext(ctx context.Context, query string) (*LoggingStmt, error)
func (*LoggingConn) QueryContext ¶
func (tc *LoggingConn) QueryContext(ctx context.Context, query string, args ...any) (*LoggingRows, error)
func (*LoggingConn) QueryRowContext ¶
type LoggingRows ¶
func (*LoggingRows) Close ¶
func (lr *LoggingRows) Close() error
func (*LoggingRows) ColumnTypes ¶
func (lr *LoggingRows) ColumnTypes() ([]*sql.ColumnType, error)
func (*LoggingRows) Columns ¶
func (lr *LoggingRows) Columns() ([]string, error)
func (*LoggingRows) Err ¶
func (lr *LoggingRows) Err() error
func (*LoggingRows) Next ¶
func (lr *LoggingRows) Next() bool
func (*LoggingRows) NextResultSet ¶
func (lr *LoggingRows) NextResultSet() bool
func (*LoggingRows) Scan ¶
func (lr *LoggingRows) Scan(dest ...any) error
type LoggingStmt ¶
func (*LoggingStmt) Close ¶
func (ls *LoggingStmt) Close() error
func (*LoggingStmt) ExecContext ¶
func (*LoggingStmt) QueryContext ¶
func (ls *LoggingStmt) QueryContext(ctx context.Context, args ...any) (*LoggingRows, error)
type RowBufferIterator ¶
type RowBufferIterator struct {
// contains filtered or unexported fields
}
RowBufferIterator accumulates rows from an Arrow RecordReader into fixed-size batches of Go values, crossing Arrow batch boundaries transparently.
func NewRowBufferIterator ¶
func NewRowBufferIterator(
	reader array.RecordReader,
	batchSize int,
	typeConverter TypeConverter,
) (*RowBufferIterator, error)
NewRowBufferIterator creates a new iterator that accumulates rows into fixed-size batches.
Parameters:
- reader: Arrow RecordReader to consume
- batchSize: Target number of rows per SQL batch
- typeConverter: Converts Arrow values to Go values
Returns error if batchSize <= 0 or reader is nil.
func (*RowBufferIterator) CurrentBatch ¶
func (it *RowBufferIterator) CurrentBatch() (buffer []any, rowCount int)
CurrentBatch returns the current batch of Go values and its size. The returned slice is a view into the internal buffer and will be overwritten on the next call to Next().
Returns:
- buffer: []any slice containing rowCount * numCols values
- rowCount: Number of complete rows in the batch (may be < batchSize at end)
The buffer uses row-major layout: [row0_col0, row0_col1, ..., rowN_colM]
func (*RowBufferIterator) Err ¶
func (it *RowBufferIterator) Err() error
Err returns the first error encountered during iteration. Should be called after Next() returns false.
func (*RowBufferIterator) Next ¶
func (it *RowBufferIterator) Next() bool
Next fills the buffer with up to batchSize rows, crossing Arrow batch boundaries. Returns true if a batch is available (could be full or partial), false if stream is exhausted.
type TypeConverter ¶
type TypeConverter interface {
// ConvertRawColumnType converts a raw ColumnType (with metadata from strings or internal struct) to an Arrow type and nullable flag
// It also returns metadata that should be included in the Arrow field.
ConvertRawColumnType(colType ColumnType) (arrowType arrow.DataType, nullable bool, metadata arrow.Metadata, err error)
// CreateInserter creates a type-specific inserter bound to a specific Arrow builder
// This allows drivers to provide custom inserters for specific types (e.g., MySQL JSON, spatial types)
// The inserter is pre-bound to the builder to eliminate per-value type switching
CreateInserter(field *arrow.Field, builder array.Builder) (Inserter, error)
// ConvertArrowToGo extracts a Go value from an Arrow array at the given index
// This is used for parameter binding and value extraction
// The field parameter provides access to the Arrow field metadata
ConvertArrowToGo(arrowArray arrow.Array, index int, field *arrow.Field) (any, error)
}
TypeConverter allows higher-level drivers to customize SQL-to-Arrow type and value conversion