Modifier and Type | Class and Description |
---|---|
protected static class | BaseProcedure.Builder<T extends org.apache.paimon.spark.procedure.BaseProcedure> |
Modifier | Constructor and Description |
---|---|
protected | MigrateTableProcedure(org.apache.spark.sql.connector.catalog.TableCatalog tableCatalog) |
Modifier and Type | Method and Description |
---|---|
static ProcedureBuilder | builder() |
org.apache.spark.sql.catalyst.InternalRow[] | call(org.apache.spark.sql.catalyst.InternalRow args) - Executes the given stored procedure. |
protected org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation | createRelation(org.apache.spark.sql.connector.catalog.Identifier ident) |
String | description() - Returns the description of the stored procedure. |
protected org.apache.paimon.spark.SparkTable | loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
static Map<String,String> | mapDataToHashMap(org.apache.spark.sql.catalyst.util.MapData mapData) |
protected <T> T | modifyPaimonTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.util.function.Function<Table,T> func) |
protected org.apache.spark.sql.catalyst.InternalRow | newInternalRow(Object... values) |
org.apache.spark.sql.types.StructType | outputType() - Returns the type of rows produced by the stored procedure. |
ProcedureParameter[] | parameters() - Returns the input parameters of the stored procedure. |
protected void | refreshSparkCache(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.Table table) |
protected org.apache.spark.sql.SparkSession | spark() |
protected org.apache.spark.sql.connector.catalog.TableCatalog | tableCatalog() |
protected SparkUtils.CatalogAndIdentifier | toCatalogAndIdentifier(String identifierAsString, String argName, org.apache.spark.sql.connector.catalog.CatalogPlugin catalog) |
protected org.apache.spark.sql.connector.catalog.Identifier | toIdentifier(String identifierAsString, String argName) |
protected MigrateTableProcedure(org.apache.spark.sql.connector.catalog.TableCatalog tableCatalog)
public ProcedureParameter[] parameters()
Specified by: parameters in interface Procedure
public org.apache.spark.sql.types.StructType outputType()
Specified by: outputType in interface Procedure
public org.apache.spark.sql.catalyst.InternalRow[] call(org.apache.spark.sql.catalyst.InternalRow args)
Executes the given stored procedure.
Specified by: call in interface Procedure
Parameters: args - Input arguments.

public static Map<String,String> mapDataToHashMap(org.apache.spark.sql.catalyst.util.MapData mapData)
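The conversion performed by mapDataToHashMap can be approximated with plain Spark APIs. The sketch below assumes string keys and values (the way procedure options are typically passed) and is an illustration, not the actual Paimon implementation:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.MapData;

public final class MapDataExample {
    // Approximation of what a MapData-to-HashMap helper does for string maps.
    static Map<String, String> toHashMap(MapData mapData) {
        Map<String, String> result = new HashMap<>();
        ArrayData keys = mapData.keyArray();
        ArrayData values = mapData.valueArray();
        for (int i = 0; i < mapData.numElements(); i++) {
            result.put(
                    keys.getUTF8String(i).toString(),
                    values.getUTF8String(i).toString());
        }
        return result;
    }
}
```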
public static ProcedureBuilder builder()
public String description()
Specified by: description in interface Procedure
protected org.apache.spark.sql.connector.catalog.Identifier toIdentifier(String identifierAsString, String argName)
protected SparkUtils.CatalogAndIdentifier toCatalogAndIdentifier(String identifierAsString, String argName, org.apache.spark.sql.connector.catalog.CatalogPlugin catalog)
protected <T> T modifyPaimonTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.util.function.Function<Table,T> func)
protected org.apache.paimon.spark.SparkTable loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident)
protected org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation createRelation(org.apache.spark.sql.connector.catalog.Identifier ident)
protected void refreshSparkCache(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.Table table)
protected org.apache.spark.sql.catalyst.InternalRow newInternalRow(Object... values)
protected org.apache.spark.sql.SparkSession spark()
protected org.apache.spark.sql.connector.catalog.TableCatalog tableCatalog()
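The newInternalRow(Object...) helper presumably packs its arguments into a Spark GenericInternalRow, the conventional way to produce the rows returned by call(...). A standalone sketch of that pattern follows; note that string values inside an InternalRow must be UTF8String rather than java.lang.String. The helper name and the result-row layout here are assumptions for illustration, not the actual Paimon code:

```java
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.unsafe.types.UTF8String;

public final class InternalRowExample {
    // Sketch of assembling a procedure-style result row; mirrors what a
    // newInternalRow(Object...) helper is assumed to do.
    static InternalRow resultRow(boolean success, String message) {
        return new GenericInternalRow(
                new Object[] {success, UTF8String.fromString(message)});
    }

    public static void main(String[] args) {
        InternalRow row = resultRow(true, "migration finished");
        System.out.println(row.getBoolean(0) + " / " + row.getUTF8String(1));
    }
}
```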
Copyright © 2023–2024 The Apache Software Foundation. All rights reserved.