public class SparkCatalog extends SparkBaseCatalog implements SupportFunction
TableCatalog for paimon.

| Modifier and Type | Field and Description |
|---|---|
protected Catalog |
catalog |
catalogName

| Constructor and Description |
|---|
SparkCatalog() |
| Modifier and Type | Method and Description |
|---|---|
void |
alterNamespace(String[] namespace,
org.apache.spark.sql.connector.catalog.NamespaceChange... changes) |
org.apache.spark.sql.connector.catalog.Table |
alterTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.connector.catalog.TableChange... changes) |
protected List<String> |
convertPartitionTransforms(org.apache.spark.sql.connector.expressions.Transform[] transforms) |
void |
createNamespace(String[] namespace,
Map<String,String> metadata) |
org.apache.paimon.spark.SparkTable |
createTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.types.StructType schema,
org.apache.spark.sql.connector.expressions.Transform[] partitions,
Map<String,String> properties) |
String[] |
defaultNamespace() |
boolean |
dropNamespace(String[] namespace)
Drop a namespace from the catalog, recursively dropping all objects within the namespace.
|
boolean |
dropNamespace(String[] namespace,
boolean cascade)
Drop a namespace from the catalog with cascade mode, recursively dropping all objects within
the namespace if cascade is true.
|
boolean |
dropTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
void |
initialize(String name,
org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
void |
invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
String[][] |
listNamespaces() |
String[][] |
listNamespaces(String[] namespace) |
org.apache.spark.sql.connector.catalog.Identifier[] |
listTables(String[] namespace) |
Map<String,String> |
loadNamespaceMetadata(String[] namespace) |
protected org.apache.paimon.spark.SparkTable |
loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident,
Map<String,String> extraOptions) |
org.apache.paimon.spark.SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
org.apache.paimon.spark.SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
long timestamp)
Do not annotate with @Override here to maintain compatibility with Spark 3.2-. |
org.apache.paimon.spark.SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
String version)
Do not annotate with @Override here to maintain compatibility with Spark 3.2-. |
Catalog |
paimonCatalog() |
void |
renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent,
org.apache.spark.sql.connector.catalog.Identifier newIdent) |
boolean |
tableExists(org.apache.spark.sql.connector.catalog.Identifier ident) |
protected Identifier |
toIdentifier(org.apache.spark.sql.connector.catalog.Identifier ident) |
Methods inherited from class SparkBaseCatalog: loadProcedure, name, usePaimon
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface SupportFunction: isFunctionNamespace, isSystemNamespace, listFunctions, loadFunction, functionExists

Field Detail:
protected Catalog catalog
public void initialize(String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
initialize in interface org.apache.spark.sql.connector.catalog.CatalogPlugin

public Catalog paimonCatalog()
paimonCatalog in interface WithPaimonCatalog

public String[] defaultNamespace()
defaultNamespace in interface org.apache.spark.sql.connector.catalog.CatalogPlugin

public void createNamespace(String[] namespace, Map<String,String> metadata) throws org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
createNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws: org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException

public String[][] listNamespaces()
listNamespaces in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces

public String[][] listNamespaces(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
listNamespaces in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public Map<String,String> loadNamespaceMetadata(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
loadNamespaceMetadata in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public boolean dropNamespace(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
If the catalog implementation does not support this operation, it may throw UnsupportedOperationException.
Parameters: namespace - a multi-part namespace
Throws: UnsupportedOperationException - If drop is not a supported operation
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public boolean dropNamespace(String[] namespace, boolean cascade) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
If the catalog implementation does not support this operation, it may throw UnsupportedOperationException.
dropNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Parameters: namespace - a multi-part namespace
Parameters: cascade - When true, deletes all objects under the namespace
Throws: UnsupportedOperationException - If drop is not a supported operation
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.Identifier[] listTables(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
listTables in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public void invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
invalidateTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public org.apache.paimon.spark.SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.paimon.spark.SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
String version)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Do not annotate with @Override here to maintain compatibility with Spark 3.2-.
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.paimon.spark.SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
long timestamp)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Do not annotate with @Override here to maintain compatibility with Spark 3.2-.
NOTE: Time unit of timestamp here is microsecond (see TableCatalog.loadTable(Identifier, long)). But in SQL you should use seconds.
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public boolean tableExists(org.apache.spark.sql.connector.catalog.Identifier ident)
tableExists in interface org.apache.spark.sql.connector.catalog.TableCatalog

public org.apache.spark.sql.connector.catalog.Table alterTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.connector.catalog.TableChange... changes)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
alterTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.paimon.spark.SparkTable createTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.types.StructType schema,
org.apache.spark.sql.connector.expressions.Transform[] partitions,
Map<String,String> properties)
throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException,
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
createTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public boolean dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
dropTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public void renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent,
org.apache.spark.sql.connector.catalog.Identifier newIdent)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException,
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
renameTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Throws: org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException

protected Identifier toIdentifier(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

protected org.apache.paimon.spark.SparkTable loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident,
Map<String,String> extraOptions)
throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

protected List<String> convertPartitionTransforms(org.apache.spark.sql.connector.expressions.Transform[] transforms)
public void alterNamespace(String[] namespace, org.apache.spark.sql.connector.catalog.NamespaceChange... changes)
alterNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces

Copyright © 2023–2024 The Apache Software Foundation. All rights reserved.