public class SparkCatalog extends SparkBaseCatalog implements SupportFunction, SupportView
All Implemented Interfaces include: org.apache.spark.sql.connector.catalog.TableCatalog
A Spark TableCatalog for paimon.
Modifier and Type | Field and Description |
---|---|
protected Catalog |
catalog |
Fields inherited from class org.apache.paimon.spark.SparkBaseCatalog: catalogName
Constructor and Description |
---|
SparkCatalog() |
Modifier and Type | Method and Description |
---|---|
void |
alterNamespace(String[] namespace,
org.apache.spark.sql.connector.catalog.NamespaceChange... changes) |
org.apache.spark.sql.connector.catalog.Table |
alterTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.connector.catalog.TableChange... changes) |
protected List<String> |
convertPartitionTransforms(org.apache.spark.sql.connector.expressions.Transform[] transforms) |
void |
createNamespace(String[] namespace,
Map<String,String> metadata) |
org.apache.spark.sql.connector.catalog.Table |
createTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.types.StructType schema,
org.apache.spark.sql.connector.expressions.Transform[] partitions,
Map<String,String> properties) |
String[] |
defaultNamespace() |
boolean |
dropNamespace(String[] namespace)
Drop a namespace from the catalog, recursively dropping all objects within the namespace.
|
boolean |
dropNamespace(String[] namespace,
boolean cascade)
Drop a namespace from the catalog with cascade mode, recursively dropping all objects within
the namespace if cascade is true.
|
boolean |
dropTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
void |
initialize(String name,
org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
void |
invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
String[][] |
listNamespaces() |
String[][] |
listNamespaces(String[] namespace) |
org.apache.spark.sql.connector.catalog.Identifier[] |
listTables(String[] namespace) |
Map<String,String> |
loadNamespaceMetadata(String[] namespace) |
protected org.apache.spark.sql.connector.catalog.Table |
loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident,
Map<String,String> extraOptions) |
org.apache.spark.sql.connector.catalog.Table |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
org.apache.paimon.spark.SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
long timestamp)
Do not annotate with
@Override here to maintain compatibility with Spark 3.2-. |
org.apache.paimon.spark.SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
String version)
Do not annotate with
@Override here to maintain compatibility with Spark 3.2-. |
Catalog |
paimonCatalog() |
void |
renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent,
org.apache.spark.sql.connector.catalog.Identifier newIdent) |
Methods inherited from class org.apache.paimon.spark.SparkBaseCatalog: loadProcedure, name, usePaimon
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface org.apache.paimon.spark.SupportFunction: isFunctionNamespace, isSystemNamespace, listFunctions, loadFunction
Methods inherited from interface org.apache.spark.sql.connector.catalog.FunctionCatalog: functionExists
Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsNamespaces: namespaceExists
Methods inherited from interface org.apache.paimon.spark.SupportView: createView, dropView, listViews, loadView
protected Catalog catalog
public void initialize(String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
initialize
in interface org.apache.spark.sql.connector.catalog.CatalogPlugin
public Catalog paimonCatalog()
paimonCatalog
in interface WithPaimonCatalog
public String[] defaultNamespace()
defaultNamespace
in interface org.apache.spark.sql.connector.catalog.CatalogPlugin
public void createNamespace(String[] namespace, Map<String,String> metadata) throws org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
createNamespace
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
public String[][] listNamespaces()
listNamespaces
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
public String[][] listNamespaces(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
listNamespaces
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public Map<String,String> loadNamespaceMetadata(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
loadNamespaceMetadata
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public boolean dropNamespace(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
If the catalog implementation does not support this operation, it may throw UnsupportedOperationException.
Parameters:
namespace - a multi-part namespace
Throws:
UnsupportedOperationException - If drop is not a supported operation
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public boolean dropNamespace(String[] namespace, boolean cascade) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
If the catalog implementation does not support this operation, it may throw UnsupportedOperationException.
dropNamespace
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Parameters:
namespace - a multi-part namespace
cascade - When true, deletes all objects under the namespace
Throws:
UnsupportedOperationException - If drop is not a supported operation
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public org.apache.spark.sql.connector.catalog.Identifier[] listTables(String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
listTables
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public void invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
invalidateTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
public org.apache.spark.sql.connector.catalog.Table loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
loadTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
public org.apache.paimon.spark.SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, String version) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Do not annotate with @Override here to maintain compatibility with Spark 3.2-.
loadTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
public org.apache.paimon.spark.SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestamp) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Do not annotate with @Override here to maintain compatibility with Spark 3.2-.
NOTE: Time unit of timestamp here is microsecond (see TableCatalog.loadTable(Identifier, long)
). But in SQL you should use seconds.
loadTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
public org.apache.spark.sql.connector.catalog.Table alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
alterTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
public org.apache.spark.sql.connector.catalog.Table createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, Map<String,String> properties) throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException, org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
createTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
public boolean dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
dropTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
public void renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent, org.apache.spark.sql.connector.catalog.Identifier newIdent) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException, org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
renameTable
in interface org.apache.spark.sql.connector.catalog.TableCatalog
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
protected org.apache.spark.sql.connector.catalog.Table loadSparkTable(org.apache.spark.sql.connector.catalog.Identifier ident, Map<String,String> extraOptions) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
protected List<String> convertPartitionTransforms(org.apache.spark.sql.connector.expressions.Transform[] transforms)
public void alterNamespace(String[] namespace, org.apache.spark.sql.connector.catalog.NamespaceChange... changes)
alterNamespace
in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Copyright © 2023–2024 The Apache Software Foundation. All rights reserved.