Skip to content

Commit

Permalink
flink paimon catalog docs
Browse files Browse the repository at this point in the history
  • Loading branch information
hdygxsj committed Jan 15, 2025
1 parent 7cdfb87 commit f4dac7e
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,6 @@ protected boolean supportSchemaOperationWithCommentAndOptions() {
return true;
}

/**
 * Options applied when a test helper auto-creates a schema before running a test action.
 *
 * <p>This base implementation returns {@code null} (no options). Subclasses may override it —
 * e.g. to set a Hive {@code location} beneath the given warehouse root.
 *
 * @param warehouse warehouse root path, available to overrides that derive a location
 * @param schemaName name of the schema being created
 * @return option map passed to schema creation, or {@code null} for none
 */
protected Map<String, String> schemaOptions(String warehouse, String schemaName) {
return null;
}

@Test
public void testCreateSchema() {
doWithCatalog(
Expand Down Expand Up @@ -240,7 +236,6 @@ public void testCreateSimpleTable() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -286,7 +281,6 @@ public void testListTables() {
doWithSchema(
currentCatalog(),
newSchema,
schemaOptions(warehouse, newSchema),
catalog -> {
catalog
.asTableCatalog()
Expand Down Expand Up @@ -319,7 +313,6 @@ public void testDropTable() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
String tableName = "test_drop_table";
Column[] columns =
Expand Down Expand Up @@ -348,7 +341,6 @@ public void testGetSimpleTable() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
String tableName = "test_desc_table";
String comment = "comment1";
Expand Down Expand Up @@ -398,7 +390,6 @@ public void testRenameColumn() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -436,7 +427,6 @@ public void testAlterTableComment() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
Optional<org.apache.flink.table.catalog.Catalog> flinkCatalog =
tableEnv.getCatalog(currentCatalog().name());
Expand Down Expand Up @@ -496,7 +486,6 @@ public void testAlterTableAddColumn() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -533,7 +522,6 @@ public void testAlterTableDropColumn() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -565,7 +553,6 @@ public void testAlterColumnTypeAndChangeOrder() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -608,7 +595,6 @@ public void testRenameTable() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -637,7 +623,6 @@ public void testAlterTableProperties() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,18 +159,18 @@ protected TableResult sql(@FormatString String sql, Object... args) {
return tableEnv.executeSql(String.format(sql, args));
}

protected static void doWithSchema(
Catalog catalog,
String schemaName,
Map<String, String> schemaOptions,
Consumer<Catalog> action,
boolean dropSchema) {
/**
 * Options applied when {@code doWithSchema} auto-creates a missing schema.
 *
 * <p>This base implementation returns {@code null} (no options); subclasses that need
 * schema properties — e.g. a Hive {@code location} — override this method.
 *
 * @param schemaName name of the schema being created
 * @return option map handed to {@code createSchema}, or {@code null} for none
 */
protected Map<String, String> schemaOptions(String schemaName) {
return null;
}

protected void doWithSchema(
Catalog catalog, String schemaName, Consumer<Catalog> action, boolean dropSchema) {
Preconditions.checkNotNull(catalog);
Preconditions.checkNotNull(schemaName);
try {
tableEnv.useCatalog(catalog.name());
if (!catalog.asSchemas().schemaExists(schemaName)) {
catalog.asSchemas().createSchema(schemaName, null, schemaOptions);
catalog.asSchemas().createSchema(schemaName, null, schemaOptions(schemaName));
}
tableEnv.useDatabase(schemaName);
action.accept(catalog);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,6 @@ public void testHivePartitionTable() {
doWithSchema(
currentCatalog(),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -396,7 +395,6 @@ public void testCreateHiveTable() {
doWithSchema(
metalake.loadCatalog(DEFAULT_HIVE_CATALOG),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
TableResult result =
sql(
Expand Down Expand Up @@ -500,7 +498,6 @@ public void testGetHiveTable() {
doWithSchema(
metalake.loadCatalog(DEFAULT_HIVE_CATALOG),
databaseName,
schemaOptions(warehouse, databaseName),
catalog -> {
String tableName = "test_desc_table";
String comment = "comment1";
Expand Down Expand Up @@ -591,7 +588,7 @@ protected org.apache.gravitino.Catalog currentCatalog() {
}

@Override
protected Map<String, String> schemaOptions(String warehouse, String schemaName) {
protected Map<String, String> schemaOptions(String schemaName) {
return ImmutableMap.of("location", warehouse + "/" + schemaName);
}
}

0 comments on commit f4dac7e

Please sign in to comment.