Skip to content

Commit 9677fa9

Browse files
betodealmeida authored and sadpandajoe committed
fix: handle empty catalog when DB supports them (#29840)
(cherry picked from commit 39209c2)
1 parent 16295b0 commit 9677fa9

File tree

23 files changed

+100
-148
lines changed

23 files changed

+100
-148
lines changed

scripts/change_detector.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@
5252
def fetch_files_github_api(url: str): # type: ignore
5353
"""Fetches data using GitHub API."""
5454
req = Request(url)
55-
req.add_header("Authorization", f"token {GITHUB_TOKEN}")
55+
req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}")
5656
req.add_header("Accept", "application/vnd.github.v3+json")
5757

5858
print(f"Fetching from {url}")

superset/cachekeys/schemas.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -32,6 +32,10 @@ class Datasource(Schema):
3232
datasource_name = fields.String(
3333
metadata={"description": datasource_name_description},
3434
)
35+
catalog = fields.String(
36+
allow_none=True,
37+
metadata={"description": "Datasource catalog"},
38+
)
3539
schema = fields.String(
3640
metadata={"description": "Datasource schema"},
3741
)

superset/commands/dataset/create.py

Lines changed: 12 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -54,23 +54,28 @@ def run(self) -> Model:
5454
def validate(self) -> None:
5555
exceptions: list[ValidationError] = []
5656
database_id = self._properties["database"]
57-
schema = self._properties.get("schema")
5857
catalog = self._properties.get("catalog")
58+
schema = self._properties.get("schema")
59+
table_name = self._properties["table_name"]
5960
sql = self._properties.get("sql")
6061
owner_ids: Optional[list[int]] = self._properties.get("owners")
6162

62-
table = Table(self._properties["table_name"], schema, catalog)
63-
64-
# Validate uniqueness
65-
if not DatasetDAO.validate_uniqueness(database_id, table):
66-
exceptions.append(DatasetExistsValidationError(table))
67-
6863
# Validate/Populate database
6964
database = DatasetDAO.get_database_by_id(database_id)
7065
if not database:
7166
exceptions.append(DatabaseNotFoundValidationError())
7267
self._properties["database"] = database
7368

69+
# Validate uniqueness
70+
if database:
71+
if not catalog:
72+
catalog = self._properties["catalog"] = database.get_default_catalog()
73+
74+
table = Table(table_name, schema, catalog)
75+
76+
if not DatasetDAO.validate_uniqueness(database, table):
77+
exceptions.append(DatasetExistsValidationError(table))
78+
7479
# Validate table exists on dataset if sql is not provided
7580
# This should be validated when the dataset is physical
7681
if (

superset/commands/dataset/importers/v1/utils.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -166,7 +166,7 @@ def import_dataset(
166166

167167
try:
168168
table_exists = dataset.database.has_table(
169-
Table(dataset.table_name, dataset.schema),
169+
Table(dataset.table_name, dataset.schema, dataset.catalog),
170170
)
171171
except Exception: # pylint: disable=broad-except
172172
# MySQL doesn't play nice with GSheets table names

superset/commands/dataset/update.py

Lines changed: 13 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -79,10 +79,12 @@ def run(self) -> Model:
7979
def validate(self) -> None:
8080
exceptions: list[ValidationError] = []
8181
owner_ids: Optional[list[int]] = self._properties.get("owners")
82+
8283
# Validate/populate model exists
8384
self._model = DatasetDAO.find_by_id(self._model_id)
8485
if not self._model:
8586
raise DatasetNotFoundError()
87+
8688
# Check ownership
8789
try:
8890
security_manager.raise_for_ownership(self._model)
@@ -91,22 +93,30 @@ def validate(self) -> None:
9193

9294
database_id = self._properties.get("database")
9395

96+
catalog = self._properties.get("catalog")
97+
if not catalog:
98+
catalog = self._properties["catalog"] = (
99+
self._model.database.get_default_catalog()
100+
)
101+
94102
table = Table(
95103
self._properties.get("table_name"), # type: ignore
96104
self._properties.get("schema"),
97-
self._properties.get("catalog"),
105+
catalog,
98106
)
99107

100108
# Validate uniqueness
101109
if not DatasetDAO.validate_update_uniqueness(
102-
self._model.database_id,
110+
self._model.database,
103111
table,
104112
self._model_id,
105113
):
106114
exceptions.append(DatasetExistsValidationError(table))
115+
107116
# Validate/Populate database not allowed to change
108117
if database_id and database_id != self._model:
109118
exceptions.append(DatabaseChangeValidationError())
119+
110120
# Validate/Populate owner
111121
try:
112122
owners = self.compute_owners(
@@ -116,6 +126,7 @@ def validate(self) -> None:
116126
self._properties["owners"] = owners
117127
except ValidationError as ex:
118128
exceptions.append(ex)
129+
119130
# Validate columns
120131
if columns := self._properties.get("columns"):
121132
self._validate_columns(columns, exceptions)

superset/connectors/sqla/models.py

Lines changed: 6 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -461,9 +461,11 @@ def data_for_slices( # pylint: disable=too-many-locals
461461
)
462462
else:
463463
_columns = [
464-
utils.get_column_name(column_)
465-
if utils.is_adhoc_column(column_)
466-
else column_
464+
(
465+
utils.get_column_name(column_)
466+
if utils.is_adhoc_column(column_)
467+
else column_
468+
)
467469
for column_param in COLUMN_FORM_DATA_PARAMS
468470
for column_ in utils.as_list(form_data.get(column_param) or [])
469471
]
@@ -1963,7 +1965,7 @@ class and any keys added via `ExtraCache`.
19631965
if self.has_extra_cache_key_calls(query_obj):
19641966
sqla_query = self.get_sqla_query(**query_obj)
19651967
extra_cache_keys += sqla_query.extra_cache_keys
1966-
return extra_cache_keys
1968+
return list(set(extra_cache_keys))
19671969

19681970
@property
19691971
def quote_identifier(self) -> Callable[[str], str]:

superset/daos/dataset.py

Lines changed: 14 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -84,15 +84,19 @@ def validate_table_exists(
8484

8585
@staticmethod
8686
def validate_uniqueness(
87-
database_id: int,
87+
database: Database,
8888
table: Table,
8989
dataset_id: int | None = None,
9090
) -> bool:
91+
# The catalog might not be set even if the database supports catalogs, in case
92+
# multi-catalog is disabled.
93+
catalog = table.catalog or database.get_default_catalog()
94+
9195
dataset_query = db.session.query(SqlaTable).filter(
9296
SqlaTable.table_name == table.table,
9397
SqlaTable.schema == table.schema,
94-
SqlaTable.catalog == table.catalog,
95-
SqlaTable.database_id == database_id,
98+
SqlaTable.catalog == catalog,
99+
SqlaTable.database_id == database.id,
96100
)
97101

98102
if dataset_id:
@@ -103,15 +107,19 @@ def validate_uniqueness(
103107

104108
@staticmethod
105109
def validate_update_uniqueness(
106-
database_id: int,
110+
database: Database,
107111
table: Table,
108112
dataset_id: int,
109113
) -> bool:
114+
# The catalog might not be set even if the database supports catalogs, in case
115+
# multi-catalog is disabled.
116+
catalog = table.catalog or database.get_default_catalog()
117+
110118
dataset_query = db.session.query(SqlaTable).filter(
111119
SqlaTable.table_name == table.table,
112-
SqlaTable.database_id == database_id,
120+
SqlaTable.database_id == database.id,
113121
SqlaTable.schema == table.schema,
114-
SqlaTable.catalog == table.catalog,
122+
SqlaTable.catalog == catalog,
115123
SqlaTable.id != dataset_id,
116124
)
117125
return not db.session.query(dataset_query.exists()).scalar()

superset/databases/api.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1159,7 +1159,7 @@ def select_star(
11591159
self.incr_stats("init", self.select_star.__name__)
11601160
try:
11611161
result = database.select_star(
1162-
Table(table_name, schema_name),
1162+
Table(table_name, schema_name, database.get_default_catalog()),
11631163
latest_partition=True,
11641164
)
11651165
except NoSuchTableError:

superset/jinja_context.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -565,7 +565,7 @@ def process_template(self, sql: str, **kwargs: Any) -> str:
565565
"""
566566
Makes processing a template a noop
567567
"""
568-
return sql
568+
return str(sql)
569569

570570

571571
class PrestoTemplateProcessor(JinjaTemplateProcessor):

superset/models/core.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -490,7 +490,7 @@ def _get_sqla_engine( # pylint: disable=too-many-locals
490490
g.user.id,
491491
self.db_engine_spec,
492492
)
493-
if hasattr(g, "user") and hasattr(g.user, "id") and oauth2_config
493+
if oauth2_config and hasattr(g, "user") and hasattr(g.user, "id")
494494
else None
495495
)
496496
# If using MySQL or Presto for example, will set url.username

0 commit comments

Comments (0)