cube-backup.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=line-too-long,unused-argument,ungrouped-imports
"""A collection of ORM sqlalchemy models for Superset"""
import json
import logging
import textwrap
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type

import numpy
import pandas as pd
import sqlalchemy as sqla
import sqlparse
from flask import g, request
from flask_appbuilder import Model
from sqlalchemy import (
    Boolean,
    Column,
    create_engine,
    DateTime,
    ForeignKey,
    Integer,
    MetaData,
    String,
    Table,
    Text,
)
from sqlalchemy.engine import Dialect, Engine, url
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import make_url, URL
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from sqlalchemy.pool import NullPool
from sqlalchemy.schema import UniqueConstraint
from sqlalchemy.sql import expression, Select
from sqlalchemy_utils import EncryptedType

from superset import app, db_engine_specs, is_feature_enabled, security_manager
from superset.db_engine_specs.base import TimeGrain
from superset.models.dashboard import Dashboard
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.models.tags import DashboardUpdater, FavStarUpdater
from superset.utils import cache as cache_util, core as utils

config = app.config
custom_password_store = config["SQLALCHEMY_CUSTOM_PASSWORD_STORE"]
stats_logger = config["STATS_LOGGER"]
log_query = config["QUERY_LOGGER"]
metadata = Model.metadata  # pylint: disable=no-member
logger = logging.getLogger(__name__)

PASSWORD_MASK = "X" * 10

DB_CONNECTION_MUTATOR = config["DB_CONNECTION_MUTATOR"]


class Url(Model, AuditMixinNullable):
    """Used for the short url feature"""

    __tablename__ = "url"
    id = Column(Integer, primary_key=True)
    url = Column(Text)


class KeyValue(Model):  # pylint: disable=too-few-public-methods
    """Used for any type of key-value store"""

    __tablename__ = "keyvalue"
    id = Column(Integer, primary_key=True)
    value = Column(Text, nullable=False)


class CssTemplate(Model, AuditMixinNullable):
    """CSS templates for dashboards"""

    __tablename__ = "css_templates"
    id = Column(Integer, primary_key=True)
    template_name = Column(String(250))
    css = Column(Text, default="")


class Database(
    Model, AuditMixinNullable, ImportMixin
):  # pylint: disable=too-many-public-methods
    """An ORM object that stores Database related information"""

    __tablename__ = "dbs"
    type = "table"
    __table_args__ = (UniqueConstraint("database_name"),)

    id = Column(Integer, primary_key=True)
    verbose_name = Column(String(250), unique=True)
    # short unique name, used in permissions
    database_name = Column(String(250), unique=True, nullable=False)
    sqlalchemy_uri = Column(String(1024), nullable=False)
    password = Column(EncryptedType(String(1024), config["SECRET_KEY"]))
    cache_timeout = Column(Integer)
    select_as_create_table_as = Column(Boolean, default=False)
    expose_in_sqllab = Column(Boolean, default=True)
    allow_run_async = Column(Boolean, default=False)
    allow_csv_upload = Column(Boolean, default=False)
    allow_ctas = Column(Boolean, default=False)
    allow_cvas = Column(Boolean, default=False)
    allow_dml = Column(Boolean, default=False)
    force_ctas_schema = Column(String(250))
    allow_multi_schema_metadata_fetch = Column(  # pylint: disable=invalid-name
        Boolean, default=False
    )
    extra = Column(
        Text,
        default=textwrap.dedent(
            """\
    {
        "metadata_params": {},
        "engine_params": {},
        "metadata_cache_timeout": {},
        "schemas_allowed_for_csv_upload": []
    }
    """
        ),
    )
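
    # Illustrative only: a populated ``extra`` blob might look like the following
    # (the nested values here are hypothetical examples, not defaults):
    #
    #   {
    #       "metadata_params": {},
    #       "engine_params": {"connect_args": {"sslmode": "require"}},
    #       "metadata_cache_timeout": {"schema_cache_timeout": 600, "table_cache_timeout": 600},
    #       "schemas_allowed_for_csv_upload": ["public"]
    #   }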
    encrypted_extra = Column(EncryptedType(Text, config["SECRET_KEY"]), nullable=True)
    impersonate_user = Column(Boolean, default=False)
    server_cert = Column(EncryptedType(Text, config["SECRET_KEY"]), nullable=True)
    export_fields = [
        "database_name",
        "sqlalchemy_uri",
        "cache_timeout",
        "expose_in_sqllab",
        "allow_run_async",
        "allow_ctas",
        "allow_cvas",
        "allow_csv_upload",
        "extra",
    ]
    export_children = ["tables"]

    def __repr__(self) -> str:
        return self.name

    @property
    def name(self) -> str:
        return self.verbose_name if self.verbose_name else self.database_name

    @property
    def allows_subquery(self) -> bool:
        return self.db_engine_spec.allows_subqueries

    @property
    def function_names(self) -> List[str]:
        try:
            return self.db_engine_spec.get_function_names(self)
        except Exception as ex:  # pylint: disable=broad-except
            # function_names property is used in bulk APIs and should not hard crash
            # more info in: https://github.com/apache/incubator-superset/issues/9678
            logger.error(
                "Failed to fetch database function names with error: %s", str(ex)
            )
        return []

    @property
    def allows_cost_estimate(self) -> bool:
        extra = self.get_extra()
        database_version = extra.get("version")
        cost_estimate_enabled: bool = extra.get("cost_estimate_enabled")  # type: ignore
        return (
            self.db_engine_spec.get_allow_cost_estimate(database_version)
            and cost_estimate_enabled
        )

    @property
    def allows_virtual_table_explore(self) -> bool:
        extra = self.get_extra()
        return bool(extra.get("allows_virtual_table_explore", True))

    @property
    def explore_database_id(self) -> int:
        return self.get_extra().get("explore_database_id", self.id)

    @property
    def data(self) -> Dict[str, Any]:
        return {
            "id": self.id,
            "name": self.database_name,
            "backend": self.backend,
            "allow_multi_schema_metadata_fetch": self.allow_multi_schema_metadata_fetch,
            "allows_subquery": self.allows_subquery,
            "allows_cost_estimate": self.allows_cost_estimate,
            "allows_virtual_table_explore": self.allows_virtual_table_explore,
            "explore_database_id": self.explore_database_id,
        }

    @property
    def unique_name(self) -> str:
        return self.database_name

    @property
    def url_object(self) -> URL:
        return make_url(self.sqlalchemy_uri_decrypted)

    @property
    def backend(self) -> str:
        sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
        return sqlalchemy_url.get_backend_name()  # pylint: disable=no-member

    @property
    def metadata_cache_timeout(self) -> Dict[str, Any]:
        return self.get_extra().get("metadata_cache_timeout", {})

    @property
    def schema_cache_enabled(self) -> bool:
        return "schema_cache_timeout" in self.metadata_cache_timeout

    @property
    def schema_cache_timeout(self) -> Optional[int]:
        return self.metadata_cache_timeout.get("schema_cache_timeout")

    @property
    def table_cache_enabled(self) -> bool:
        return "table_cache_timeout" in self.metadata_cache_timeout

    @property
    def table_cache_timeout(self) -> Optional[int]:
        return self.metadata_cache_timeout.get("table_cache_timeout")

    @property
    def default_schemas(self) -> List[str]:
        return self.get_extra().get("default_schemas", [])

    @property
    def connect_args(self) -> Dict[str, Any]:
        return self.get_extra().get("engine_params", {}).get("connect_args", {})

    @classmethod
    def get_password_masked_url_from_uri(  # pylint: disable=invalid-name
        cls, uri: str
    ) -> URL:
        sqlalchemy_url = make_url(uri)
        return cls.get_password_masked_url(sqlalchemy_url)

    @classmethod
    def get_password_masked_url(
        cls, url: URL  # pylint: disable=redefined-outer-name
    ) -> URL:
        url_copy = deepcopy(url)
        if url_copy.password is not None:
            url_copy.password = PASSWORD_MASK
        return url_copy

    def set_sqlalchemy_uri(self, uri: str) -> None:
        conn = sqla.engine.url.make_url(uri.strip())
        if conn.password != PASSWORD_MASK and not custom_password_store:
            # do not over-write the password with the password mask
            self.password = conn.password
        conn.password = PASSWORD_MASK if conn.password else None
        self.sqlalchemy_uri = str(conn)  # hides the password
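
    # Rough sketch of the masking above (URI and credentials are hypothetical):
    # given "postgresql://user:secret@host/db", ``self.password`` keeps "secret"
    # (stored encrypted) while ``self.sqlalchemy_uri`` is persisted as
    # "postgresql://user:XXXXXXXXXX@host/db", i.e. with PASSWORD_MASK in place.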

    def get_effective_user(
        self,
        url: URL,  # pylint: disable=redefined-outer-name
        user_name: Optional[str] = None,
    ) -> Optional[str]:
        """
        Get the effective user, especially during impersonation.

        :param url: SQL Alchemy URL object
        :param user_name: Default username
        :return: The effective username
        """
        effective_username = None
        if self.impersonate_user:
            effective_username = url.username
            if user_name:
                effective_username = user_name
            elif (
                hasattr(g, "user")
                and hasattr(g.user, "username")
                and g.user.username is not None
            ):
                effective_username = g.user.username
        return effective_username
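
    # Resolution order sketch: when ``impersonate_user`` is enabled, start from
    # the username embedded in the URL, prefer an explicit ``user_name`` argument
    # when one is passed, and otherwise fall back to ``g.user.username`` from the
    # Flask request context; when impersonation is disabled, None is returned.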

    @utils.memoized(watch=("impersonate_user", "sqlalchemy_uri_decrypted", "extra"))
    def get_sqla_engine(
        self,
        schema: Optional[str] = None,
        nullpool: bool = True,
        user_name: Optional[str] = None,
        source: Optional[utils.QuerySource] = None,
    ) -> Engine:
        extra = self.get_extra()
        sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted)
        self.db_engine_spec.adjust_database_uri(sqlalchemy_url, schema)
        effective_username = self.get_effective_user(sqlalchemy_url, user_name)
        # If using MySQL or Presto for example, will set url.username
        # If using Hive, will not do anything yet since that relies on a
        # configuration parameter instead.
        self.db_engine_spec.modify_url_for_impersonation(
            sqlalchemy_url, self.impersonate_user, effective_username
        )

        masked_url = self.get_password_masked_url(sqlalchemy_url)
        logger.debug("Database.get_sqla_engine(). Masked URL: %s", str(masked_url))

        params = extra.get("engine_params", {})
        if nullpool:
            params["poolclass"] = NullPool

        connect_args = params.get("connect_args", {})
        configuration = connect_args.get("configuration", {})

        # If using Hive, this will set hive.server2.proxy.user=$effective_username
        configuration.update(
            self.db_engine_spec.get_configuration_for_impersonation(
                str(sqlalchemy_url), self.impersonate_user, effective_username
            )
        )
        if configuration:
            connect_args["configuration"] = configuration
        if connect_args:
            params["connect_args"] = connect_args

        params.update(self.get_encrypted_extra())

        if DB_CONNECTION_MUTATOR:
            if not source and request and request.referrer:
                if "/superset/dashboard/" in request.referrer:
                    source = utils.QuerySource.DASHBOARD
                elif "/superset/explore/" in request.referrer:
                    source = utils.QuerySource.CHART
                elif "/superset/sqllab/" in request.referrer:
                    source = utils.QuerySource.SQL_LAB
            sqlalchemy_url, params = DB_CONNECTION_MUTATOR(
                sqlalchemy_url, params, effective_username, security_manager, source
            )
        return create_engine(sqlalchemy_url, **params)
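
    # Minimal usage sketch (the ``database`` variable and schema name are
    # illustrative, not part of this module):
    #
    #   engine = database.get_sqla_engine(schema="analytics", source=utils.QuerySource.SQL_LAB)
    #   with closing(engine.raw_connection()) as conn:
    #       ...
    #
    # A configured DB_CONNECTION_MUTATOR is called as
    # DB_CONNECTION_MUTATOR(sqlalchemy_url, params, effective_username,
    # security_manager, source) and must return the (sqlalchemy_url, params)
    # pair actually used to build the engine.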

    def get_reserved_words(self) -> Set[str]:
        return self.get_dialect().preparer.reserved_words

    def get_quoter(self) -> Callable[[str, Any], str]:
        return self.get_dialect().identifier_preparer.quote

    def get_df(  # pylint: disable=too-many-locals
        self,
        sql: str,
        schema: Optional[str] = None,
        mutator: Optional[Callable[[pd.DataFrame], None]] = None,
    ) -> pd.DataFrame:
        sqls = [str(s).strip(" ;") for s in sqlparse.parse(sql)]
        engine = self.get_sqla_engine(schema=schema)
        username = utils.get_username()

        def needs_conversion(df_series: pd.Series) -> bool:
            return not df_series.empty and isinstance(df_series[0], (list, dict))

        def _log_query(sql: str) -> None:
            if log_query:
                log_query(engine.url, sql, schema, username, __name__, security_manager)

        with closing(engine.raw_connection()) as conn:
            with closing(conn.cursor()) as cursor:
                for sql_ in sqls[:-1]:
                    _log_query(sql_)
                    self.db_engine_spec.execute(cursor, sql_)
                    cursor.fetchall()

                _log_query(sqls[-1])
                self.db_engine_spec.execute(cursor, sqls[-1])

                if cursor.description is not None:
                    columns = [col_desc[0] for col_desc in cursor.description]
                else:
                    columns = []

                df = pd.DataFrame.from_records(
                    data=list(cursor.fetchall()), columns=columns, coerce_float=True
                )

                if mutator:
                    mutator(df)

                for k, v in df.dtypes.items():
                    if v.type == numpy.object_ and needs_conversion(df[k]):
                        df[k] = df[k].apply(utils.json_dumps_w_dates)

                return df
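
    # Illustrative call (the SQL is hypothetical):
    #
    #   df = database.get_df("SET search_path TO analytics; SELECT 1 AS x", schema="analytics")
    #
    # every statement is executed in order, but only the last statement's result
    # set is returned as the DataFrame.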

    def compile_sqla_query(self, qry: Select, schema: Optional[str] = None) -> str:
        engine = self.get_sqla_engine(schema=schema)

        sql = str(qry.compile(engine, compile_kwargs={"literal_binds": True}))

        if (
            engine.dialect.identifier_preparer._double_percents  # pylint: disable=protected-access
        ):
            sql = sql.replace("%%", "%")

        return sql

    def select_star(  # pylint: disable=too-many-arguments
        self,
        table_name: str,
        schema: Optional[str] = None,
        limit: int = 100,
        show_cols: bool = False,
        indent: bool = True,
        latest_partition: bool = False,
        cols: Optional[List[Dict[str, Any]]] = None,
    ) -> str:
        """Generates a ``select *`` statement in the proper dialect"""
        eng = self.get_sqla_engine(schema=schema, source=utils.QuerySource.SQL_LAB)
        return self.db_engine_spec.select_star(
            self,
            table_name,
            schema=schema,
            engine=eng,
            limit=limit,
            show_cols=show_cols,
            indent=indent,
            latest_partition=latest_partition,
            cols=cols,
        )
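
    # For example (table name is illustrative), ``database.select_star("birth_names", limit=100)``
    # delegates to the engine spec, which renders something like
    # ``SELECT * FROM birth_names LIMIT 100`` in the target dialect.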

    def apply_limit_to_sql(self, sql: str, limit: int = 1000) -> str:
        return self.db_engine_spec.apply_limit_to_sql(sql, limit, self)

    def safe_sqlalchemy_uri(self) -> str:
        return self.sqlalchemy_uri

    @property
    def inspector(self) -> Inspector:
        engine = self.get_sqla_engine()
        return sqla.inspect(engine)

    @cache_util.memoized_func(
        key="db:{}:schema:None:table_list",
        attribute_in_key="id",
    )
    def get_all_table_names_in_database(
        self,
        cache: bool = False,
        cache_timeout: Optional[bool] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments."""
        if not self.allow_multi_schema_metadata_fetch:
            return []
        return self.db_engine_spec.get_all_datasource_names(self, "table")
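
    # The "{}" placeholder in the cache key is presumably filled with the value of
    # ``attribute_in_key`` by the decorator, so for a database with id 1 the key
    # would read "db:1:schema:None:table_list" (assumption based on the template).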

    @cache_util.memoized_func(
        key="db:{}:schema:None:view_list", attribute_in_key="id"
    )
    def get_all_view_names_in_database(
        self,
        cache: bool = False,
        cache_timeout: Optional[bool] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments."""
        if not self.allow_multi_schema_metadata_fetch:
            return []
        return self.db_engine_spec.get_all_datasource_names(self, "view")

    @cache_util.memoized_func(
        key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:table_list",  # type: ignore
        attribute_in_key="id",
    )
    def get_all_table_names_in_schema(
        self,
        schema: str,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param schema: schema name
        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: list of tables
        """
        try:
            tables = self.db_engine_spec.get_table_names(
                database=self, inspector=self.inspector, schema=schema
            )
            return [
                utils.DatasourceName(table=table, schema=schema) for table in tables
            ]
        except Exception as ex:  # pylint: disable=broad-except
            logger.exception(ex)

    @cache_util.memoized_func(
        key=lambda *args, **kwargs: f"db:{{}}:schema:{kwargs.get('schema')}:view_list",  # type: ignore
        attribute_in_key="id",
    )
    def get_all_view_names_in_schema(
        self,
        schema: str,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[utils.DatasourceName]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param schema: schema name
        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: list of views
        """
        try:
            views = self.db_engine_spec.get_view_names(
                database=self, inspector=self.inspector, schema=schema
            )
            return [utils.DatasourceName(table=view, schema=schema) for view in views]
        except Exception as ex:  # pylint: disable=broad-except
            logger.exception(ex)

    @cache_util.memoized_func(
        key="db:{}:schema_list", attribute_in_key="id"
    )
    def get_all_schema_names(
        self,
        cache: bool = False,
        cache_timeout: Optional[int] = None,
        force: bool = False,
    ) -> List[str]:
        """Parameters need to be passed as keyword arguments.

        For unused parameters, they are referenced in
        cache_util.memoized_func decorator.

        :param cache: whether cache is enabled for the function
        :param cache_timeout: timeout in seconds for the cache
        :param force: whether to force refresh the cache
        :return: schema list
        """
        return self.db_engine_spec.get_schema_names(self.inspector)

    @property
    def db_engine_spec(self) -> Type[db_engine_specs.BaseEngineSpec]:
        return db_engine_specs.engines.get(self.backend, db_engine_specs.BaseEngineSpec)

    @classmethod
    def get_db_engine_spec_for_backend(
        cls, backend: str
    ) -> Type[db_engine_specs.BaseEngineSpec]:
        return db_engine_specs.engines.get(backend, db_engine_specs.BaseEngineSpec)

    def grains(self) -> Tuple[TimeGrain, ...]:
        """Defines time granularity database-specific expressions.

        The idea here is to make it easy for users to change the time grain
        from a datetime (maybe the source grain is arbitrary timestamps, daily
        or 5 minutes increments) to another, "truncated" datetime. Since
        each database has slightly different but similar datetime functions,
        this allows a mapping between database engines and actual functions.
        """
        return self.db_engine_spec.get_time_grains()
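
    # Illustrative mapping only (not taken from this module): on PostgreSQL a
    # "day" grain would typically truncate a timestamp with an expression such
    # as DATE_TRUNC('day', <col>); the concrete per-engine expressions come from
    # the engine spec's get_time_grains().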

    def get_extra(self) -> Dict[str, Any]:
        return self.db_engine_spec.get_extra_params(self)

    def get_encrypted_extra(self) -> Dict[str, Any]:
        encrypted_extra = {}
        if self.encrypted_extra:
            try:
                encrypted_extra = json.loads(self.encrypted_extra)
            except json.JSONDecodeError as ex:
                logger.error(ex)
                raise ex
        return encrypted_extra

    def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
        extra = self.get_extra()
        meta = MetaData(**extra.get("metadata_params", {}))
        return Table(
            table_name,
            meta,
            schema=schema or None,
            autoload=True,
            autoload_with=self.get_sqla_engine(),
        )

    def get_columns(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.db_engine_spec.get_columns(self.inspector, table_name, schema)

    def get_indexes(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.inspector.get_indexes(table_name, schema)

    def get_pk_constraint(
        self, table_name: str, schema: Optional[str] = None
    ) -> Dict[str, Any]:
        return self.inspector.get_pk_constraint(table_name, schema)

    def get_foreign_keys(
        self, table_name: str, schema: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        return self.inspector.get_foreign_keys(table_name, schema)

    def get_schema_access_for_csv_upload(  # pylint: disable=invalid-name
        self,
    ) -> List[str]:
        allowed_databases = self.get_extra().get("schemas_allowed_for_csv_upload", [])
        if hasattr(g, "user"):
            extra_allowed_databases = config["ALLOWED_USER_CSV_SCHEMA_FUNC"](
                self, g.user
            )
            allowed_databases += extra_allowed_databases
        return sorted(set(allowed_databases))

    @property
    def sqlalchemy_uri_decrypted(self) -> str:
        conn = sqla.engine.url.make_url(self.sqlalchemy_uri)
        if custom_password_store:
            conn.password = custom_password_store(conn)
        else:
            conn.password = self.password
        return str(conn)

    @property
    def sql_url(self) -> str:
        return f"/superset/sql/{self.id}/"

    @hybrid_property
    def perm(self) -> str:
        return f"[{self.database_name}].(id:{self.id})"

    @perm.expression  # type: ignore
    def perm(cls) -> str:  # pylint: disable=no-self-argument
        return (
            "[" + cls.database_name + "].(id:" + expression.cast(cls.id, String) + ")"
        )

    def get_perm(self) -> str:
        return self.perm  # type: ignore

    def has_table(self, table: Table) -> bool:
        engine = self.get_sqla_engine()
        return engine.has_table(table.table_name, table.schema or None)

    def has_table_by_name(self, table_name: str, schema: Optional[str] = None) -> bool:
        engine = self.get_sqla_engine()
        return engine.has_table(table_name, schema)

    @utils.memoized
    def get_dialect(self) -> Dialect:
        sqla_url = url.make_url(self.sqlalchemy_uri_decrypted)
        return sqla_url.get_dialect()()  # pylint: disable=no-member


sqla.event.listen(Database, "after_insert", security_manager.set_perm)
sqla.event.listen(Database, "after_update", security_manager.set_perm)


class Log(Model):  # pylint: disable=too-few-public-methods
    """ORM object used to log Superset actions to the database"""

    __tablename__ = "logs"
    id = Column(Integer, primary_key=True)
    action = Column(String(512))
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    dashboard_id = Column(Integer)
    slice_id = Column(Integer)
    json = Column(Text)
    user = relationship(
        security_manager.user_model, backref="logs", foreign_keys=[user_id]
    )
    dttm = Column(DateTime, default=datetime.utcnow)
    duration_ms = Column(Integer)
    referrer = Column(String(1024))


class FavStar(Model):  # pylint: disable=too-few-public-methods
    __tablename__ = "favstar"
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    class_name = Column(String(50))
    obj_id = Column(Integer)
    dttm = Column(DateTime, default=datetime.utcnow)


# events for updating tags
if is_feature_enabled("TAGGING_SYSTEM"):
    sqla.event.listen(Dashboard, "after_insert", DashboardUpdater.after_insert)
    sqla.event.listen(Dashboard, "after_update", DashboardUpdater.after_update)
    sqla.event.listen(Dashboard, "after_delete", DashboardUpdater.after_delete)
    sqla.event.listen(FavStar, "after_insert", FavStarUpdater.after_insert)
    sqla.event.listen(FavStar, "after_delete", FavStarUpdater.after_delete)