chore: Migrate /superset/tables/* to API v1 (#22501)

This commit is contained in:
Diego Medina 2023-02-01 09:45:57 -03:00 committed by GitHub
parent ede18be08e
commit 02cd75be8d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 643 additions and 85 deletions

View File

@ -345,7 +345,7 @@
"AnnotationLayerRestApi.get_list": {
"properties": {
"changed_by": {
"$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User"
"$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User1"
},
"changed_on": {
"format": "date-time",
@ -356,7 +356,7 @@
"readOnly": true
},
"created_by": {
"$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User1"
"$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User"
},
"created_on": {
"format": "date-time",
@ -502,13 +502,13 @@
"AnnotationRestApi.get_list": {
"properties": {
"changed_by": {
"$ref": "#/components/schemas/AnnotationRestApi.get_list.User"
"$ref": "#/components/schemas/AnnotationRestApi.get_list.User1"
},
"changed_on_delta_humanized": {
"readOnly": true
},
"created_by": {
"$ref": "#/components/schemas/AnnotationRestApi.get_list.User1"
"$ref": "#/components/schemas/AnnotationRestApi.get_list.User"
},
"end_dttm": {
"format": "date-time",
@ -1768,7 +1768,7 @@
"type": "string"
},
"changed_by": {
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User1"
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User"
},
"changed_by_name": {
"readOnly": true
@ -1783,7 +1783,7 @@
"readOnly": true
},
"created_by": {
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User3"
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User2"
},
"created_on_delta_humanized": {
"readOnly": true
@ -1830,10 +1830,10 @@
"type": "string"
},
"last_saved_by": {
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User"
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User3"
},
"owners": {
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User2"
"$ref": "#/components/schemas/ChartDataRestApi.get_list.User1"
},
"params": {
"nullable": true,
@ -1897,10 +1897,6 @@
"maxLength": 64,
"type": "string"
},
"id": {
"format": "int32",
"type": "integer"
},
"last_name": {
"maxLength": 64,
"type": "string"
@ -1913,23 +1909,6 @@
"type": "object"
},
"ChartDataRestApi.get_list.User1": {
"properties": {
"first_name": {
"maxLength": 64,
"type": "string"
},
"last_name": {
"maxLength": 64,
"type": "string"
}
},
"required": [
"first_name",
"last_name"
],
"type": "object"
},
"ChartDataRestApi.get_list.User2": {
"properties": {
"first_name": {
"maxLength": 64,
@ -1955,6 +1934,27 @@
],
"type": "object"
},
"ChartDataRestApi.get_list.User2": {
"properties": {
"first_name": {
"maxLength": 64,
"type": "string"
},
"id": {
"format": "int32",
"type": "integer"
},
"last_name": {
"maxLength": 64,
"type": "string"
}
},
"required": [
"first_name",
"last_name"
],
"type": "object"
},
"ChartDataRestApi.get_list.User3": {
"properties": {
"first_name": {
@ -2560,7 +2560,7 @@
"type": "string"
},
"changed_by": {
"$ref": "#/components/schemas/ChartRestApi.get_list.User1"
"$ref": "#/components/schemas/ChartRestApi.get_list.User"
},
"changed_by_name": {
"readOnly": true
@ -2575,7 +2575,7 @@
"readOnly": true
},
"created_by": {
"$ref": "#/components/schemas/ChartRestApi.get_list.User3"
"$ref": "#/components/schemas/ChartRestApi.get_list.User2"
},
"created_on_delta_humanized": {
"readOnly": true
@ -2622,10 +2622,10 @@
"type": "string"
},
"last_saved_by": {
"$ref": "#/components/schemas/ChartRestApi.get_list.User"
"$ref": "#/components/schemas/ChartRestApi.get_list.User3"
},
"owners": {
"$ref": "#/components/schemas/ChartRestApi.get_list.User2"
"$ref": "#/components/schemas/ChartRestApi.get_list.User1"
},
"params": {
"nullable": true,
@ -2689,10 +2689,6 @@
"maxLength": 64,
"type": "string"
},
"id": {
"format": "int32",
"type": "integer"
},
"last_name": {
"maxLength": 64,
"type": "string"
@ -2705,23 +2701,6 @@
"type": "object"
},
"ChartRestApi.get_list.User1": {
"properties": {
"first_name": {
"maxLength": 64,
"type": "string"
},
"last_name": {
"maxLength": 64,
"type": "string"
}
},
"required": [
"first_name",
"last_name"
],
"type": "object"
},
"ChartRestApi.get_list.User2": {
"properties": {
"first_name": {
"maxLength": 64,
@ -2747,6 +2726,27 @@
],
"type": "object"
},
"ChartRestApi.get_list.User2": {
"properties": {
"first_name": {
"maxLength": 64,
"type": "string"
},
"id": {
"format": "int32",
"type": "integer"
},
"last_name": {
"maxLength": 64,
"type": "string"
}
},
"required": [
"first_name",
"last_name"
],
"type": "object"
},
"ChartRestApi.get_list.User3": {
"properties": {
"first_name": {
@ -3027,13 +3027,13 @@
"CssTemplateRestApi.get_list": {
"properties": {
"changed_by": {
"$ref": "#/components/schemas/CssTemplateRestApi.get_list.User"
"$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1"
},
"changed_on_delta_humanized": {
"readOnly": true
},
"created_by": {
"$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1"
"$ref": "#/components/schemas/CssTemplateRestApi.get_list.User"
},
"created_on": {
"format": "date-time",
@ -4056,7 +4056,7 @@
"type": "boolean"
},
"allow_run_async": {
"description": "Operate the database in asynchronous mode, meaning that the queries are executed on remote workers as opposed to on the web server itself. This assumes that you have a Celery worker setup as well as a results backend. Refer to the installation docs for more information.",
"description": "Operate the database in asynchronous mode, meaning that the queries are executed on remote workers as opposed to on the web server itself. This assumes that you have a Celery worker setup as well as a results backend. Refer to the installation docs for more information.",
"type": "boolean"
},
"cache_timeout": {
@ -4169,7 +4169,7 @@
"type": "boolean"
},
"allow_run_async": {
"description": "Operate the database in asynchronous mode, meaning that the queries are executed on remote workers as opposed to on the web server itself. This assumes that you have a Celery worker setup as well as a results backend. Refer to the installation docs for more information.",
"description": "Operate the database in asynchronous mode, meaning that the queries are executed on remote workers as opposed to on the web server itself. This assumes that you have a Celery worker setup as well as a results backend. Refer to the installation docs for more information.",
"type": "boolean"
},
"cache_timeout": {
@ -4288,6 +4288,23 @@
},
"type": "object"
},
"DatabaseTablesResponse": {
"properties": {
"extra": {
"description": "Extra data used to specify column metadata",
"type": "object"
},
"type": {
"description": "table or view",
"type": "string"
},
"value": {
"description": "The table or view name",
"type": "string"
}
},
"type": "object"
},
"DatabaseTestConnectionSchema": {
"properties": {
"configuration_method": {
@ -9270,6 +9287,20 @@
},
"type": "object"
},
"database_tables_query_schema": {
"properties": {
"force": {
"type": "boolean"
},
"schema_name": {
"type": "string"
}
},
"required": [
"schema_name"
],
"type": "object"
},
"get_delete_ids_schema": {
"items": {
"type": "integer"
@ -15549,6 +15580,80 @@
]
}
},
"/api/v1/database/{pk}/tables/": {
"get": {
"parameters": [
{
"description": "The database id",
"in": "path",
"name": "pk",
"required": true,
"schema": {
"type": "integer"
}
},
{
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/database_tables_query_schema"
}
}
},
"in": "query",
"name": "q"
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"properties": {
"count": {
"type": "integer"
},
"result": {
"description": "A List of tables for given database",
"items": {
"$ref": "#/components/schemas/DatabaseTablesResponse"
},
"type": "array"
}
},
"type": "object"
}
}
},
"description": "Tables list"
},
"400": {
"$ref": "#/components/responses/400"
},
"401": {
"$ref": "#/components/responses/401"
},
"404": {
"$ref": "#/components/responses/404"
},
"422": {
"$ref": "#/components/responses/422"
},
"500": {
"$ref": "#/components/responses/500"
}
},
"security": [
{
"jwt": []
}
],
"summary": "Get a list of tables for given database",
"tags": [
"Database"
]
}
},
"/api/v1/database/{pk}/validate_sql/": {
"post": {
"description": "Validates arbitrary SQL.",
@ -16686,6 +16791,99 @@
]
}
},
"/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/": {
"get": {
"parameters": [
{
"description": "The type of datasource",
"in": "path",
"name": "datasource_type",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "The id of the datasource",
"in": "path",
"name": "datasource_id",
"required": true,
"schema": {
"type": "integer"
}
},
{
"description": "The name of the column to get values for",
"in": "path",
"name": "column_name",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"properties": {
"result": {
"items": {
"oneOf": [
{
"type": "string"
},
{
"type": "integer"
},
{
"type": "number"
},
{
"type": "boolean"
},
{
"type": "object"
}
]
},
"type": "array"
}
},
"type": "object"
}
}
},
"description": "A List of distinct values for the column"
},
"400": {
"$ref": "#/components/responses/400"
},
"401": {
"$ref": "#/components/responses/401"
},
"403": {
"$ref": "#/components/responses/403"
},
"404": {
"$ref": "#/components/responses/404"
},
"500": {
"$ref": "#/components/responses/500"
}
},
"security": [
{
"jwt": []
}
],
"summary": "Get possible values for a datasource column",
"tags": [
"Datasources"
]
}
},
"/api/v1/embedded_dashboard/{uuid}": {
"get": {
"description": "Get a report schedule log",

View File

@ -81,7 +81,7 @@ describe('SqlLab query panel', () => {
});
it.skip('successfully saves a query', () => {
cy.intercept('superset/tables/**').as('getTables');
cy.intercept('api/v1/database/**/tables/**').as('getTables');
cy.intercept('savedqueryviewapi/**').as('getSavedQuery');
const query =

View File

@ -54,7 +54,7 @@ jest.mock('src/SqlLab/components/SqlEditorLeftBar', () => () => (
const MOCKED_SQL_EDITOR_HEIGHT = 500;
fetchMock.get('glob:*/api/v1/database/*', { result: [] });
fetchMock.get('glob:*/superset/tables/*', { options: [] });
fetchMock.get('glob:*/api/v1/database/*/tables/*', { options: [] });
fetchMock.post('glob:*/sqllab/execute/*', { result: [] });
const middlewares = [thunk];

View File

@ -42,14 +42,14 @@ const mockStore = configureStore(middlewares);
const store = mockStore(initialState);
fetchMock.get('glob:*/api/v1/database/*/schemas/?*', { result: [] });
fetchMock.get('glob:*/superset/tables/**', {
options: [
fetchMock.get('glob:*/api/v1/database/*/tables/*', {
count: 1,
result: [
{
label: 'ab_user',
value: 'ab_user',
},
],
tableLength: 1,
});
const renderAndWait = (props, store) =>

View File

@ -51,7 +51,8 @@ const getSchemaMockFunction = async () =>
const getTableMockFunction = async () =>
({
json: {
options: [
count: 4,
result: [
{ label: 'table_a', value: 'table_a' },
{ label: 'table_b', value: 'table_b' },
{ label: 'table_c', value: 'table_c' },

View File

@ -23,7 +23,8 @@ import { useTables } from './tables';
const fakeApiResult = {
json: {
options: [
count: 2,
result: [
{
id: 1,
name: 'fake api result1',
@ -35,13 +36,13 @@ const fakeApiResult = {
label: 'fake api label2',
},
],
tableLength: 2,
},
};
const fakeHasMoreApiResult = {
json: {
options: [
count: 4,
result: [
{
id: 1,
name: 'fake api result1',
@ -53,17 +54,16 @@ const fakeHasMoreApiResult = {
label: 'fake api label2',
},
],
tableLength: 4,
},
};
const expectedData = {
...fakeApiResult.json,
options: [...fakeApiResult.json.result],
hasMore: false,
};
const expectedHasMoreData = {
...fakeHasMoreApiResult.json,
options: [...fakeHasMoreApiResult.json.result],
hasMore: true,
};
@ -103,7 +103,9 @@ describe('useTables hook', () => {
});
expect(SupersetClient.get).toHaveBeenCalledTimes(1);
expect(SupersetClient.get).toHaveBeenCalledWith({
endpoint: `/superset/tables/${expectDbId}/${expectedSchema}/${forceRefresh}/`,
endpoint: `/api/v1/database/${expectDbId}/tables/?q=(force:!${
forceRefresh ? 't' : 'f'
},schema_name:${expectedSchema})`,
});
expect(result.current.data).toEqual(expectedData);
await act(async () => {
@ -111,7 +113,7 @@ describe('useTables hook', () => {
});
expect(SupersetClient.get).toHaveBeenCalledTimes(2);
expect(SupersetClient.get).toHaveBeenCalledWith({
endpoint: `/superset/tables/${expectDbId}/${expectedSchema}/true/`,
endpoint: `/api/v1/database/${expectDbId}/tables/?q=(force:!t,schema_name:${expectedSchema})`,
});
expect(result.current.data).toEqual(expectedData);
});

View File

@ -18,6 +18,7 @@
*/
import { useRef } from 'react';
import { useQuery, UseQueryOptions } from 'react-query';
import rison from 'rison';
import { SupersetClient } from '@superset-ui/core';
export type FetchTablesQueryParams = {
@ -39,11 +40,15 @@ export interface Table {
}
type QueryData = {
json: { options: Table[]; tableLength: number };
json: {
count: number;
result: Table[];
};
response: Response;
};
export type Data = QueryData['json'] & {
export type Data = {
options: Table[];
hasMore: boolean;
};
@ -53,10 +58,15 @@ export function fetchTables({
forceRefresh,
}: FetchTablesQueryParams) {
const encodedSchema = schema ? encodeURIComponent(schema) : '';
const params = rison.encode({
force: forceRefresh,
schema_name: encodedSchema,
});
// TODO: Would be nice to add pagination in a follow-up. Needs endpoint changes.
const endpoint = `/superset/tables/${
const endpoint = `/api/v1/database/${
dbId ?? 'undefined'
}/${encodedSchema}/${forceRefresh}/`;
}/tables/?q=${params}`;
return SupersetClient.get({ endpoint }) as Promise<QueryData>;
}
@ -72,8 +82,8 @@ export function useTables(options: Params) {
() => fetchTables({ ...params, forceRefresh: forceRefreshRef.current }),
{
select: ({ json }) => ({
...json,
hasMore: json.tableLength > json.options.length,
options: json.result,
hasMore: json.count > json.result.length,
}),
enabled: Boolean(dbId && schema),
onSuccess,

View File

@ -24,7 +24,7 @@ import LeftPanel from 'src/views/CRUD/data/dataset/AddDataset/LeftPanel';
const databasesEndpoint = 'glob:*/api/v1/database/?q*';
const schemasEndpoint = 'glob:*/api/v1/database/*/schemas*';
const tablesEndpoint = 'glob:*/superset/tables*';
const tablesEndpoint = 'glob:*/api/v1/database/*/tables/?q*';
fetchMock.get(databasesEndpoint, {
count: 2,
@ -136,8 +136,8 @@ fetchMock.get(schemasEndpoint, {
});
fetchMock.get(tablesEndpoint, {
tableLength: 3,
options: [
count: 3,
result: [
{ value: 'Sheet1', type: 'table', extra: null },
{ value: 'Sheet2', type: 'table', extra: null },
{ value: 'Sheet3', type: 'table', extra: null },

View File

@ -17,6 +17,7 @@
* under the License.
*/
import React, { useEffect, useState, SetStateAction, Dispatch } from 'react';
import rison from 'rison';
import {
SupersetClient,
t,
@ -177,7 +178,7 @@ export default function LeftPanel({
const getTablesList = (url: string) => {
SupersetClient.get({ url })
.then(({ json }) => {
const options: TableOption[] = json.options.map((table: Table) => {
const options: TableOption[] = json.result.map((table: Table) => {
const option: TableOption = {
value: table.value,
label: <TableOption table={table} />,
@ -213,9 +214,12 @@ export default function LeftPanel({
useEffect(() => {
if (loadTables) {
const endpoint = encodeURI(
`/superset/tables/${dbId}/${encodedSchema}/${refresh}/`,
);
const params = rison.encode({
force: refresh,
schema_name: encodedSchema,
});
const endpoint = `/api/v1/database/${dbId}/tables/?q=${params}`;
getTablesList(endpoint);
}
}, [loadTables]);

View File

@ -118,6 +118,7 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
"put": "write",
"related": "read",
"related_objects": "read",
"tables": "read",
"schemas": "read",
"select_star": "read",
"table_metadata": "read",

View File

@ -44,11 +44,13 @@ from superset.databases.commands.exceptions import (
DatabaseDeleteFailedError,
DatabaseInvalidError,
DatabaseNotFoundError,
DatabaseTablesUnexpectedError,
DatabaseUpdateFailedError,
InvalidParametersError,
)
from superset.databases.commands.export import ExportDatabasesCommand
from superset.databases.commands.importers.dispatcher import ImportDatabasesCommand
from superset.databases.commands.tables import TablesDatabaseCommand
from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
from superset.databases.commands.update import UpdateDatabaseCommand
from superset.databases.commands.validate import ValidateDatabaseParametersCommand
@ -58,10 +60,12 @@ from superset.databases.decorators import check_datasource_access
from superset.databases.filters import DatabaseFilter, DatabaseUploadEnabledFilter
from superset.databases.schemas import (
database_schemas_query_schema,
database_tables_query_schema,
DatabaseFunctionNamesResponse,
DatabasePostSchema,
DatabasePutSchema,
DatabaseRelatedObjectsResponse,
DatabaseTablesResponse,
DatabaseTestConnectionSchema,
DatabaseValidateParametersSchema,
get_export_ids_schema,
@ -104,6 +108,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
RouteMethod.EXPORT,
RouteMethod.IMPORT,
"tables",
"table_metadata",
"table_extra_metadata",
"select_star",
@ -210,6 +215,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
apispec_parameter_schemas = {
"database_schemas_query_schema": database_schemas_query_schema,
"database_tables_query_schema": database_tables_query_schema,
"get_export_ids_schema": get_export_ids_schema,
}
@ -217,6 +223,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
openapi_spec_component_schemas = (
DatabaseFunctionNamesResponse,
DatabaseRelatedObjectsResponse,
DatabaseTablesResponse,
DatabaseTestConnectionSchema,
DatabaseValidateParametersSchema,
TableExtraMetadataResponseSchema,
@ -555,6 +562,73 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
except SupersetException as ex:
return self.response(ex.status, message=ex.message)
@expose("/<int:pk>/tables/")
@protect()
@safe
@rison(database_tables_query_schema)
@statsd_metrics
@event_logger.log_this_with_context(
    action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" f".tables",
    log_to_statsd=False,
)
def tables(self, pk: int, **kwargs: Any) -> FlaskResponse:
    """Get a list of tables for given database
    ---
    get:
      summary: Get a list of tables for given database
      parameters:
      - in: path
        schema:
          type: integer
        name: pk
        description: The database id
      - in: query
        name: q
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/database_tables_query_schema'
      responses:
        200:
          description: Tables list
          content:
            application/json:
              schema:
                type: object
                properties:
                  count:
                    type: integer
                  result:
                    description: >-
                      A List of tables for given database
                    type: array
                    items:
                      $ref: '#/components/schemas/DatabaseTablesResponse'
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    # The @rison decorator has already validated the ``q`` parameter
    # against database_tables_query_schema and decoded it here.
    query_params = kwargs["rison"]
    force_refresh = query_params.get("force", False)
    schema = query_params.get("schema_name", "")

    try:
        # Delegate the actual listing (plus access checks) to the command.
        payload = TablesDatabaseCommand(pk, schema, force_refresh).run()
        return self.response(200, **payload)
    except DatabaseNotFoundError:
        return self.response_404()
    except SupersetException as ex:
        return self.response(ex.status, message=ex.message)
    except DatabaseTablesUnexpectedError as ex:
        return self.response_422(ex.message)
@expose("/<int:pk>/table/<table_name>/<schema_name>/", methods=["GET"])
@protect()
@check_datasource_access

View File

@ -137,6 +137,11 @@ class DatabaseTestConnectionUnexpectedError(SupersetErrorsException):
message = _("Unexpected error occurred, please check your logs for details")
# Raised by TablesDatabaseCommand when listing tables fails for any reason
# other than a known Superset error; the REST layer maps it to HTTP 422.
class DatabaseTablesUnexpectedError(Exception):
    # NOTE(review): unlike the sibling error classes above, this subclasses
    # plain Exception — presumably so it is NOT swallowed by
    # ``except SupersetException`` handlers; confirm before changing the base.
    status = 422
    message = _("Unexpected error occurred, please check your logs for details")
class NoValidatorConfigFoundError(SupersetErrorException):
status = 422
message = _("no SQL validator is configured")

View File

@ -0,0 +1,113 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, cast, Dict
from superset.commands.base import BaseCommand
from superset.connectors.sqla.models import SqlaTable
from superset.databases.commands.exceptions import (
DatabaseNotFoundError,
DatabaseTablesUnexpectedError,
)
from superset.databases.dao import DatabaseDAO
from superset.exceptions import SupersetException
from superset.extensions import db, security_manager
from superset.models.core import Database
from superset.utils.core import DatasourceName
logger = logging.getLogger(__name__)
class TablesDatabaseCommand(BaseCommand):
    """List the tables and views of one schema of a database.

    ``run`` returns ``{"count": <int>, "result": [<option>, ...]}`` where each
    option dict carries ``value`` (the table/view name), ``type`` (``"table"``
    or ``"view"``) and, for tables only, ``extra`` column metadata taken from a
    matching ``SqlaTable`` row (``None`` when no row exists).  Results are
    filtered to what the current user may access and sorted by name.
    """

    _model: Database

    def __init__(self, db_id: int, schema_name: str, force: bool):
        # Database primary key; resolved into ``self._model`` by validate().
        self._db_id = db_id
        # Schema whose tables and views are listed.
        self._schema_name = schema_name
        # When True, bypass the cached table/view name listings.
        self._force = force

    def _accessible_datasources(self, fetch_names: Any) -> Any:
        """Fetch datasource names via *fetch_names* and keep only those the
        current user is allowed to see."""
        return security_manager.get_datasources_accessible_by_user(
            database=self._model,
            schema=self._schema_name,
            datasource_names=sorted(
                DatasourceName(*datasource_name)
                for datasource_name in fetch_names(
                    schema=self._schema_name,
                    force=self._force,
                    cache=self._model.table_cache_enabled,
                    cache_timeout=self._model.table_cache_timeout,
                )
            ),
        )

    def run(self) -> Dict[str, Any]:
        """Build the payload described in the class docstring.

        Raises DatabaseNotFoundError (via validate) for an unknown database,
        re-raises SupersetException unchanged, and wraps any other failure in
        DatabaseTablesUnexpectedError.
        """
        self.validate()
        try:
            tables = self._accessible_datasources(
                self._model.get_all_table_names_in_schema
            )
            views = self._accessible_datasources(
                self._model.get_all_view_names_in_schema
            )

            # Column metadata registered in Superset for tables of this schema.
            extra_dict_by_name = {
                table.name: table.extra_dict
                for table in db.session.query(SqlaTable)
                .filter(
                    SqlaTable.database_id == self._model.id,
                    SqlaTable.schema == self._schema_name,
                )
                .all()
            }

            options = sorted(
                [
                    {
                        "value": table.table,
                        "type": "table",
                        "extra": extra_dict_by_name.get(table.table),
                    }
                    for table in tables
                ]
                + [
                    {
                        "value": view.table,
                        "type": "view",
                    }
                    for view in views
                ],
                key=lambda item: item["value"],
            )
            return {"count": len(tables) + len(views), "result": options}
        except SupersetException:
            # Known domain errors are translated to HTTP responses by the API
            # layer; re-raise without wrapping (bare raise keeps the traceback).
            raise
        except Exception as ex:
            raise DatabaseTablesUnexpectedError(ex) from ex

    def validate(self) -> None:
        """Resolve the database id to a model or raise DatabaseNotFoundError."""
        self._model = cast(Database, DatabaseDAO.find_by_id(self._db_id))
        if not self._model:
            raise DatabaseNotFoundError()

View File

@ -43,6 +43,15 @@ database_schemas_query_schema = {
"properties": {"force": {"type": "boolean"}},
}
# JSON Schema for the rison-encoded ``q`` query parameter of
# ``GET /api/v1/database/<pk>/tables/``: ``schema_name`` selects the schema
# to list (required); ``force`` (optional) bypasses the table-name cache.
database_tables_query_schema = {
    "type": "object",
    "properties": {
        "force": {"type": "boolean"},
        "schema_name": {"type": "string"},
    },
    "required": ["schema_name"],
}
database_name_description = "A database name to identify this connection."
port_description = "Port number for the database connection."
cache_timeout_description = (
@ -573,6 +582,12 @@ class SchemasResponseSchema(Schema):
result = fields.List(fields.String(description="A database schema name"))
# Response-item schema for ``GET /api/v1/database/<pk>/tables/``; each item
# describes one table or view of the requested schema.  (Deliberately no class
# docstring: apispec could pick it up as the component description.)
class DatabaseTablesResponse(Schema):
    extra = fields.Dict(description="Extra data used to specify column metadata")
    type = fields.String(description="table or view")
    value = fields.String(description="The table or view name")
class ValidateSQLRequest(Schema):
sql = fields.String(required=True, description="SQL statement to validate")
schema = fields.String(required=False, allow_none=True)

View File

@ -1143,6 +1143,7 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
@event_logger.log_this
@expose("/tables/<int:db_id>/<schema>/")
@expose("/tables/<int:db_id>/<schema>/<force_refresh>/")
@deprecated()
def tables( # pylint: disable=no-self-use
self,
db_id: int,

View File

@ -1782,6 +1782,66 @@ class TestDatabaseApi(SupersetTestCase):
)
self.assertEqual(rv.status_code, 400)
def test_database_tables(self):
    """
    Database API: Test database tables
    """
    self.login(username="admin")
    database = db.session.query(Database).filter_by(database_name="examples").one()

    schema_name = self.default_schema_backend_map[database.backend]
    rv = self.client.get(
        f"api/v1/database/{database.id}/tables/?q={prison.dumps({'schema_name': schema_name})}"
    )

    self.assertEqual(rv.status_code, 200)
    if database.backend == "postgresql":
        response = json.loads(rv.data.decode("utf-8"))
        # get_all_table_names_in_schema yields (table, schema) tuples; keep
        # only the table names to compare against the endpoint's result.
        table_names = [
            s[0] for s in database.get_all_table_names_in_schema(schema_name)
        ]
        # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
        self.assertEqual(response["count"], len(table_names))
        for option in response["result"]:
            self.assertEqual(option["extra"], None)
            self.assertEqual(option["type"], "table")
            self.assertTrue(option["value"] in table_names)
def test_database_tables_not_found(self):
    """
    Database API: Test database tables not found
    """
    # A gamma user cannot access the example database, so the endpoint
    # responds as if the database did not exist.
    self.logout()
    self.login(username="gamma")
    database = get_example_database()
    query = prison.dumps({"schema_name": "non_existent"})
    rv = self.client.get(f"api/v1/database/{database.id}/tables/?q={query}")
    self.assertEqual(rv.status_code, 404)
def test_database_tables_invalid_query(self):
    """
    Database API: Test database tables with invalid query
    """
    # keyword form for consistency with the sibling tests' login calls
    self.login(username="admin")
    database = db.session.query(Database).first()
    # "force" must be a boolean; a string value fails rison schema validation.
    rv = self.client.get(
        f"api/v1/database/{database.id}/tables/?q={prison.dumps({'force': 'nop'})}"
    )
    self.assertEqual(rv.status_code, 400)
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
def test_database_tables_unexpected_error(self, mock_can_access_database):
    """
    Database API: Test database tables with unexpected error
    """
    self.login(username="admin")
    database = db.session.query(Database).filter_by(database_name="examples").one()
    # Force an arbitrary failure inside the command so the endpoint
    # surfaces it as HTTP 422.
    mock_can_access_database.side_effect = Exception("Test Error")

    query = prison.dumps({"schema_name": "main"})
    rv = self.client.get(f"api/v1/database/{database.id}/tables/?q={query}")
    self.assertEqual(rv.status_code, 422)
def test_test_connection(self):
"""
Database API: Test test connection

View File

@ -31,17 +31,20 @@ from superset.databases.commands.exceptions import (
DatabaseInvalidError,
DatabaseNotFoundError,
DatabaseSecurityUnsafeError,
DatabaseTablesUnexpectedError,
DatabaseTestConnectionDriverError,
DatabaseTestConnectionUnexpectedError,
)
from superset.databases.commands.export import ExportDatabasesCommand
from superset.databases.commands.importers.v1 import ImportDatabasesCommand
from superset.databases.commands.tables import TablesDatabaseCommand
from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
from superset.databases.commands.validate import ValidateDatabaseParametersCommand
from superset.databases.schemas import DatabaseTestConnectionSchema
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
SupersetErrorsException,
SupersetException,
SupersetSecurityException,
SupersetTimeoutException,
)
@ -886,3 +889,74 @@ def test_validate_partial_invalid_hostname(is_hostname_valid, app_context):
},
),
]
class TestTablesDatabaseCommand(SupersetTestCase):
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
def test_database_tables_list_with_unknown_database(self, mock_find_by_id):
    # When the DAO cannot resolve the id, validate() must fail fast.
    mock_find_by_id.return_value = None
    command = TablesDatabaseCommand(1, "test", False)

    with pytest.raises(DatabaseNotFoundError) as excinfo:
        command.run()
    assert str(excinfo.value) == "Database not found."
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_superset_exception(
    self, mock_g, mock_can_access_database, mock_find_by_id
):
    database = get_example_database()
    if database.backend == "mysql":
        # NOTE(review): silently skipped on MySQL — presumably the "main"
        # schema used below does not exist on that backend; confirm.
        return

    mock_find_by_id.return_value = database
    # Known Superset errors must propagate unchanged, not be wrapped.
    mock_can_access_database.side_effect = SupersetException("Test Error")
    mock_g.user = security_manager.find_user("admin")

    command = TablesDatabaseCommand(database.id, "main", False)
    with pytest.raises(SupersetException) as excinfo:
        command.run()
    assert str(excinfo.value) == "Test Error"
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_exception(
    self, mock_g, mock_can_access_database, mock_find_by_id
):
    database = get_example_database()
    mock_find_by_id.return_value = database
    # An arbitrary failure must be wrapped in DatabaseTablesUnexpectedError.
    mock_can_access_database.side_effect = Exception("Test Error")
    mock_g.user = security_manager.find_user("admin")

    command = TablesDatabaseCommand(database.id, "main", False)
    with pytest.raises(DatabaseTablesUnexpectedError) as excinfo:
        command.run()
    expected = "Unexpected error occurred, please check your logs for details"
    assert str(excinfo.value) == expected
@mock.patch("superset.databases.dao.DatabaseDAO.find_by_id")
@mock.patch("superset.security.manager.SupersetSecurityManager.can_access_database")
@mock.patch("superset.utils.core.g")
def test_database_tables_list_tables(
self, mock_g, mock_can_access_database, mock_find_by_id
):
database = get_example_database()
mock_find_by_id.return_value = database
mock_can_access_database.return_value = True
mock_g.user = security_manager.find_user("admin")
schema_name = self.default_schema_backend_map[database.backend]
if database.backend == "postgresql" or database.backend == "mysql":
return
command = TablesDatabaseCommand(database.id, schema_name, False)
result = command.run()
assert result["count"] > 0
assert len(result["result"]) > 0
assert len(result["result"]) == result["count"]