mirror of https://github.com/apache/superset.git
Revert "Removing uniqueness constraints on tables table" (#6777)
* Revert "creating new circular-json safe stringify and replacing one call (#6772)" — this reverts commit 11a7ad00b7.
* Revert "Improve Unicode support for MSSQL (#6690)" — this reverts commit c44ae612df.
* Revert "Fix uniqueness constraints on tables table (#6718)" — this reverts commit c4fb7a0a87.
This commit is contained in:
parent
817783f466
commit
2631558ac4
|
@ -253,10 +253,7 @@ class SqlaTable(Model, BaseDatasource):
|
|||
owner_class = security_manager.user_model
|
||||
|
||||
__tablename__ = 'tables'
|
||||
__table_args__ = (UniqueConstraint('database_id',
|
||||
'schema',
|
||||
'table_name',
|
||||
name='uq_table_in_db_schema'),)
|
||||
__table_args__ = (UniqueConstraint('database_id', 'table_name'),)
|
||||
|
||||
table_name = Column(String(250))
|
||||
main_dttm_col = Column(String(250))
|
||||
|
|
|
@ -42,7 +42,7 @@ handlers = console
|
|||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = INFO
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
"""make_table_unique_within_db_and_schema
|
||||
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
Revision ID: 8d49a37823bf
|
||||
Revises: 18dc26817ad2
|
||||
Create Date: 2019-01-20 11:44:14.640628
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '8d49a37823bf'
|
||||
down_revision = '18dc26817ad2'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from superset.utils.core import generic_find_uq_constraint_name
|
||||
from collections import OrderedDict
|
||||
|
||||
def is_unique_constraint(constraint):
    """Return True iff *constraint* is a SQLAlchemy ``UniqueConstraint``.

    The original ``constraint and isinstance(...)`` returned the falsy
    operand itself (e.g. ``None``) instead of a boolean; ``isinstance``
    already yields ``False`` for ``None``, so call it directly. Callers
    only test truthiness, so this is backward-compatible.
    """
    return isinstance(constraint, sa.UniqueConstraint)
|
||||
|
||||
def is_sqlite():
    """Return a truthy value iff the current bind's dialect is SQLite.

    Mirrors the original chained-``and`` expression exactly: each missing
    link (bind, dialect, dialect name) short-circuits by returning that
    falsy value; otherwise the prefix test's boolean is returned.
    """
    bind = op.get_bind()
    if not bind:
        return bind
    dialect = bind.dialect
    if not dialect:
        return dialect
    dialect_name = dialect.name
    if not dialect_name:
        return dialect_name
    return dialect_name.startswith('sqlite')
|
||||
|
||||
def upgrade():
    """Replace the unique constraint on ``tables`` with a composite one.

    Creates ``uq_table_in_db_schema`` over
    (``database_id``, ``schema``, ``table_name``) and removes any
    pre-existing unique constraints on the ``tables`` table.
    """
    bind = op.get_bind()
    # Reflect the live database to discover existing unique constraints.
    insp = sa.engine.reflection.Inspector.from_engine(bind)
    constraints = insp.get_unique_constraints('tables')
    table_new_uniq_constraint = ['database_id', 'schema', 'table_name']
    # get_unique_constraints may return a falsy value; normalize so the
    # drop loop below can iterate safely.
    if not constraints:
        constraints = []
    # Sqlite cannot handle constraint change and has to recreate the table
    if is_sqlite():
        # Reflect the current table definition so batch_alter_table can
        # rebuild it via its copy-and-rename strategy.
        existing_table = sa.Table(
            'tables', sa.MetaData(),
            autoload=True,
            autoload_with=op.get_bind())
        # Strip the old unique constraints from the reflected definition so
        # the recreated table carries only the new composite constraint.
        existing_table.constraints = set([c for c in existing_table.constraints if not is_unique_constraint(c)])
        # We don't want to preserve the existing table_args for the tables table
        with op.batch_alter_table('tables', copy_from=existing_table, recreate="always") as batch_op:
            batch_op.create_unique_constraint('uq_table_in_db_schema', table_new_uniq_constraint)
    else:
        op.create_unique_constraint('uq_table_in_db_schema', 'tables', table_new_uniq_constraint)
        # and for other databases we need to explicitly remove the earlier constraints
        # otherwise they don't get removed as with above copy_from approach
        for c in constraints:
            # Reflected constraints are dicts; unnamed ones cannot be
            # dropped by name, so they are skipped.
            name = c.get('name', None)
            if name:
                op.drop_constraint(name, 'tables', type_='unique')
|
||||
|
||||
def downgrade():
    """Restore the previous unique constraint on ``tables``.

    Re-creates ``uq_tables_table_name`` over
    (``database_id``, ``table_name``) and drops the composite
    ``uq_table_in_db_schema`` constraint added by :func:`upgrade`.
    """
    table_name_existing_unique = ['database_id', 'table_name']
    if is_sqlite():
        # SQLite cannot alter constraints in place; rebuild the table via
        # batch mode and apply both constraint changes in one recreate.
        with op.batch_alter_table('tables', recreate="always") as batch_op:
            batch_op.create_unique_constraint(
                'uq_tables_table_name',
                table_name_existing_unique)
            batch_op.drop_constraint('uq_table_in_db_schema', type_='unique')
    else:
        # Other backends support direct DDL for both operations.
        op.create_unique_constraint('uq_tables_table_name', 'tables', table_name_existing_unique)
        op.drop_constraint('uq_table_in_db_schema', 'tables', type_='unique')
|
|
@ -97,13 +97,10 @@ class ImportMixin(object):
|
|||
|
||||
@classmethod
|
||||
def import_from_dict(cls, session, dict_rep, parent=None,
|
||||
recursive=True, sync=[], respect_id=True):
|
||||
recursive=True, sync=[]):
|
||||
"""Import obj from a dictionary"""
|
||||
parent_refs = cls._parent_foreign_key_mappings()
|
||||
export_fields = set(cls.export_fields) | set(parent_refs.keys())
|
||||
logging.info(f'Doing the import_from_dict for the {cls}, with {dict_rep}, '
|
||||
f'respect_id={respect_id}')
|
||||
given_id = dict_rep.get('id', None) if respect_id else None
|
||||
new_children = {c: dict_rep.get(c) for c in cls.export_children
|
||||
if c in dict_rep}
|
||||
unique_constrains = cls._unique_constrains()
|
||||
|
@ -131,20 +128,14 @@ class ImportMixin(object):
|
|||
for k in parent_refs.keys()])
|
||||
|
||||
# Add filter for unique constraints
|
||||
if unique_constrains:
|
||||
ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
|
||||
for k in cs if dict_rep.get(k) is not None])
|
||||
for cs in unique_constrains]
|
||||
filters.append(or_(*ucs))
|
||||
elif given_id:
|
||||
logging.info(f'Not given any unique constraint, so adding an id check for'
|
||||
f'{getattr(cls, "id")} equal to {given_id}')
|
||||
filters.append(getattr(cls, 'id') == given_id)
|
||||
ucs = [and_(*[getattr(cls, k) == dict_rep.get(k)
|
||||
for k in cs if dict_rep.get(k) is not None])
|
||||
for cs in unique_constrains]
|
||||
filters.append(or_(*ucs))
|
||||
|
||||
# Check if object already exists in DB, break if more than one is found
|
||||
try:
|
||||
obj_query = session.query(cls).filter(and_(*filters))
|
||||
logging.info(f'Did the query {str(obj_query)} to find existing for {cls}')
|
||||
obj = obj_query.one_or_none()
|
||||
except MultipleResultsFound as e:
|
||||
logging.error('Error importing %s \n %s \n %s', cls.__name__,
|
||||
|
|
Loading…
Reference in New Issue