Fixed cognitive complexity issues reported by SonarQube.
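Every hunk below applies the same refactoring pattern: a deeply nested block inside one large function or view method is extracted into a small private helper (prefixed with an underscore), the original call site shrinks to a single call, and several ALL-CAPS `SQL` locals are renamed to `sql`. SonarQube's cognitive complexity score grows with every extra level of nesting and branching inside a single function, so flattening the code this way lowers the reported score without changing behaviour. A minimal sketch of the extraction pattern, with hypothetical names rather than code taken from this commit:

def get_sql(data):
    sql = []
    # Before the refactor, the nested if/for block now living in
    # _append_delete_sql() sat directly in this function and raised its
    # cognitive complexity score.
    _append_delete_sql(data, sql)
    return "\n".join(sql)


def _append_delete_sql(data, sql):
    """Extracted helper: one responsibility, shallow nesting."""
    if 'deleted' in data:
        for item in data['deleted']:
            sql.append('DROP CONSTRAINT "{0}";'.format(item['name']))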
parent 505a3ac960
commit 1fbf298d30
@@ -157,6 +157,31 @@ def get_parent(conn, tid, template_path=None):
    return schema, table


+def _get_sql_for_delete_fk_constraint(data, constraint, sql, template_path,
+                                      conn):
+    """
+    Get sql for delete foreign key constraints.
+    :param data:
+    :param constraint:
+    :param sql: sql for append
+    :param template_path: template path for sql.
+    :param conn:
+    :return:
+    """
+    if 'deleted' in constraint:
+        for c in constraint['deleted']:
+            c['schema'] = data['schema']
+            c['table'] = data['name']
+
+            # Sql for drop
+            sql.append(
+                render_template("/".join(
+                    [template_path,
+                     'delete.sql']),
+                    data=c, conn=conn).strip('\n')
+            )
+
+
@get_template_path
def get_foreign_key_sql(conn, tid, data, template_path=None):
    """
@@ -174,18 +199,8 @@ def get_foreign_key_sql(conn, tid, data, template_path=None):
    if 'foreign_key' in data:
        constraint = data['foreign_key']
        # If constraint(s) is/are deleted
-        if 'deleted' in constraint:
-            for c in constraint['deleted']:
-                c['schema'] = data['schema']
-                c['table'] = data['name']
-
-                # Sql for drop
-                sql.append(
-                    render_template("/".join(
-                        [template_path,
-                         'delete.sql']),
-                        data=c, conn=conn).strip('\n')
-                )
+        _get_sql_for_delete_fk_constraint(data, constraint, sql, template_path,
+                                          conn)

        if 'changed' in constraint:
            for c in constraint['changed']:
@@ -221,21 +236,9 @@ def get_sql(conn, data, tid, fkid=None, template_path=None):
    :param template_path: Template Path
    :return:
    """
-    name = data['name'] if 'name' in data else None
    if fkid is not None:
-        sql = render_template("/".join([template_path, 'properties.sql']),
-                              tid=tid, cid=fkid)
-        status, res = conn.execute_dict(sql)
-        if not status:
-            raise Exception(res)
-
-        if len(res['rows']) == 0:
-            raise ObjectGone(
-                _('Could not find the foreign key constraint in the table.'))
-
-        old_data = res['rows'][0]
-        if 'name' not in data:
-            name = data['name'] = old_data['name']
+        old_data, name = _get_properties_for_fk_const(tid, fkid, data,
+                                                      template_path, conn)

        sql = render_template("/".join([template_path, 'update.sql']),
                              data=data, o_data=old_data)
@@ -265,40 +268,92 @@ def get_sql(conn, data, tid, fkid=None, template_path=None):
                "/".join([template_path, 'create_index.sql']),
                data=data, conn=conn)
    else:
-        if 'columns' not in data or \
-            (isinstance(data['columns'], list) and
-             len(data['columns']) < 1):
-            return _('-- definition incomplete'), name
-
-        if data['autoindex'] and \
-                ('coveringindex' not in data or data['coveringindex'] == ''):
-            return _('-- definition incomplete'), name
-
-        # Get the parent schema and table.
-        schema, table = get_parent(conn,
-                                   data['columns'][0]['references'])
-
-        # Below handling will be used in Schema diff in case
-        # of different database comparison
-
-        if schema and table:
-            data['remote_schema'] = schema
-            data['remote_table'] = table
-
-        if 'remote_schema' not in data:
-            data['remote_schema'] = None
-        elif 'schema' in data and (schema is None or schema == ''):
-            data['remote_schema'] = data['schema']
-
-        if 'remote_table' not in data:
-            data['remote_table'] = None
-
-        sql = render_template("/".join([template_path, 'create.sql']),
-                              data=data, conn=conn)
-
-        if data['autoindex']:
-            sql += render_template(
-                "/".join([template_path, 'create_index.sql']),
-                data=data, conn=conn)
+        is_error, errmsg, name, sql = _get_sql_for_create_fk_const(
+            data, conn, template_path)
+        if is_error:
+            return _(errmsg), name

    return sql, name
+
+
+def _get_properties_for_fk_const(tid, fkid, data, template_path, conn):
+    """
+    Get property data for fk constraint.
+    tid: table Id
+    fkid: Foreign key constraint ID.
+    data: Data.
+    template_path: template path for get sql.
+    conn: Connection.
+    """
+    name = data['name'] if 'name' in data else None
+    sql = render_template("/".join([template_path, 'properties.sql']),
+                          tid=tid, cid=fkid)
+    status, res = conn.execute_dict(sql)
+    if not status:
+        raise Exception(res)
+
+    if len(res['rows']) == 0:
+        raise ObjectGone(
+            _('Could not find the foreign key constraint in the table.'))
+
+    old_data = res['rows'][0]
+    if 'name' not in data:
+        name = data['name'] = old_data['name']
+
+    return old_data, name
+
+
+def _get_sql_for_create_fk_const(data, conn, template_path):
+    """
+    Get SQL for create new foreign key constrains.
+    data: Data.
+    conn: Connection
+    template_path: template path for get template.
+    """
+    name = data['name'] if 'name' in data else None
+    if 'columns' not in data or \
+        (isinstance(data['columns'], list) and
+         len(data['columns']) < 1):
+        return True, '-- definition incomplete', name, ''
+
+    if data['autoindex'] and \
+            ('coveringindex' not in data or data['coveringindex'] == ''):
+        return True, '-- definition incomplete', name, ''
+
+    # Get the parent schema and table.
+    schema, table = get_parent(conn,
+                               data['columns'][0]['references'])
+
+    # Below handling will be used in Schema diff in case
+    # of different database comparison
+    _checks_for_schema_diff(table, schema, data)
+
+    sql = render_template("/".join([template_path, 'create.sql']),
+                          data=data, conn=conn)
+
+    if data['autoindex']:
+        sql += render_template(
+            "/".join([template_path, 'create_index.sql']),
+            data=data, conn=conn)
+
+    return False, '', '', sql
+
+
+def _checks_for_schema_diff(table, schema, data):
+    """
+    Check for schema diff in case of different database comparisons.
+    table: table data
+    schema: schema data
+    data:Data
+    """
+    if schema and table:
+        data['remote_schema'] = schema
+        data['remote_table'] = table
+
+    if 'remote_schema' not in data:
+        data['remote_schema'] = None
+    elif 'schema' in data and (schema is None or schema == ''):
+        data['remote_schema'] = data['schema']
+
+    if 'remote_table' not in data:
+        data['remote_table'] = None
@@ -536,13 +536,13 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
        drop_sql = kwargs.get('drop_sql', False)

        if drop_sql:
-            SQL = self.delete(gid=gid, sid=sid, did=did,
+            sql = self.delete(gid=gid, sid=sid, did=did,
                              scid=scid, tid=tid,
                              rid=oid, only_sql=True)
        else:
-            SQL = render_template("/".join(
+            sql = render_template("/".join(
                [self.template_path, self._PROPERTIES_SQL]), rid=oid)
-            status, res = self.conn.execute_dict(SQL)
+            status, res = self.conn.execute_dict(sql)
            if not status:
                return internal_server_error(errormsg=res)
            if len(res['rows']) == 0:
@@ -551,7 +551,7 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
                )
        res_data = parse_rule_definition(res)

-        SQL = ''
+        sql = ''

        if data:
            if source_schema and 'statements' in data:
@@ -559,24 +559,34 @@ class RuleView(PGChildNodeView, SchemaDiffObjectCompare):
                data['statements'] = data['statements'].replace(
                    source_schema, diff_schema)
            old_data = res_data
-            SQL = render_template(
+            sql = render_template(
                "/".join([self.template_path, self._UPDATE_SQL]),
                data=data, o_data=old_data
            )
        else:
-            if diff_schema:
-                if 'statements' in res_data:
-                    # Replace the source schema with the target schema
-                    res_data['statements'] = \
-                        res_data['statements'].replace(
-                            res_data['schema'], diff_schema)
-                res_data['schema'] = diff_schema
+            RuleView._check_schema_diff(diff_schema, res_data)

-            SQL = render_template("/".join(
+            sql = render_template("/".join(
                [self.template_path, self._CREATE_SQL]),
                data=res_data, display_comments=True)

-        return SQL
+        return sql

+    @staticmethod
+    def _check_schema_diff(diff_schema, res_data):
+        """
+        Check for schema diff, if yes then replace source schema with target
+        schema.
+        diff_schema: schema diff schema
+        res_data: response from properties sql.
+        """
+        if diff_schema:
+            if 'statements' in res_data:
+                # Replace the source schema with the target schema
+                res_data['statements'] = \
+                    res_data['statements'].replace(
+                        res_data['schema'], diff_schema)
+            res_data['schema'] = diff_schema
+
    @check_precondition
    def dependents(self, gid, sid, did, scid, tid, rid):
@@ -616,8 +616,7 @@ class TriggerView(PGChildNodeView, SchemaDiffObjectCompare):
        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)
-
-        if not res['rows']:
+        elif not res['rows']:
            return make_json_response(
                success=0,
                errormsg=gettext(
@@ -165,8 +165,7 @@ def get_sql(conn, **kwargs):
    status, res = conn.execute_dict(sql)
    if not status:
        raise Exception(res)
-
-    if len(res['rows']) == 0:
+    elif len(res['rows']) == 0:
        raise ObjectGone(_('Could not find the trigger in the table.'))

    old_data = dict(res['rows'][0])
@@ -175,47 +174,21 @@ def get_sql(conn, **kwargs):
        if 'name' not in data:
            name = data['name'] = old_data['name']

-        drop_sql = ''
-        if is_schema_diff:
-            if 'table' not in data:
-                data['table'] = old_data['relname']
-            if 'schema' not in data:
-                data['schema'] = old_data['nspname']
-
-            # If any of the below key is present in data then we need to drop
-            # trigger and re-create it.
-            key_array = ['prosrc', 'is_row_trigger', 'evnt_insert',
-                         'evnt_delete', 'evnt_update', 'fires', 'tgdeferrable',
-                         'whenclause', 'tfunction', 'tgargs', 'columns',
-                         'is_constraint_trigger', 'tginitdeferred']
-
-            is_drop_trigger = False
-            for key in key_array:
-                if key in data:
-                    is_drop_trigger = True
-                    break
-
-            if is_drop_trigger:
-                tmp_data = dict()
-                tmp_data['name'] = data['name']
-                tmp_data['nspname'] = old_data['nspname']
-                tmp_data['relname'] = old_data['relname']
-                drop_sql = render_template("/".join([template_path,
-                                                     'delete.sql']),
-                                           data=tmp_data, conn=conn)
+        drop_sql = _check_schema_diff_sql(is_schema_diff, data, old_data,
+                                          template_path, conn)

        old_data = get_trigger_function_and_columns(
            conn, old_data, tid, show_system_objects)

        old_data = trigger_definition(old_data)

-        SQL = render_template(
+        sql = render_template(
            "/".join([template_path, 'update.sql']),
            data=data, o_data=old_data, conn=conn
        )

        if is_schema_diff:
-            SQL = drop_sql + '\n' + SQL
+            sql = drop_sql + '\n' + sql
    else:
        required_args = {
            'name': 'Name',
@@ -227,9 +200,51 @@ def get_sql(conn, **kwargs):
            return _('-- definition incomplete')

        # If the request for new object which do not have did
-        SQL = render_template("/".join([template_path, 'create.sql']),
+        sql = render_template("/".join([template_path, 'create.sql']),
                              data=data, conn=conn)
-    return SQL, name
+    return sql, name


+def _check_schema_diff_sql(is_schema_diff, data, old_data, template_path,
+                           conn):
+    """
+    Check for schema diff and perform required actions.
+    is_schema_diff: flag for check req for schema diff.
+    data: Data.
+    old_data: properties sql data.
+    template_path: template path for get correct template location.
+    conn: Connection.
+    return: return deleted sql statement if any.
+    """
+    drop_sql = ''
+    if is_schema_diff:
+        if 'table' not in data:
+            data['table'] = old_data['relname']
+        if 'schema' not in data:
+            data['schema'] = old_data['nspname']
+
+        # If any of the below key is present in data then we need to drop
+        # trigger and re-create it.
+        key_array = ['prosrc', 'is_row_trigger', 'evnt_insert',
+                     'evnt_delete', 'evnt_update', 'fires', 'tgdeferrable',
+                     'whenclause', 'tfunction', 'tgargs', 'columns',
+                     'is_constraint_trigger', 'tginitdeferred']
+
+        is_drop_trigger = False
+        for key in key_array:
+            if key in data:
+                is_drop_trigger = True
+                break
+
+        if is_drop_trigger:
+            tmp_data = dict()
+            tmp_data['name'] = data['name']
+            tmp_data['nspname'] = old_data['nspname']
+            tmp_data['relname'] = old_data['relname']
+            drop_sql = render_template("/".join([template_path,
+                                                 'delete.sql']),
+                                       data=tmp_data, conn=conn)
+    return drop_sql
+
+
@get_template_path
@@ -743,17 +743,46 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
                status=200
            )

+    @staticmethod
+    def _parse_privilege_data(acls, data):
+        """
+        Check and parse privilege data.
+        acls: allowed privileges
+        data: data on which we check for having privilege or not.
+        """
+        for aclcol in acls:
+            if aclcol in data:
+                allowedacl = acls[aclcol]
+
+                for key in ['added', 'changed', 'deleted']:
+                    if key in data[aclcol]:
+                        data[aclcol][key] = parse_priv_to_db(
+                            data[aclcol][key], allowedacl['acl']
+                        )
+
+    @staticmethod
+    def _get_info_from_data(data, res):
+        """
+        Get name and schema data
+        data: sql data.
+        res: properties sql response.
+        """
+        if 'name' not in data:
+            data['name'] = res['rows'][0]['name']
+        if 'schema' not in data:
+            data['schema'] = res['rows'][0]['schema']
+
    def getSQL(self, gid, sid, did, data, vid=None):
        """
        This function will generate sql from model data
        """
        if vid is not None:
-            SQL = render_template("/".join(
+            sql = render_template("/".join(
                [self.template_path, 'sql/properties.sql']),
                vid=vid,
                datlastsysoid=self.datlastsysoid
            )
-            status, res = self.conn.execute_dict(SQL)
+            status, res = self.conn.execute_dict(sql)
            if not status:
                return None, internal_server_error(errormsg=res)
            if len(res['rows']) == 0:
@@ -762,10 +791,7 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
                )
            old_data = res['rows'][0]

-            if 'name' not in data:
-                data['name'] = res['rows'][0]['name']
-            if 'schema' not in data:
-                data['schema'] = res['rows'][0]['schema']
+            ViewNode._get_info_from_data(data, res)

            try:
                acls = render_template(
@@ -776,59 +802,18 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
                current_app.logger.exception(e)

            # Privileges
-            for aclcol in acls:
-                if aclcol in data:
-                    allowedacl = acls[aclcol]
+            ViewNode._parse_privilege_data(acls, data)

-                    for key in ['added', 'changed', 'deleted']:
-                        if key in data[aclcol]:
-                            data[aclcol][key] = parse_priv_to_db(
-                                data[aclcol][key], allowedacl['acl']
-                            )
            data['del_sql'] = False
            old_data['acl_sql'] = ''

-            if 'definition' in data and self.manager.server_type == 'pg':
-                new_def = re.sub(r"\W", "", data['definition']).split('FROM')
-                old_def = re.sub(r"\W", "", res['rows'][0]['definition']
-                                 ).split('FROM')
-                if 'definition' in data and (
-                    len(old_def) > 1 or len(new_def) > 1
-                ) and (
-                    old_def[0] != new_def[0] and
-                    old_def[0] not in new_def[0]
-                ):
-                    data['del_sql'] = True
-
-                    # If we drop and recreate the view, the
-                    # privileges must be restored
-
-                    # Fetch all privileges for view
-                    sql_acl = render_template("/".join(
-                        [self.template_path, 'sql/acl.sql']), vid=vid)
-                    status, dataclres = self.conn.execute_dict(sql_acl)
-                    if not status:
-                        return internal_server_error(errormsg=res)
-
-                    for row in dataclres['rows']:
-                        priv = parse_priv_from_db(row)
-                        res['rows'][0].setdefault(row['deftype'], []
-                                                  ).append(priv)
-
-                    old_data.update(res['rows'][0])
-
-                    # Privileges
-                    for aclcol in acls:
-                        if aclcol in old_data:
-                            allowedacl = acls[aclcol]
-                            old_data[aclcol] = parse_priv_to_db(
-                                old_data[aclcol], allowedacl['acl'])
-
-                    old_data['acl_sql'] = render_template("/".join(
-                        [self.template_path, 'sql/grant.sql']), data=old_data)
+            is_error, errmsg = self._get_definition_data(vid, data, old_data,
+                                                         res, acls)
+            if is_error:
+                return None, errmsg

            try:
-                SQL = render_template("/".join(
+                sql = render_template("/".join(
                    [self.template_path, 'sql/update.sql']), data=data,
                    o_data=old_data, conn=self.conn)

@@ -836,47 +821,126 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
                current_app.logger.exception(e)
                return None, internal_server_error(errormsg=str(e))
        else:
-            required_args = [
-                'name',
-                'schema',
-                'definition'
-            ]
-            for arg in required_args:
-                if arg not in data:
-                    return None, make_json_response(
-                        data=gettext(" -- definition incomplete"),
-                        status=200
-                    )
+            is_error, errmsg, sql = self._get_create_view_sql(data)
+            if is_error:
+                return None, errmsg

-            # Get Schema Name from its OID.
-            if 'schema' in data and isinstance(data['schema'], int):
-                data['schema'] = self._get_schema(data['schema'])
+        return sql, data['name'] if 'name' in data else old_data['name']

-            acls = []
-            try:
-                acls = render_template(
-                    "/".join([self.template_path, 'sql/allowed_privs.json'])
-                )
-                acls = json.loads(acls, encoding='utf-8')
-            except Exception as e:
-                current_app.logger.exception(e)
+    def _get_create_view_sql(self, data):
+        """
+        Get create view sql with it's privileges.
+        data: Source data for sql generation
+        return: created sql for create view.
+        """
+        required_args = [
+            'name',
+            'schema',
+            'definition'
+        ]
+        for arg in required_args:
+            if arg not in data:
+                return True, make_json_response(
+                    data=gettext(" -- definition incomplete"),
+                    status=200
+                ), ''

-            # Privileges
-            for aclcol in acls:
-                if aclcol in data:
-                    allowedacl = acls[aclcol]
-                    data[aclcol] = parse_priv_to_db(
-                        data[aclcol], allowedacl['acl']
-                    )
+        # Get Schema Name from its OID.
+        if 'schema' in data and isinstance(data['schema'], int):
+            data['schema'] = self._get_schema(data['schema'])

-            SQL = render_template("/".join(
-                [self.template_path, 'sql/create.sql']), data=data)
-            if data['definition']:
-                SQL += "\n"
-                SQL += render_template("/".join(
-                    [self.template_path, 'sql/grant.sql']), data=data)
+        acls = []
+        try:
+            acls = render_template(
+                "/".join([self.template_path, 'sql/allowed_privs.json'])
+            )
+            acls = json.loads(acls, encoding='utf-8')
+        except Exception as e:
+            current_app.logger.exception(e)

-        return SQL, data['name'] if 'name' in data else old_data['name']
+        # Privileges
+        ViewNode._parse_priv_data(acls, data)
+
+        sql = render_template("/".join(
+            [self.template_path, 'sql/create.sql']), data=data)
+        if data['definition']:
+            sql += "\n"
+            sql += render_template("/".join(
+                [self.template_path, 'sql/grant.sql']), data=data)
+
+        return False, '', sql
+
+    def _get_definition_data(self, vid, data, old_data, res, acls):
+        """
+        Check and process definition data.
+        vid: View Id.
+        data: sql data.
+        old_data: properties sql data.
+        res: Response data from properties sql.
+        acls: allowed privileges.
+
+        return: If any error it will return True with error msg,
+        if not retun False with error msg empty('')
+        """
+        if 'definition' in data and self.manager.server_type == 'pg':
+            new_def = re.sub(r"\W", "", data['definition']).split('FROM')
+            old_def = re.sub(r"\W", "", res['rows'][0]['definition']
+                             ).split('FROM')
+            if 'definition' in data and (
+                len(old_def) > 1 or len(new_def) > 1
+            ) and (
+                old_def[0] != new_def[0] and
+                old_def[0] not in new_def[0]
+            ):
+                data['del_sql'] = True
+
+                # If we drop and recreate the view, the
+                # privileges must be restored
+
+                # Fetch all privileges for view
+                is_error, errmsg = self._fetch_all_view_priv(vid, res)
+                if is_error:
+                    return True, errmsg
+
+                old_data.update(res['rows'][0])
+
+                # Privileges
+                ViewNode._parse_priv_data(acls, old_data)
+
+                old_data['acl_sql'] = render_template("/".join(
+                    [self.template_path, 'sql/grant.sql']), data=old_data)
+        return False, ''
+
+    def _fetch_all_view_priv(self, vid, res):
+        """
+        This is for fetch all privileges for the view.
+        vid: View ID
+        res: response data from property sql
+        """
+        sql_acl = render_template("/".join(
+            [self.template_path, 'sql/acl.sql']), vid=vid)
+        status, dataclres = self.conn.execute_dict(sql_acl)
+        if not status:
+            return True, internal_server_error(errormsg=res)
+
+        for row in dataclres['rows']:
+            priv = parse_priv_from_db(row)
+            res['rows'][0].setdefault(row['deftype'], []
+                                      ).append(priv)
+        return False, ''
+
+    @staticmethod
+    def _parse_priv_data(acls, data):
+        """
+        Iterate privilege data and send it for parsing before send it to db.
+        acls: allowed privileges
+        data: data on which we check for privilege check.
+        """
+        for aclcol in acls:
+            if aclcol in data:
+                allowedacl = acls[aclcol]
+                data[aclcol] = parse_priv_to_db(
+                    data[aclcol], allowedacl['acl'])

    def get_index_column_details(self, idx, data):
        """
@@ -993,6 +1057,72 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
            sql_data += SQL
        return sql_data

+    def _generate_and_return_trigger_sql(self, vid, data, display_comments,
+                                         sql_data):
+        """
+        Iterate trigger data and generate sql for different tabs of trigger.
+        vid: View ID
+        data: Trigger data for iteration.
+        display_comments: comments for sql
+        sql_data: Sql queries
+        return: Check if any error then return error, else return sql data.
+        """
+
+        from pgadmin.browser.server_groups.servers.databases.schemas.utils \
+            import trigger_definition
+
+        for trigger in data['rows']:
+            SQL = render_template("/".join(
+                [self.ct_trigger_temp_path,
+                 'sql/{0}/#{1}#/properties.sql'.format(
+                     self.manager.server_type, self.manager.version)]),
+                tid=vid,
+                trid=trigger['oid']
+            )
+
+            status, res = self.conn.execute_dict(SQL)
+            if not status:
+                return internal_server_error(errormsg=res)
+
+            if len(res['rows']) == 0:
+                continue
+            res_rows = dict(res['rows'][0])
+            res_rows['table'] = res_rows['relname']
+            res_rows['schema'] = self.view_schema
+
+            if len(res_rows['tgattr']) > 1:
+                columns = ', '.join(res_rows['tgattr'].split(' '))
+                SQL = render_template("/".join(
+                    [self.ct_trigger_temp_path,
+                     'sql/{0}/#{1}#/get_columns.sql'.format(
+                         self.manager.server_type,
+                         self.manager.version)]),
+                    tid=trigger['oid'],
+                    clist=columns)
+
+                status, rset = self.conn.execute_2darray(SQL)
+                if not status:
+                    return True, internal_server_error(errormsg=rset), ''
+                # 'tgattr' contains list of columns from table
+                # used in trigger
+                columns = []
+
+                for col_row in rset['rows']:
+                    columns.append(col_row['name'])
+
+                res_rows['columns'] = columns
+
+            res_rows = trigger_definition(res_rows)
+            SQL = render_template("/".join(
+                [self.ct_trigger_temp_path,
+                 'sql/{0}/#{1}#/create.sql'.format(
+                     self.manager.server_type, self.manager.version)]),
+                data=res_rows, display_comments=display_comments)
+            sql_data += '\n'
+            sql_data += SQL
+
+        return False, '', sql_data
+
    def get_compound_trigger_sql(self, vid, display_comments=True):
        """
        Get all compound trigger nodes associated with view node,
@@ -1002,9 +1132,6 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
        if self.manager.server_type == 'ppas' \
                and self.manager.version >= 120000:

-            from pgadmin.browser.server_groups.servers.databases.schemas.utils\
-                import trigger_definition
-
            # Define template path
            self.ct_trigger_temp_path = 'compound_triggers'

@@ -1018,55 +1145,11 @@ class ViewNode(PGChildNodeView, VacuumSettings, SchemaDiffObjectCompare):
            if not status:
                return internal_server_error(errormsg=data)

-            for trigger in data['rows']:
-                SQL = render_template("/".join(
-                    [self.ct_trigger_temp_path,
-                     'sql/{0}/#{1}#/properties.sql'.format(
-                         self.manager.server_type, self.manager.version)]),
-                    tid=vid,
-                    trid=trigger['oid']
-                )
+            is_error, errmsg, sql_data = self._generate_and_return_trigger_sql(
+                vid, data, display_comments, sql_data)

-                status, res = self.conn.execute_dict(SQL)
-                if not status:
-                    return internal_server_error(errormsg=res)
-
-                if len(res['rows']) == 0:
-                    continue
-                res_rows = dict(res['rows'][0])
-                res_rows['table'] = res_rows['relname']
-                res_rows['schema'] = self.view_schema
-
-                if len(res_rows['tgattr']) > 1:
-                    columns = ', '.join(res_rows['tgattr'].split(' '))
-                    SQL = render_template("/".join(
-                        [self.ct_trigger_temp_path,
-                         'sql/{0}/#{1}#/get_columns.sql'.format(
-                             self.manager.server_type,
-                             self.manager.version)]),
-                        tid=trigger['oid'],
-                        clist=columns)
-
-                    status, rset = self.conn.execute_2darray(SQL)
-                    if not status:
-                        return internal_server_error(errormsg=rset)
-                    # 'tgattr' contains list of columns from table
-                    # used in trigger
-                    columns = []
-
-                    for col_row in rset['rows']:
-                        columns.append(col_row['name'])
-
-                    res_rows['columns'] = columns
-
-                res_rows = trigger_definition(res_rows)
-                SQL = render_template("/".join(
-                    [self.ct_trigger_temp_path,
-                     'sql/{0}/#{1}#/create.sql'.format(
-                         self.manager.server_type, self.manager.version)]),
-                    data=res_rows, display_comments=display_comments)
-                sql_data += '\n'
-                sql_data += SQL
+            if is_error:
+                return errmsg

        return sql_data

@@ -71,20 +71,37 @@ def parse_variables_from_db(db_variables):
        if 'setconfig' in row and row['setconfig'] is not None:
            for d in row['setconfig']:
                var_name, var_value = d.split("=")
-                # Because we save as boolean string in db so it needs
-                # conversion
-                if var_value == 'false' or var_value == 'off':
-                    var_value = False
-
-                var_dict = {
-                    'name': var_name,
-                    'value': var_value
-                }
-                if 'user_name' in row:
-                    var_dict['role'] = row['user_name']
-                if 'db_name' in row:
-                    var_dict['database'] = row['db_name']
-
+                var_dict = _check_var_type(var_value, var_name, row)
                variables_lst.append(var_dict)

    return {"variables": variables_lst}


+def _check_var_type(var_value, var_name, row):
+    """
+    Function for check variable type and return dictionary in the format
+    {
+        "name": String,
+        "value": String
+    }
+    var_value: Input variable value
+    var_name: Input variable name
+    row: data
+    return: Variable dictionary.
+    """
+
+    # Because we save as boolean string in db so it needs
+    # conversion
+    if var_value == 'false' or var_value == 'off':
+        var_value = False
+
+    var_dict = {
+        'name': var_name,
+        'value': var_value
+    }
+    if 'user_name' in row:
+        var_dict['role'] = row['user_name']
+    if 'db_name' in row:
+        var_dict['database'] = row['db_name']
+
+    return var_dict