Refactored code of table and its child nodes.

1) Remove duplicate logic.
2) Child-node-specific logic should be inside the child node, and the table node should get the required information from the respective child node.

Note: Refactoring of the columns node is still in progress.

Fixes #4807

parent 00ce130286
commit de878b3198
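The diff below applies the same pattern to each child node of a table (compound triggers, check constraints, exclusion constraints and foreign keys): every node gains a module-level utils.py whose helpers take the connection object and resolve their own version-specific template path, so the view classes and the table node no longer duplicate the get_parent.sql / properties.sql handling. A condensed sketch of that pattern, using the check-constraint names introduced later in this diff (only the surrounding imports are trimmed for illustration):

from functools import wraps
from flask import render_template


def get_template_path(f):
    """Decorator that injects the version-specific template path."""
    @wraps(f)
    def wrap(*args, **kwargs):
        conn_obj = args[0]  # the first positional argument is the connection
        if 'template_path' not in kwargs:
            kwargs['template_path'] = 'check_constraint/sql/#{0}#'.format(
                conn_obj.manager.version)
        return f(*args, **kwargs)
    return wrap


@get_template_path
def get_parent(conn, tid, template_path=None):
    """Return (schema, table) of the table identified by tid."""
    sql = render_template("/".join([template_path, 'get_parent.sql']), tid=tid)
    status, rset = conn.execute_2darray(sql)
    if not status:
        raise Exception(rset)

    schema, table = '', ''
    if 'rows' in rset and len(rset['rows']) > 0:
        schema = rset['rows'][0]['schema']
        table = rset['rows'][0]['table']
    return schema, table

# A view's check_precondition wrapper then shrinks to two lines:
#     schema, table = check_utils.get_parent(self.conn, kwargs['tid'])
#     self.schema, self.table = schema, table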
@@ -13,6 +13,7 @@ New features
Housekeeping
************

| `Issue #4807 <https://redmine.postgresql.org/issues/4807>`_ - Refactored code of table and its child nodes.

Bug fixes
*********
|
|
@@ -22,6 +22,8 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
make_response as ajax_response, gone
|
||||
from .utils import BaseTableView
|
||||
from pgadmin.utils.preferences import Preferences
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.foreign_key import utils as fkey_utils
|
||||
|
||||
|
||||
class TableModule(SchemaChildModule):
|
||||
@@ -1011,18 +1013,10 @@ class TableView(BaseTableView, DataTypeReader, VacuumSettings):
|
||||
if 'foreign_key' in data:
|
||||
for c in data['foreign_key']:
|
||||
SQL = render_template(
|
||||
"/".join([
|
||||
self.foreign_key_template_path, 'get_parent.sql'
|
||||
]),
|
||||
tid=c['columns'][0]['references']
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
c['remote_schema'] = rset['rows'][0]['schema']
|
||||
c['remote_table'] = rset['rows'][0]['table']
|
||||
schema, table = fkey_utils.get_parent(
|
||||
self.conn, c['columns'][0]['references'])
|
||||
c['remote_schema'] = schema
|
||||
c['remote_table'] = table
|
||||
|
||||
try:
|
||||
partitions_sql = ''
|
||||
|
|
@@ -19,6 +19,10 @@ from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
compound_triggers import utils as compound_trigger_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
import trigger_definition
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
@@ -199,9 +203,6 @@ class CompoundTriggerView(PGChildNodeView):
- This function is used to return modified SQL for the selected
|
||||
Compound Trigger node
|
||||
|
||||
* get_sql(data, scid, tid, trid)
|
||||
- This function will generate sql from model data
|
||||
|
||||
* sql(gid, sid, did, scid, tid, trid):
|
||||
- This function will generate sql to show it in sql pane for the
|
||||
selected Compound Trigger node.
|
||||
@@ -213,13 +214,6 @@ class CompoundTriggerView(PGChildNodeView):
* dependent(gid, sid, did, scid, tid, trid):
|
||||
- This function will generate dependent list to show it in dependent
|
||||
pane for the selected Compound Trigger node.
|
||||
|
||||
* _column_details(tid, clist)::
|
||||
- This function will fetch the columns for compound trigger
|
||||
|
||||
* _trigger_definition(data):
|
||||
- This function will set additional compound trigger definitions in
|
||||
AJAX response
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
@@ -281,28 +275,10 @@ class CompoundTriggerView(PGChildNodeView):
# We need parent's name eg table name and schema name
|
||||
# when we create new compound trigger in update we can fetch
|
||||
# it using property sql
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
|
||||
# Here we are storing compound trigger definition
|
||||
# We will use it to check compound trigger type definition
|
||||
self.trigger_definition = {
|
||||
'TRIGGER_TYPE_ROW': (1 << 0),
|
||||
'TRIGGER_TYPE_BEFORE': (1 << 1),
|
||||
'TRIGGER_TYPE_INSERT': (1 << 2),
|
||||
'TRIGGER_TYPE_DELETE': (1 << 3),
|
||||
'TRIGGER_TYPE_UPDATE': (1 << 4),
|
||||
'TRIGGER_TYPE_TRUNCATE': (1 << 5),
|
||||
'TRIGGER_TYPE_INSTEAD': (1 << 6)
|
||||
}
|
||||
schema, table = compound_trigger_utils.get_parent(
|
||||
self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
@@ -422,67 +398,6 @@ class CompoundTriggerView(PGChildNodeView):
status=200
|
||||
)
|
||||
|
||||
def _column_details(self, tid, clist):
|
||||
"""
|
||||
This function will fetch the list of columns for the compound trigger
|
||||
|
||||
Args:
|
||||
tid: Table OID
|
||||
clist: List of columns
|
||||
|
||||
Returns:
|
||||
Updated properties data with column
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_columns.sql']),
|
||||
tid=tid, clist=clist)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
# 'tgattr' contains list of columns from table used in
|
||||
# compound trigger
|
||||
columns = []
|
||||
|
||||
for row in rset['rows']:
|
||||
columns.append(row['name'])
|
||||
|
||||
return columns
|
||||
|
||||
def _trigger_definition(self, data):
|
||||
"""
|
||||
This function will set the compound trigger definition
|
||||
|
||||
Args:
|
||||
data: Properties data
|
||||
|
||||
Returns:
|
||||
Updated properties data with compound trigger definition
|
||||
"""
|
||||
|
||||
# Event definition
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_INSERT']:
|
||||
data['evnt_insert'] = True
|
||||
else:
|
||||
data['evnt_insert'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_DELETE']:
|
||||
data['evnt_delete'] = True
|
||||
else:
|
||||
data['evnt_delete'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_UPDATE']:
|
||||
data['evnt_update'] = True
|
||||
else:
|
||||
data['evnt_update'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_TRUNCATE']:
|
||||
data['evnt_truncate'] = True
|
||||
else:
|
||||
data['evnt_truncate'] = False
|
||||
|
||||
return data
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
@@ -520,9 +435,10 @@ class CompoundTriggerView(PGChildNodeView):
data = dict(res['rows'][0])
|
||||
if len(data['tgattr']) >= 1:
|
||||
columns = ', '.join(data['tgattr'].split(' '))
|
||||
data['columns'] = self._column_details(tid, columns)
|
||||
data['columns'] = compound_trigger_utils.get_column_details(
|
||||
self.conn, tid, columns)
|
||||
|
||||
data = self._trigger_definition(data)
|
||||
data = trigger_definition(data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
@@ -696,7 +612,8 @@ class CompoundTriggerView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
SQL, name = self.get_sql(scid, tid, trid, data)
|
||||
SQL, name = compound_trigger_utils.get_sql(
|
||||
self.conn, data, tid, trid, self.datlastsysoid)
|
||||
if not isinstance(SQL, (str, unicode)):
|
||||
return SQL
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
@@ -775,7 +692,8 @@ class CompoundTriggerView(PGChildNodeView):
data['table'] = self.table
|
||||
|
||||
try:
|
||||
sql, name = self.get_sql(scid, tid, trid, data)
|
||||
sql, name = compound_trigger_utils.get_sql(
|
||||
self.conn, data, tid, trid, self.datlastsysoid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -789,58 +707,6 @@ class CompoundTriggerView(PGChildNodeView):
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, scid, tid, trid, data):
|
||||
"""
|
||||
This function will generate sql from model data
|
||||
"""
|
||||
if trid is not None:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
datlastsysoid=self.datlastsysoid)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext(
|
||||
"""Could not find the compound trigger in the table.""")
|
||||
)
|
||||
|
||||
old_data = dict(res['rows'][0])
|
||||
|
||||
# If name is not present in data then
|
||||
# we will fetch it from old data, we also need schema & table name
|
||||
if 'name' not in data:
|
||||
data['name'] = old_data['name']
|
||||
|
||||
self.trigger_name = data['name']
|
||||
self.is_trigger_enabled = old_data['is_enable_trigger']
|
||||
|
||||
if len(old_data['tgattr']) > 1:
|
||||
columns = ', '.join(old_data['tgattr'].split(' '))
|
||||
old_data['columns'] = self._column_details(tid, columns)
|
||||
|
||||
old_data = self._trigger_definition(old_data)
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
else:
|
||||
required_args = {
|
||||
'name': 'Name'
|
||||
}
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return gettext('-- definition incomplete')
|
||||
|
||||
# If the request for new object which do not have did
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
@@ -855,46 +721,12 @@ class CompoundTriggerView(PGChildNodeView):
tid: Table ID
|
||||
trid: Trigger ID
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
datlastsysoid=self.datlastsysoid)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext(
|
||||
"""Could not find the compound trigger in the table."""))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
if len(data['tgattr']) >= 1:
|
||||
columns = ', '.join(data['tgattr'].split(' '))
|
||||
data['columns'] = self._column_details(tid, columns)
|
||||
|
||||
data = self._trigger_definition(data)
|
||||
|
||||
SQL, name = self.get_sql(scid, tid, None, data)
|
||||
|
||||
sql_header = u"-- Compound Trigger: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template("/".join([self.template_path,
|
||||
'delete.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL.strip('\n')
|
||||
|
||||
# If compound trigger is disabled then add sql code for the same
|
||||
if data['is_enable_trigger'] != 'O':
|
||||
SQL += '\n\n'
|
||||
SQL += render_template("/".join([self.template_path,
|
||||
'enable_disable_trigger.sql']),
|
||||
data=data, conn=self.conn)
|
||||
try:
|
||||
SQL = compound_trigger_utils.get_reverse_engineered_sql(
|
||||
self.conn, self.schema, self.table, tid, trid,
|
||||
self.datlastsysoid)
|
||||
except Exception as e:
|
||||
return internal_server_error(errormsg=SQL)
|
||||
|
||||
return ajax_response(response=SQL)
|
||||
|
||||
|
|
@@ -0,0 +1,197 @@
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Foreign Keys. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
import trigger_definition
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = 'compound_triggers/sql/{0}/#{1}#'.format(
|
||||
conn_obj.manager.server_type, conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_column_details(conn, tid, clist, template_path=None):
|
||||
"""
|
||||
This function will fetch the list of columns for the trigger.
|
||||
:param conn:
|
||||
:param tid:
|
||||
:param clist:
|
||||
:param template_path:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_columns.sql']),
|
||||
tid=tid, clist=clist)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
columns = []
|
||||
for row in rset['rows']:
|
||||
columns.append(row['name'])
|
||||
|
||||
return columns
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, tid, trid, datlastsysoid, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data
|
||||
:param conn: Connection Object
|
||||
:param data: Data
|
||||
:param tid: Table ID
|
||||
:param trid: Trigger ID
|
||||
:param datlastsysoid:
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if trid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
datlastsysoid=datlastsysoid)
|
||||
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the compound trigger in the table.'))
|
||||
|
||||
old_data = dict(res['rows'][0])
|
||||
# If name is not present in data then
|
||||
# we will fetch it from old data, we also need schema & table name
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
if len(old_data['tgattr']) > 1:
|
||||
columns = ', '.join(old_data['tgattr'].split(' '))
|
||||
old_data['columns'] = get_column_details(conn, tid, columns)
|
||||
|
||||
old_data = trigger_definition(old_data)
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=conn
|
||||
)
|
||||
else:
|
||||
required_args = {
|
||||
'name': 'Name'
|
||||
}
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
# If the request is for a new object which does not have a did
|
||||
SQL = render_template("/".join([template_path, 'create.sql']),
|
||||
data=data, conn=conn)
|
||||
return SQL, name
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_reverse_engineered_sql(conn, schema, table, tid, trid,
|
||||
datlastsysoid, template_path=None):
|
||||
"""
|
||||
This function will return reverse engineered sql for trigger(s).
|
||||
:param conn: Connection Object
|
||||
:param schema: Schema
|
||||
:param table: Table
|
||||
:param tid: Table ID
|
||||
:param trid: Trigger ID
|
||||
:param datlastsysoid:
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([template_path, 'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
datlastsysoid=datlastsysoid)
|
||||
|
||||
status, res = conn.execute_dict(SQL)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the compound trigger in the table.'))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
data['schema'] = schema
|
||||
data['table'] = table
|
||||
|
||||
if len(data['tgattr']) >= 1:
|
||||
columns = ', '.join(data['tgattr'].split(' '))
|
||||
data['columns'] = get_column_details(conn, tid, columns)
|
||||
|
||||
data = trigger_definition(data)
|
||||
|
||||
SQL, name = get_sql(conn, data, tid, None, datlastsysoid)
|
||||
|
||||
sql_header = u"-- Compound Trigger: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL.strip('\n')
|
||||
|
||||
# If trigger is disabled then add sql code for the same
|
||||
if data['is_enable_trigger'] != 'O':
|
||||
SQL += '\n\n'
|
||||
SQL += render_template("/".join([template_path,
|
||||
'enable_disable_trigger.sql']),
|
||||
data=data, conn=conn)
|
||||
return SQL
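With get_reverse_engineered_sql available, the sql() endpoint of CompoundTriggerView (changed in the @@ -855 hunk earlier in this diff) essentially reduces to one delegation. A condensed sketch; self.conn, self.schema, self.table and self.datlastsysoid are the attributes the existing check_precondition wrapper already sets on the view:

# Condensed CompoundTriggerView.sql() after the refactor; the original
# endpoint additionally wraps this call in a try/except that returns an
# internal_server_error response.
SQL = compound_trigger_utils.get_reverse_engineered_sql(
    self.conn, self.schema, self.table, tid, trid, self.datlastsysoid)
return ajax_response(response=SQL)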
|
@@ -21,6 +21,8 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.check_constraint import utils as check_utils
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
|
||||
@@ -141,10 +143,6 @@ class CheckConstraintView(PGChildNodeView):
* msql(gid, sid, did, scid, doid=None):
|
||||
- Returns the modified SQL.
|
||||
|
||||
* get_sql(gid, sid, data, scid, tid=None):
|
||||
- Generates the SQL statements to create/update the Check Constraint.
|
||||
object.
|
||||
|
||||
* dependents(gid, sid, did, scid, tid, cid):
|
||||
- Returns the dependents for the Check Constraint object.
|
||||
|
||||
@@ -202,15 +200,9 @@ class CheckConstraintView(PGChildNodeView):
self.template_path = 'check_constraint/sql/#{0}#'.format(
|
||||
self.manager.version)
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
self.schema = rset['rows'][0]['schema']
|
||||
self.table = rset['rows'][0]['table']
|
||||
schema, table = check_utils.get_parent(self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
@@ -272,15 +264,9 @@ class CheckConstraintView(PGChildNodeView):
self.template_path = 'check_constraint/sql/#{0}#'.format(
|
||||
self.manager.version)
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
self.schema = rset['rows'][0]['schema']
|
||||
self.table = rset['rows'][0]['table']
|
||||
schema, table = check_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
tid=tid)
|
||||
@@ -391,15 +377,9 @@ class CheckConstraintView(PGChildNodeView):
self.template_path = 'check_constraint/sql/#{0}#'.format(
|
||||
self.manager.version)
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
self.schema = rset['rows'][0]['schema']
|
||||
self.table = rset['rows'][0]['table']
|
||||
schema, table = check_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
@@ -438,20 +418,21 @@ class CheckConstraintView(PGChildNodeView):
cid: Check Constraint Id
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, cid=cid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
status, res = check_utils.get_check_constraints(self.conn, tid, cid)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("Could not find the object on the server.")
|
||||
)
|
||||
return res
|
||||
|
||||
if len(res) == 0:
|
||||
return gone(_(
|
||||
"""Could not find the check constraint in the table."""
|
||||
))
|
||||
|
||||
result = res
|
||||
if cid:
|
||||
result = res[0]
|
||||
|
||||
data = res['rows'][0]
|
||||
return ajax_response(
|
||||
response=data,
|
||||
response=result,
|
||||
status=200
|
||||
)
|
||||
|
||||
@@ -653,7 +634,7 @@ class CheckConstraintView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
SQL, name = self.get_sql(gid, sid, data, scid, tid, cid)
|
||||
SQL, name = check_utils.get_sql(self.conn, data, tid, cid)
|
||||
if not SQL:
|
||||
return name
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
@@ -763,7 +744,7 @@ class CheckConstraintView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
sql, name = self.get_sql(gid, sid, data, scid, tid, cid)
|
||||
sql, name = check_utils.get_sql(self.conn, data, tid, cid)
|
||||
if not sql:
|
||||
return name
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -777,56 +758,6 @@ class CheckConstraintView(PGChildNodeView):
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, gid, sid, data, scid, tid, cid=None):
|
||||
"""
|
||||
Generates the SQL statements to create/update the Check Constraint.
|
||||
|
||||
Args:
|
||||
gid: Server Group Id
|
||||
sid: Server Id
|
||||
did: Database Id
|
||||
scid: Schema Id
|
||||
tid: Table Id
|
||||
cid: Check Constraint Id
|
||||
"""
|
||||
if cid is not None:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, cid=cid)
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
return False, internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return False, gone(
|
||||
_("Could not find the object on the server.")
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
required_args = ['name']
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
else:
|
||||
required_args = ['consrc']
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return _('-- definition incomplete')
|
||||
elif isinstance(data[arg], list) and len(data[arg]) < 1:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'create.sql']),
|
||||
data=data)
|
||||
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def dependents(self, gid, sid, did, scid, tid, cid):
|
||||
"""
|
||||
|
|
@@ -0,0 +1,174 @@
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Check Constraint. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = 'check_constraint/sql/#{0}#'.format(
|
||||
conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_check_constraints(conn, tid, cid=None, template_path=None):
|
||||
"""
|
||||
This function is used to fetch information of the
|
||||
check constraint(s) for the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table ID
|
||||
:param cid: Check Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = render_template("/".join(
|
||||
[template_path, 'properties.sql']), tid=tid, cid=cid)
|
||||
|
||||
status, result = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=result)
|
||||
|
||||
return True, result['rows']
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_check_constraint_sql(conn, tid, data, template_path=None):
|
||||
"""
|
||||
This function will return sql for check constraints.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table ID
|
||||
:param data: Data
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = []
|
||||
# Check if constraint is in data
|
||||
# If yes then we need to check for add/change/delete
|
||||
if 'check_constraint' in data:
|
||||
constraint = data['check_constraint']
|
||||
# If constraint(s) is/are deleted
|
||||
if 'deleted' in constraint:
|
||||
for c in constraint['deleted']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
# Sql for drop
|
||||
sql.append(
|
||||
render_template("/".join(
|
||||
[template_path, 'delete.sql']),
|
||||
data=c, conn=conn).strip("\n")
|
||||
)
|
||||
|
||||
if 'changed' in constraint:
|
||||
for c in constraint['changed']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
modified_sql, name = get_sql(conn, c, tid, c['oid'])
|
||||
sql.append(modified_sql.strip('\n'))
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
add_sql, name = get_sql(conn, c, tid)
|
||||
sql.append(add_sql.strip("\n"))
|
||||
|
||||
if len(sql) > 0:
|
||||
# Join all the sql(s) as single string
|
||||
return '\n\n'.join(sql)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, tid, cid=None, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
:param conn: Connection Object
|
||||
:param data: data
|
||||
:param tid: Table id
|
||||
:param cid: Check Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if cid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
tid=tid, cid=cid)
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the check constraint in the table.'))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
sql = render_template("/".join([template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=conn)
|
||||
else:
|
||||
if 'consrc' not in data:
|
||||
return _('-- definition incomplete'), name
|
||||
elif isinstance(data['consrc'], list) and len(data['consrc']) < 1:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
sql = render_template("/".join([template_path, 'create.sql']),
|
||||
data=data)
|
||||
|
||||
return sql, name
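The check-constraint view consumes these helpers in the same way as the other nodes; roughly, its properties() handler (see the @@ -438 hunk above) reduces to the following sketch, where gone, ajax_response and the gettext alias _ come from the view module's existing imports:

# Sketch of CheckConstraintView.properties() after the refactor.
status, res = check_utils.get_check_constraints(self.conn, tid, cid)
if not status:
    return res  # on failure the helper already built the error response

if len(res) == 0:
    return gone(_("Could not find the check constraint in the table."))

result = res[0] if cid else res
return ajax_response(response=result, status=200)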
|
@@ -20,6 +20,8 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.exclusion_constraint import utils as exclusion_utils
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
@@ -228,16 +230,10 @@ class ExclusionConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = exclusion_utils.get_parent(self.conn,
|
||||
kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
@@ -265,60 +261,19 @@ class ExclusionConstraintView(PGChildNodeView):
Returns:
|
||||
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=exid)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
status, res = exclusion_utils.get_exclusion_constraints(
|
||||
self.conn, did, tid, exid)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return res
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
if len(res) == 0:
|
||||
return gone(_(
|
||||
"""Could not find the exclusion constraint in the table."""
|
||||
))
|
||||
|
||||
result = res['rows'][0]
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_constraint_cols.sql']),
|
||||
cid=exid,
|
||||
colcnt=result['col_count'])
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
for row in res['rows']:
|
||||
if row['options'] & 1:
|
||||
order = False
|
||||
nulls_order = True if (row['options'] & 2) else False
|
||||
else:
|
||||
order = True
|
||||
nulls_order = True if (row['options'] & 2) else False
|
||||
|
||||
columns.append({"column": row['coldef'].strip('"'),
|
||||
"oper_class": row['opcname'],
|
||||
"order": order,
|
||||
"nulls_order": nulls_order,
|
||||
"operator": row['oprname'],
|
||||
"col_type": row['datatype']
|
||||
})
|
||||
|
||||
result['columns'] = columns
|
||||
|
||||
# Add Include details of the index supported for PG-11+
|
||||
if self.manager.version >= 110000:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_constraint_include.sql']),
|
||||
cid=exid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
result['include'] = [col['colname'] for col in res['rows']]
|
||||
result = res
|
||||
if exid:
|
||||
result = res[0]
|
||||
|
||||
return ajax_response(
|
||||
response=result,
|
||||
@@ -374,16 +329,9 @@ class ExclusionConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = exclusion_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
@@ -489,16 +437,9 @@ class ExclusionConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = exclusion_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
@@ -656,7 +597,8 @@ class ExclusionConstraintView(PGChildNodeView):
try:
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
sql, name = self.get_sql(data, did, tid, exid)
|
||||
sql, name = \
|
||||
exclusion_utils.get_sql(self.conn, data, did, tid, exid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -787,7 +729,8 @@ class ExclusionConstraintView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
sql, name = self.get_sql(data, did, tid, exid)
|
||||
sql, name = \
|
||||
exclusion_utils.get_sql(self.conn, data, did, tid, exid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -801,53 +744,6 @@ class ExclusionConstraintView(PGChildNodeView):
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, data, did, tid, exid=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
Args:
|
||||
data: Contains the data of the selected Exclusion constraint.
|
||||
tid: Table ID.
|
||||
exid: Exclusion constraint ID
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if exid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
did=did,
|
||||
tid=tid,
|
||||
cid=exid
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("Could not find the exclusion constraint."))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
required_args = ['name']
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
else:
|
||||
required_args = ['columns']
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return _('-- definition incomplete')
|
||||
elif isinstance(data[arg], list) and len(data[arg]) < 1:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
return sql, data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, exid=None):
|
||||
"""
|
||||
|
|
@@ -0,0 +1,215 @@
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Exclusion Constraint. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = 'exclusion_constraint/sql/#{0}#'.format(
|
||||
conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_exclusion_constraints(conn, did, tid, exid=None, template_path=None):
|
||||
"""
|
||||
This function is used to fetch information of the
|
||||
exclusion constraint(s) for the given table.
|
||||
:param conn: Connection Object
|
||||
:param did: Database ID
|
||||
:param tid: Table ID
|
||||
:param exid: Exclusion Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=exid)
|
||||
|
||||
status, result = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=result)
|
||||
|
||||
for ex in result['rows']:
|
||||
sql = render_template("/".join([template_path,
|
||||
'get_constraint_cols.sql']),
|
||||
cid=ex['oid'], colcnt=ex['col_count'])
|
||||
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
for row in res['rows']:
|
||||
if row['options'] & 1:
|
||||
order = False
|
||||
nulls_order = True if (row['options'] & 2) else False
|
||||
else:
|
||||
order = True
|
||||
nulls_order = True if (row['options'] & 2) else False
|
||||
|
||||
columns.append({"column": row['coldef'].strip('"'),
|
||||
"oper_class": row['opcname'],
|
||||
"order": order,
|
||||
"nulls_order": nulls_order,
|
||||
"operator": row['oprname'],
|
||||
"col_type": row['datatype']
|
||||
})
|
||||
|
||||
ex['columns'] = columns
|
||||
|
||||
# INCLUDE clause in index is supported from PG-11+
|
||||
if conn.manager.version >= 110000:
|
||||
sql = render_template("/".join([template_path,
|
||||
'get_constraint_include.sql']),
|
||||
cid=ex['oid'])
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=res)
|
||||
|
||||
ex['include'] = [col['colname'] for col in res['rows']]
|
||||
|
||||
return True, result['rows']
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_exclusion_constraint_sql(conn, did, tid, data, template_path=None):
|
||||
"""
|
||||
This function will return sql for exclusion constraints.
|
||||
:param conn: Connection Object
|
||||
:param did: Database ID
|
||||
:param tid: Table ID
|
||||
:param data: Data
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = []
|
||||
# Check if constraint is in data
|
||||
# If yes then we need to check for add/change/delete
|
||||
if 'exclude_constraint' in data:
|
||||
constraint = data['exclude_constraint']
|
||||
# If constraint(s) is/are deleted
|
||||
if 'deleted' in constraint:
|
||||
for c in constraint['deleted']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
# Sql for drop
|
||||
sql.append(
|
||||
render_template("/".join(
|
||||
[template_path, 'delete.sql']),
|
||||
data=c, conn=conn).strip("\n")
|
||||
)
|
||||
|
||||
if 'changed' in constraint:
|
||||
for c in constraint['changed']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
modified_sql, name = get_sql(conn, c, did, tid, c['oid'])
|
||||
sql.append(modified_sql.strip('\n'))
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
add_sql, name = get_sql(conn, c, did, tid)
|
||||
sql.append(add_sql.strip("\n"))
|
||||
|
||||
if len(sql) > 0:
|
||||
# Join all the sql(s) as single string
|
||||
return '\n\n'.join(sql)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, did, tid, exid=None, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
:param conn: Connection Object
|
||||
:param data: data
|
||||
:param did: Database ID
|
||||
:param tid: Table id
|
||||
:param exid: Exclusion Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if exid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=exid)
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the exclusion constraint in the table.'))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
sql = render_template("/".join([template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
else:
|
||||
if 'columns' not in data:
|
||||
return _('-- definition incomplete'), name
|
||||
elif isinstance(data['columns'], list) and len(data['columns']) < 1:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
sql = render_template("/".join([template_path, 'create.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
return sql, name
|
@@ -20,6 +20,8 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.foreign_key import utils as fkey_utils
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
@@ -176,9 +178,6 @@ class ForeignKeyConstraintView(PGChildNodeView):
- This function is used to return modified SQL for the selected
|
||||
foreign key.
|
||||
|
||||
* get_sql()
|
||||
- This function will generate sql from model data.
|
||||
|
||||
* sql():
|
||||
- This function will generate sql to show it in sql pane for the
|
||||
selected foreign key.
|
||||
@@ -237,16 +236,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = fkey_utils.get_parent(self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
@@ -274,51 +266,18 @@ class ForeignKeyConstraintView(PGChildNodeView):
Returns:
|
||||
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
tid=tid, cid=fkid)
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
status, res = fkey_utils.get_foreign_keys(self.conn, tid, fkid)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return res
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
if len(res) == 0:
|
||||
return gone(_(
|
||||
"""Could not find the foreign key constraint in the table."""
|
||||
))
|
||||
|
||||
result = res['rows'][0]
|
||||
|
||||
sql = render_template("/".join([self.template_path,
|
||||
'get_constraint_cols.sql']),
|
||||
tid=tid,
|
||||
keys=zip(result['confkey'], result['conkey']),
|
||||
confrelid=result['confrelid'])
|
||||
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
cols = []
|
||||
for row in res['rows']:
|
||||
columns.append({"local_column": row['conattname'],
|
||||
"references": result['confrelid'],
|
||||
"referenced": row['confattname']})
|
||||
cols.append(row['conattname'])
|
||||
|
||||
result['columns'] = columns
|
||||
|
||||
result = res
|
||||
if fkid:
|
||||
coveringindex = self.search_coveringindex(tid, cols)
|
||||
result['coveringindex'] = coveringindex
|
||||
if coveringindex:
|
||||
result['autoindex'] = True
|
||||
result['hasindex'] = True
|
||||
else:
|
||||
result['autoindex'] = False
|
||||
result['hasindex'] = False
|
||||
result = res[0]
|
||||
|
||||
return ajax_response(
|
||||
response=result,
|
||||
@@ -373,16 +332,9 @@ class ForeignKeyConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = fkey_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
@@ -488,16 +440,10 @@ class ForeignKeyConstraintView(PGChildNodeView):
self.manager.version)
|
||||
|
||||
# We need parent's name eg table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
schema, table = fkey_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'nodes.sql']),
|
||||
@@ -575,15 +521,11 @@ class ForeignKeyConstraintView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=data['columns'][0]['references'])
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
data['remote_schema'] = res['rows'][0]['schema']
|
||||
data['remote_table'] = res['rows'][0]['table']
|
||||
# Get the parent schema and table.
|
||||
schema, table = fkey_utils.get_parent(
|
||||
self.conn, data['columns'][0]['references'])
|
||||
data['remote_schema'] = schema
|
||||
data['remote_table'] = table
|
||||
|
||||
if 'name' not in data or data['name'] == "":
|
||||
SQL = render_template(
|
||||
@@ -690,7 +632,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
try:
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
sql, name = self.get_sql(data, tid, fkid)
|
||||
sql, name = fkey_utils.get_sql(self.conn, data, tid, fkid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -826,7 +768,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
sql, name = self.get_sql(data, tid, fkid)
|
||||
sql, name = fkey_utils.get_sql(self.conn, data, tid, fkid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
@@ -840,96 +782,6 @@ class ForeignKeyConstraintView(PGChildNodeView):
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, data, tid, fkid=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
Args:
|
||||
data: Contains the data of the selected foreign key constraint.
|
||||
tid: Table ID.
|
||||
fkid: Foreign key constraint ID
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if fkid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
tid=tid, cid=fkid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(_("""Could not find the foreign key."""))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
required_args = ['name']
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
|
||||
if 'autoindex' in data and data['autoindex'] and \
|
||||
('coveringindex' in data and data['coveringindex'] != ''):
|
||||
|
||||
col_sql = render_template(
|
||||
"/".join([self.template_path, 'get_constraint_cols.sql']),
|
||||
tid=tid,
|
||||
keys=zip(old_data['confkey'], old_data['conkey']),
|
||||
confrelid=old_data['confrelid']
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(col_sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
for row in res['rows']:
|
||||
columns.append({"local_column": row['conattname'],
|
||||
"references": old_data['confrelid'],
|
||||
"referenced": row['confattname']})
|
||||
|
||||
data['columns'] = columns
|
||||
|
||||
sql += render_template(
|
||||
"/".join([self.template_path, 'create_index.sql']),
|
||||
data=data, conn=self.conn)
|
||||
else:
|
||||
required_args = ['columns']
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return _('-- definition incomplete')
|
||||
elif isinstance(data[arg], list) and len(data[arg]) < 1:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
if data['autoindex'] and \
|
||||
('coveringindex' not in data or
|
||||
data['coveringindex'] == ''):
|
||||
return _('-- definition incomplete')
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=data['columns'][0]['references'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
data['remote_schema'] = rset['rows'][0]['schema']
|
||||
data['remote_table'] = rset['rows'][0]['table']
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
|
||||
if data['autoindex']:
|
||||
sql += render_template(
|
||||
"/".join([self.template_path, 'create_index.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return sql, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, fkid=None):
|
||||
"""
|
||||
@@ -980,16 +832,11 @@ class ForeignKeyConstraintView(PGChildNodeView):
|
||||
data['columns'] = columns
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=data['columns'][0]['references'])
|
||||
status, res = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
data['remote_schema'] = res['rows'][0]['schema']
|
||||
data['remote_table'] = res['rows'][0]['table']
|
||||
# Get the parent schema and table.
|
||||
schema, table = fkey_utils.get_parent(self.conn,
|
||||
data['columns'][0]['references'])
|
||||
data['remote_schema'] = schema
|
||||
data['remote_table'] = table
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'create.sql']), data=data)
|
||||
@@ -1087,46 +934,6 @@ class ForeignKeyConstraintView(PGChildNodeView):
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def search_coveringindex(self, tid, cols):
|
||||
"""
|
||||
|
||||
Args:
|
||||
tid: Table id
|
||||
cols: column list
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
|
||||
cols = set(cols)
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_constraints.sql']),
|
||||
tid=tid)
|
||||
status, constraints = self.conn.execute_dict(SQL)
|
||||
|
||||
if not status:
|
||||
raise Exception(constraints)
|
||||
|
||||
for costrnt in constraints['rows']:
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_cols.sql']),
|
||||
cid=costrnt['oid'],
|
||||
colcnt=costrnt['col_count'])
|
||||
status, rest = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rest)
|
||||
|
||||
indexcols = set()
|
||||
for r in rest['rows']:
|
||||
indexcols.add(r['column'].strip('"'))
|
||||
|
||||
if len(cols - indexcols) == len(indexcols - cols) == 0:
|
||||
return costrnt["idxname"]
|
||||
|
||||
return None
|
||||
|
||||
@check_precondition
|
||||
def get_coveringindex(self, gid, sid, did, scid, tid=None):
|
||||
"""
|
||||
@@ -1146,7 +953,7 @@ class ForeignKeyConstraintView(PGChildNodeView):
try:
|
||||
if data and 'cols' in data:
|
||||
cols = set(json.loads(data['cols'], encoding='utf-8'))
|
||||
index = self.search_coveringindex(tid, cols)
|
||||
index = fkey_utils.search_coveringindex(self.conn, tid, cols)
|
||||
|
||||
return make_json_response(
|
||||
data=index,
|
||||
|
|
@@ -0,0 +1,289 @@
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Foreign Keys. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = 'foreign_key/sql/#{0}#'.format(
|
||||
conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
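A brief usage sketch of the decorator (illustrative only: conn is assumed to be a live pgAdmin connection object, and 110000 is just an example server version):

# Because the utilities below are decorated with @get_template_path, a caller
# may omit template_path and the decorator derives it from the connection:
schema, table = get_parent(conn, tid)
# ...which behaves like calling the undecorated function as:
schema, table = get_parent(conn, tid,
                           template_path='foreign_key/sql/#110000#')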
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_foreign_keys(conn, tid, fkid=None, template_path=None):
|
||||
"""
|
||||
This function is used to fetch information of the
|
||||
foreign key(s) for the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table ID
|
||||
:param fkid: Foreign Key ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = render_template("/".join(
|
||||
[template_path, 'properties.sql']), tid=tid, cid=fkid)
|
||||
|
||||
status, result = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=result)
|
||||
|
||||
for fk in result['rows']:
|
||||
sql = render_template("/".join([template_path,
|
||||
'get_constraint_cols.sql']),
|
||||
tid=tid,
|
||||
keys=zip(fk['confkey'], fk['conkey']),
|
||||
confrelid=fk['confrelid'])
|
||||
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
cols = []
|
||||
for row in res['rows']:
|
||||
columns.append({"local_column": row['conattname'],
|
||||
"references": fk['confrelid'],
|
||||
"referenced": row['confattname']})
|
||||
cols.append(row['conattname'])
|
||||
|
||||
fk['columns'] = columns
|
||||
|
||||
if not fkid:
|
||||
schema, table = get_parent(conn, fk['columns'][0]['references'])
|
||||
fk['remote_schema'] = schema
|
||||
fk['remote_table'] = table
|
||||
|
||||
coveringindex = search_coveringindex(conn, tid, cols)
|
||||
fk['coveringindex'] = coveringindex
|
||||
if coveringindex:
|
||||
fk['autoindex'] = True
|
||||
fk['hasindex'] = True
|
||||
else:
|
||||
fk['autoindex'] = False
|
||||
fk['hasindex'] = False
|
||||
|
||||
return True, result['rows']
|
||||
|
||||
|
||||
@get_template_path
|
||||
def search_coveringindex(conn, tid, cols, template_path=None):
|
||||
"""
|
||||
This function is used to search the covering index
|
||||
:param conn: Connection Object
|
||||
:param tid: Table id
|
||||
:param cols: Columns
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
cols = set(cols)
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_constraints.sql']),
|
||||
tid=tid)
|
||||
status, constraints = conn.execute_dict(SQL)
|
||||
if not status:
|
||||
raise Exception(constraints)
|
||||
|
||||
for constraint in constraints['rows']:
|
||||
sql = render_template(
|
||||
"/".join([template_path, 'get_cols.sql']),
|
||||
cid=constraint['oid'],
|
||||
colcnt=constraint['col_count'])
|
||||
status, rest = conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
raise Exception(rest)
|
||||
|
||||
index_cols = set()
|
||||
for r in rest['rows']:
|
||||
index_cols.add(r['column'].strip('"'))
|
||||
|
||||
if len(cols - index_cols) == len(index_cols - cols) == 0:
|
||||
return constraint["idxname"]
|
||||
|
||||
return None
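The covering-index match above reduces to a set-equality test; a minimal sketch of that comparison, with column names invented for illustration:

# A constraint index covers the foreign key only when the two column sets
# are identical, i.e. neither side has a column the other lacks.
cols = {'emp_id', 'dept_id'}          # columns of the foreign key
index_cols = {'dept_id', 'emp_id'}    # columns of a candidate index
covers = len(cols - index_cols) == len(index_cols - cols) == 0   # True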
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_foreign_key_sql(conn, tid, data, template_path=None):
|
||||
"""
|
||||
This function will return sql for foreign keys.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table ID
|
||||
:param data: Data
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = []
|
||||
# Check if constraint is in data
|
||||
# If yes then we need to check for add/change/delete
|
||||
if 'foreign_key' in data:
|
||||
constraint = data['foreign_key']
|
||||
# If constraint(s) is/are deleted
|
||||
if 'deleted' in constraint:
|
||||
for c in constraint['deleted']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
# Sql for drop
|
||||
sql.append(
|
||||
render_template("/".join(
|
||||
[template_path,
|
||||
'delete.sql']),
|
||||
data=c, conn=conn).strip('\n')
|
||||
)
|
||||
|
||||
if 'changed' in constraint:
|
||||
for c in constraint['changed']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
modified_sql, name = get_sql(conn, c, tid, c['oid'])
|
||||
sql.append(modified_sql.strip("\n"))
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
add_sql, name = get_sql(conn, c, tid)
|
||||
sql.append(add_sql.strip("\n"))
|
||||
|
||||
if len(sql) > 0:
|
||||
# Join all the sql(s) as single string
|
||||
return '\n\n'.join(sql)
|
||||
else:
|
||||
return None
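A hedged sketch of the data dictionary get_foreign_key_sql expects; the key names come from the code above, while the values (and the conn/tid variables) are invented for illustration:

data = {
    'schema': 'public',                    # parent schema name
    'name': 'employee',                    # parent table name
    'foreign_key': {
        'deleted': [{'name': 'fk_obsolete'}],
        'changed': [{'oid': 16395, 'name': 'fk_dept'}],
        'added': [{'name': 'fk_new', 'autoindex': False, 'columns': [
            {'local_column': 'dept_id', 'references': 16384,
             'referenced': 'id'}]}],
    },
}
ddl = get_foreign_key_sql(conn, tid, data)   # joined DDL string, or None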
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, tid, fkid=None, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
:param conn: Connection Object
|
||||
:param data: data
|
||||
:param tid: Table id
|
||||
:param fkid: Foreign Key
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if fkid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
tid=tid, cid=fkid)
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the foreign key constraint in the table.'))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
sql = render_template("/".join([template_path, 'update.sql']),
|
||||
data=data, o_data=old_data)
|
||||
|
||||
if 'autoindex' in data and data['autoindex'] and \
|
||||
('coveringindex' in data and data['coveringindex'] != ''):
|
||||
col_sql = render_template(
|
||||
"/".join([template_path, 'get_constraint_cols.sql']),
|
||||
tid=tid,
|
||||
keys=zip(old_data['confkey'], old_data['conkey']),
|
||||
confrelid=old_data['confrelid']
|
||||
)
|
||||
|
||||
status, res = conn.execute_dict(col_sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
columns = []
|
||||
for row in res['rows']:
|
||||
columns.append({"local_column": row['conattname'],
|
||||
"references": old_data['confrelid'],
|
||||
"referenced": row['confattname']})
|
||||
|
||||
data['columns'] = columns
|
||||
|
||||
sql += render_template(
|
||||
"/".join([template_path, 'create_index.sql']),
|
||||
data=data, conn=conn)
|
||||
else:
|
||||
if 'columns' not in data:
|
||||
return _('-- definition incomplete'), name
|
||||
elif isinstance(data['columns'], list) and len(data['columns']) < 1:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
if data['autoindex'] and \
|
||||
('coveringindex' not in data or data['coveringindex'] == ''):
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
# Get the parent schema and table.
|
||||
schema, table = get_parent(conn,
|
||||
data['columns'][0]['references'])
|
||||
data['remote_schema'] = schema
|
||||
data['remote_table'] = table
|
||||
|
||||
sql = render_template("/".join([template_path, 'create.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
if data['autoindex']:
|
||||
sql += render_template(
|
||||
"/".join([template_path, 'create_index.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
return sql, name
|
|
@@ -20,6 +20,8 @@ from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
|||
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.index_constraint import utils as idxcons_utils
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
|
@@ -247,16 +249,10 @@ class IndexConstraintView(PGChildNodeView):
|
|||
.format(self.manager.version)
|
||||
|
||||
# We need the parent's name, e.g. table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
schema, table = idxcons_utils.get_parent(self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return wrap
|
||||
|
@@ -284,49 +280,20 @@ class IndexConstraintView(PGChildNodeView):
|
|||
Returns:
|
||||
|
||||
"""
|
||||
sql = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
did=did,
|
||||
tid=tid,
|
||||
cid=cid,
|
||||
constraint_type=self.constraint_type)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
status, res = idxcons_utils.get_index_constraints(self.conn, did, tid,
|
||||
self.constraint_type,
|
||||
cid)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
return res
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
if len(res) == 0:
|
||||
return gone(_("""Could not find the {} in the table.""".format(
|
||||
"primary key" if self.constraint_type == "p" else "unique key"
|
||||
)))
|
||||
|
||||
result = res['rows'][0]
|
||||
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_constraint_cols.sql']),
|
||||
cid=cid,
|
||||
colcnt=result['col_count'])
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
for row in res['rows']:
|
||||
columns.append({"column": row['column'].strip('"')})
|
||||
|
||||
result['columns'] = columns
|
||||
|
||||
# Add Include details of the index supported for PG-11+
|
||||
if self.manager.version >= 110000:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'get_constraint_include.sql']),
|
||||
cid=cid)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
result['include'] = [col['colname'] for col in res['rows']]
|
||||
result = res
|
||||
if cid:
|
||||
result = res[0]
|
||||
|
||||
return ajax_response(
|
||||
response=result,
|
||||
|
@@ -381,16 +348,9 @@ class IndexConstraintView(PGChildNodeView):
|
|||
.format(self.manager.version)
|
||||
|
||||
# We need the parent's name, e.g. table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = idxcons_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
SQL = render_template("/".join([self.template_path, 'properties.sql']),
|
||||
did=did,
|
||||
|
@@ -503,16 +463,9 @@ class IndexConstraintView(PGChildNodeView):
|
|||
.format(self.manager.version)
|
||||
|
||||
# We need the parent's name, e.g. table name and schema name
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=tid)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = idxcons_utils.get_parent(self.conn, tid)
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
res = []
|
||||
SQL = render_template("/".join([self.template_path, 'nodes.sql']),
|
||||
|
@@ -686,7 +639,8 @@ class IndexConstraintView(PGChildNodeView):
|
|||
try:
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
sql, name = self.get_sql(data, did, tid, cid)
|
||||
sql, name = idxcons_utils.get_sql(self.conn, data, did, tid,
|
||||
self.constraint_type, cid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
@@ -826,7 +780,8 @@ class IndexConstraintView(PGChildNodeView):
|
|||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
sql, name = self.get_sql(data, did, tid, cid)
|
||||
sql, name = idxcons_utils.get_sql(self.conn, data, did, tid,
|
||||
self.constraint_type, cid)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
@@ -840,77 +795,6 @@ class IndexConstraintView(PGChildNodeView):
|
|||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, data, did, tid, cid=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
Args:
|
||||
data: Contains the data of the selected primary key constraint.
|
||||
tid: Table ID.
|
||||
cid: Primary key constraint ID
|
||||
|
||||
Returns:
|
||||
|
||||
"""
|
||||
if cid is not None:
|
||||
sql = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
did=did,
|
||||
tid=tid,
|
||||
cid=cid,
|
||||
constraint_type=self.constraint_type
|
||||
)
|
||||
status, res = self.conn.execute_dict(sql)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
_("""Could not find the {} in the table.""".format(
|
||||
"primary key" if self.constraint_type == "p"
|
||||
else "unique key"
|
||||
))
|
||||
)
|
||||
|
||||
old_data = res['rows'][0]
|
||||
required_args = [u'name']
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
data[arg] = old_data[arg]
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'update.sql']),
|
||||
data=data,
|
||||
o_data=old_data)
|
||||
else:
|
||||
required_args = [
|
||||
[u'columns', u'index'] # Either of one should be there.
|
||||
]
|
||||
|
||||
def is_key_str(key, data):
|
||||
return isinstance(data[key], str) and data[key] != ""
|
||||
|
||||
def is_key_list(key, data):
|
||||
return isinstance(data[key], list) and len(data[key]) > 0
|
||||
|
||||
for arg in required_args:
|
||||
if isinstance(arg, list):
|
||||
for param in arg:
|
||||
if param in data:
|
||||
if is_key_str(param, data) \
|
||||
or is_key_list(param, data):
|
||||
break
|
||||
else:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
elif arg not in data:
|
||||
return _('-- definition incomplete')
|
||||
|
||||
sql = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data,
|
||||
conn=self.conn,
|
||||
constraint_name=self.constraint_name)
|
||||
|
||||
return sql, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, cid=None):
|
||||
"""
|
||||
|
|
|
@@ -115,6 +115,13 @@
|
|||
"data": {
|
||||
"name": "Pk_$%{}[]()&*^!@\"'`\\/#a"
|
||||
}
|
||||
}, {
|
||||
"type": "delete",
|
||||
"name": "Drop Index",
|
||||
"endpoint": "NODE-index.delete_id",
|
||||
"data": {
|
||||
"name": "uindex"
|
||||
}
|
||||
}, {
|
||||
"type": "delete",
|
||||
"name": "Drop Index Constraint Table",
|
||||
|
|
|
@@ -115,6 +115,13 @@
|
|||
"data": {
|
||||
"name": "UC_$%{}[]()&*^!@\"'`\\/#a"
|
||||
}
|
||||
}, {
|
||||
"type": "delete",
|
||||
"name": "Drop Index",
|
||||
"endpoint": "NODE-index.delete_id",
|
||||
"data": {
|
||||
"name": "uindex"
|
||||
}
|
||||
}, {
|
||||
"type": "delete",
|
||||
"name": "Drop Unique Constraint Table",
|
||||
|
|
|
@@ -0,0 +1,233 @@
|
|||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Exclusion Constraint. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from functools import wraps
|
||||
from pgadmin.utils import IS_PY2
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = 'index_constraint/sql/#{0}#'.format(
|
||||
conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path:
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_index_constraints(conn, did, tid, ctype, cid=None, template_path=None):
|
||||
"""
|
||||
This function is used to fetch information of the
|
||||
index constraint(s) for the given table.
|
||||
:param conn: Connection Object
|
||||
:param did: Database ID
|
||||
:param tid: Table ID
|
||||
:param ctype: Constraint Type
|
||||
:param cid: index Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=cid, constraint_type=ctype)
|
||||
status, result = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=result)
|
||||
|
||||
for idx_cons in result['rows']:
|
||||
sql = render_template("/".join([template_path,
|
||||
'get_constraint_cols.sql']),
|
||||
cid=idx_cons['oid'],
|
||||
colcnt=idx_cons['col_count'])
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=res)
|
||||
|
||||
columns = []
|
||||
for r in res['rows']:
|
||||
columns.append({"column": r['column'].strip('"')})
|
||||
|
||||
idx_cons['columns'] = columns
|
||||
|
||||
# INCLUDE clause in index is supported from PG-11+
|
||||
if conn.manager.version >= 110000:
|
||||
sql = render_template("/".join([template_path,
|
||||
'get_constraint_include.sql']),
|
||||
cid=idx_cons['oid'])
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
return status, internal_server_error(errormsg=res)
|
||||
|
||||
idx_cons['include'] = [col['colname'] for col in res['rows']]
|
||||
|
||||
return True, result['rows']
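A hedged sketch of how a caller consumes get_index_constraints and of the row shape it returns; the OIDs and names are invented, and the include list is only populated on PostgreSQL 11 and above:

status, constraints = get_index_constraints(conn, did, tid, ctype='p')
if status:
    for cons in constraints:
        # Each row carries the constraint columns gathered above, e.g.
        # cons = {'oid': 16401, 'name': 'employee_pkey', 'col_count': 1,
        #         'columns': [{'column': 'id'}], 'include': []}
        print(cons['name'], [c['column'] for c in cons['columns']])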
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_index_constraint_sql(conn, did, tid, data, template_path=None):
|
||||
"""
|
||||
This function will return sql for index constraints.
|
||||
:param conn: Connection Object
|
||||
:param did: Database ID
|
||||
:param tid: Table ID
|
||||
:param data: Data
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
sql = []
|
||||
# We will fetch all the index constraints for the table
|
||||
index_constraints = {
|
||||
'p': 'primary_key', 'u': 'unique_constraint'
|
||||
}
|
||||
|
||||
for ctype in index_constraints.keys():
|
||||
# Check if constraint is in data
|
||||
# If yes then we need to check for add/change/delete
|
||||
if index_constraints[ctype] in data:
|
||||
constraint = data[index_constraints[ctype]]
|
||||
# If constraint(s) is/are deleted
|
||||
if 'deleted' in constraint:
|
||||
for c in constraint['deleted']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
# Sql for drop
|
||||
sql.append(render_template("/".join([template_path,
|
||||
'delete.sql']),
|
||||
data=c,
|
||||
conn=conn).strip('\n'))
|
||||
if 'changed' in constraint:
|
||||
for c in constraint['changed']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
modified_sql, name = get_sql(conn, c, did, tid, ctype,
|
||||
c['oid'])
|
||||
sql.append(modified_sql.strip('\n'))
|
||||
|
||||
if 'added' in constraint:
|
||||
for c in constraint['added']:
|
||||
c['schema'] = data['schema']
|
||||
c['table'] = data['name']
|
||||
|
||||
add_sql, name = get_sql(conn, c, did, tid, ctype)
|
||||
sql.append(add_sql.strip("\n"))
|
||||
|
||||
if len(sql) > 0:
|
||||
# Join all the sql(s) as single string
|
||||
return '\n\n'.join(sql)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, did, tid, ctype, cid=None, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
:param conn: Connection Object
|
||||
:param data: data
|
||||
:param did: Database ID
|
||||
:param tid: Table id
|
||||
:param ctype: Constraint Type
|
||||
:param cid: index Constraint ID
|
||||
:param template_path: Template Path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if cid is not None:
|
||||
sql = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, cid=cid,
|
||||
constraint_type=ctype)
|
||||
status, res = conn.execute_dict(sql)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(
|
||||
_('Could not find the constraint in the table.'))
|
||||
|
||||
old_data = res['rows'][0]
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
sql = render_template("/".join([template_path, 'update.sql']),
|
||||
data=data,
|
||||
o_data=old_data)
|
||||
else:
|
||||
required_args = [
|
||||
[u'columns', u'index'] # Either of one should be there.
|
||||
]
|
||||
|
||||
def is_key_str(key, data):
|
||||
return isinstance(data[key], (str, unicode)) and data[key] != ""
|
||||
|
||||
def is_key_list(key, data):
|
||||
return isinstance(data[key], list) and len(data[key]) > 0
|
||||
|
||||
for arg in required_args:
|
||||
if isinstance(arg, list):
|
||||
for param in arg:
|
||||
if param in data:
|
||||
if is_key_str(param, data) or is_key_list(param, data):
|
||||
break
|
||||
else:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
elif arg not in data:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
sql = render_template("/".join([template_path, 'create.sql']),
|
||||
data=data,
|
||||
conn=conn,
|
||||
constraint_name='PRIMARY KEY'
|
||||
if ctype == 'p' else 'UNIQUE')
|
||||
return sql, name
|
|
@@ -25,6 +25,8 @@ from pgadmin.utils.compile_template_name import compile_template_path
|
|||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas. \
|
||||
tables.indexes import utils as index_utils
|
||||
# If we are in Python3
|
||||
if not IS_PY2:
|
||||
unicode = str
|
||||
|
@@ -256,17 +258,9 @@ class IndexesView(PGChildNodeView):
|
|||
# We need the parent's name, e.g. table name and schema name
|
||||
# when we create new index in update we can fetch it using
|
||||
# property sql
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_parent.sql']),
|
||||
tid=kwargs['tid']
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
schema, table = index_utils.get_parent(self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
|
@@ -474,99 +468,6 @@ class IndexesView(PGChildNodeView):
|
|||
status=200
|
||||
)
|
||||
|
||||
def _column_details(self, idx, data, mode='properties'):
|
||||
"""
|
||||
This function will fetch the list of column details for the index
|
||||
|
||||
Args:
|
||||
idx: Index OID
|
||||
data: Properties data
|
||||
|
||||
Returns:
|
||||
Updated properties data with column details
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'column_details.sql']), idx=idx
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
# 'attdef' comes with quotes from query so we need to strip them
|
||||
# 'options' we need true/false to render switch ASC(false)/DESC(true)
|
||||
columns = []
|
||||
cols = []
|
||||
for row in rset['rows']:
|
||||
# We need all data as collection for ColumnsModel
|
||||
# we will not strip down colname when using in SQL to display
|
||||
cols_data = {
|
||||
'colname': row['attdef'] if mode == 'create' else
|
||||
row['attdef'].strip('"'),
|
||||
'collspcname': row['collnspname'],
|
||||
'op_class': row['opcname'],
|
||||
}
|
||||
|
||||
# ASC/DESC and NULLS works only with btree indexes
|
||||
if 'amname' in data and data['amname'] == 'btree':
|
||||
cols_data['sort_order'] = False
|
||||
if row['options'][0] == 'DESC':
|
||||
cols_data['sort_order'] = True
|
||||
|
||||
cols_data['nulls'] = False
|
||||
if row['options'][1].split(" ")[1] == 'FIRST':
|
||||
cols_data['nulls'] = True
|
||||
|
||||
columns.append(cols_data)
|
||||
|
||||
# We need same data as string to display in properties window
|
||||
# If there are multiple columns then separate them with commas
|
||||
cols_str = row['attdef']
|
||||
if row['collnspname']:
|
||||
cols_str += ' COLLATE ' + row['collnspname']
|
||||
if row['opcname']:
|
||||
cols_str += ' ' + row['opcname']
|
||||
|
||||
# ASC/DESC and NULLS works only with btree indexes
|
||||
if 'amname' in data and data['amname'] == 'btree':
|
||||
# Append sort order
|
||||
cols_str += ' ' + row['options'][0]
|
||||
# Append nulls value
|
||||
cols_str += ' ' + row['options'][1]
|
||||
|
||||
cols.append(cols_str)
|
||||
|
||||
# Push as collection
|
||||
data['columns'] = columns
|
||||
# Push as string
|
||||
data['columns_csv'] = ', '.join(cols)
|
||||
|
||||
return data
|
||||
|
||||
def _include_details(self, idx, data, mode='properties'):
|
||||
"""
|
||||
This function will fetch the list of include details for the index
|
||||
supported with Postgres 11+
|
||||
|
||||
Args:
|
||||
idx: Index OID
|
||||
data: Properties data
|
||||
|
||||
Returns:
|
||||
Updated properties data with include details
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'include_details.sql']), idx=idx
|
||||
)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
# Push as collection
|
||||
data['include'] = [col['colname'] for col in rset['rows']]
|
||||
return data
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, tid, idx):
|
||||
"""
|
||||
|
@@ -602,11 +503,11 @@ class IndexesView(PGChildNodeView):
|
|||
data = dict(res['rows'][0])
|
||||
|
||||
# Add column details for current index
|
||||
data = self._column_details(idx, data)
|
||||
data = index_utils.get_column_details(self.conn, idx, data)
|
||||
|
||||
# Add Include details of the index
|
||||
if self.manager.version >= 110000:
|
||||
data = self._include_details(idx, data)
|
||||
data = index_utils.get_include_details(self.conn, idx, data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
|
@@ -806,7 +707,8 @@ class IndexesView(PGChildNodeView):
|
|||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
try:
|
||||
SQL, name = self.get_sql(did, scid, tid, idx, data)
|
||||
SQL, name = index_utils.get_sql(
|
||||
self.conn, data, did, tid, idx, self.datlastsysoid)
|
||||
if not isinstance(SQL, (str, unicode)):
|
||||
return SQL
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
|
@@ -855,7 +757,9 @@ class IndexesView(PGChildNodeView):
|
|||
data['table'] = self.table
|
||||
|
||||
try:
|
||||
sql, name = self.get_sql(did, scid, tid, idx, data, mode='create')
|
||||
sql, name = index_utils.get_sql(
|
||||
self.conn, data, did, tid, idx, self.datlastsysoid,
|
||||
mode='create')
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
@@ -868,64 +772,6 @@ class IndexesView(PGChildNodeView):
|
|||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_sql(self, did, scid, tid, idx, data, mode=None):
|
||||
"""
|
||||
This function will generate sql from model data
|
||||
"""
|
||||
if idx is not None:
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
did=did, tid=tid, idx=idx, datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("""Could not find the index in the table.""")
|
||||
)
|
||||
|
||||
old_data = dict(res['rows'][0])
|
||||
|
||||
# If name is not present in data then
|
||||
# we will fetch it from old data, we also need schema & table name
|
||||
if 'name' not in data:
|
||||
data['name'] = old_data['name']
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
else:
|
||||
required_args = {
|
||||
'name': 'Name',
|
||||
'columns': 'Columns'
|
||||
}
|
||||
for arg in required_args:
|
||||
err = False
|
||||
if arg == 'columns' and len(data['columns']) < 1:
|
||||
err = True
|
||||
|
||||
if arg not in data:
|
||||
err = True
|
||||
# Check if we have at least one column
|
||||
if err:
|
||||
return gettext('-- definition incomplete')
|
||||
|
||||
# If the request for new object which do not have did
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn, mode=mode
|
||||
)
|
||||
SQL += "\n"
|
||||
SQL += render_template(
|
||||
"/".join([self.template_path, 'alter.sql']),
|
||||
data=data, conn=self.conn
|
||||
)
|
||||
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
@check_precondition
|
||||
def sql(self, gid, sid, did, scid, tid, idx):
|
||||
"""
|
||||
|
@@ -940,40 +786,9 @@ class IndexesView(PGChildNodeView):
|
|||
idx: Index ID
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'properties.sql']),
|
||||
did=did, tid=tid, idx=idx, datlastsysoid=self.datlastsysoid
|
||||
)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(gettext("""Could not find the index in the table."""))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
# Add column details for current index
|
||||
data = self._column_details(idx, data, 'create')
|
||||
|
||||
# Add Include details of the index
|
||||
if self.manager.version >= 110000:
|
||||
data = self._include_details(idx, data, 'create')
|
||||
|
||||
SQL, name = self.get_sql(did, scid, tid, None, data)
|
||||
if not isinstance(SQL, (str, unicode)):
|
||||
return SQL
|
||||
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template(
|
||||
"/".join([self.template_path, 'delete.sql']),
|
||||
data=data, conn=self.conn
|
||||
)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL
|
||||
SQL = index_utils.get_reverse_engineered_sql(
|
||||
self.conn, self.schema, self.table, did, tid, idx,
|
||||
self.datlastsysoid)
|
||||
|
||||
return ajax_response(response=SQL)
|
||||
|
||||
|
|
|
@@ -0,0 +1,277 @@
|
|||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
""" Implements Utility class for Indexes. """
|
||||
|
||||
from flask import render_template
|
||||
from flask_babelex import gettext as _
|
||||
from pgadmin.utils.ajax import internal_server_error
|
||||
from pgadmin.utils.exception import ObjectGone
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def get_template_path(f):
|
||||
"""
|
||||
This function will behave as a decorator which will prepare
|
||||
the template path based on database server version.
|
||||
"""
|
||||
|
||||
@wraps(f)
|
||||
def wrap(*args, **kwargs):
|
||||
# Here args[0] will hold the connection object
|
||||
conn_obj = args[0]
|
||||
if 'template_path' not in kwargs:
|
||||
kwargs['template_path'] = \
|
||||
'indexes/sql/#{0}#'.format(conn_obj.manager.version)
|
||||
|
||||
return f(*args, **kwargs)
|
||||
return wrap
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_parent(conn, tid, template_path=None):
|
||||
"""
|
||||
This function will return the parent of the given table.
|
||||
:param conn: Connection Object
|
||||
:param tid: Table oid
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([template_path,
|
||||
'get_parent.sql']), tid=tid)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
raise Exception(rset)
|
||||
|
||||
schema = ''
|
||||
table = ''
|
||||
if 'rows' in rset and len(rset['rows']) > 0:
|
||||
schema = rset['rows'][0]['schema']
|
||||
table = rset['rows'][0]['table']
|
||||
|
||||
return schema, table
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_column_details(conn, idx, data, mode='properties', template_path=None):
|
||||
"""
|
||||
This function will fetch the list of columns for the index.
|
||||
|
||||
:param conn: Connection Object
|
||||
:param idx: Index ID
|
||||
:param data: Data
|
||||
:param mode: 'create' or 'properties'
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([template_path, 'column_details.sql']), idx=idx
|
||||
)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
# 'attdef' comes with quotes from query so we need to strip them
|
||||
# 'options' we need true/false to render switch ASC(false)/DESC(true)
|
||||
columns = []
|
||||
cols = []
|
||||
for row in rset['rows']:
|
||||
# We need all data as collection for ColumnsModel
|
||||
# we will not strip down colname when using in SQL to display
|
||||
cols_data = {
|
||||
'colname': row['attdef'] if mode == 'create' else
|
||||
row['attdef'].strip('"'),
|
||||
'collspcname': row['collnspname'],
|
||||
'op_class': row['opcname'],
|
||||
}
|
||||
|
||||
# ASC/DESC and NULLS works only with btree indexes
|
||||
if 'amname' in data and data['amname'] == 'btree':
|
||||
cols_data['sort_order'] = False
|
||||
if row['options'][0] == 'DESC':
|
||||
cols_data['sort_order'] = True
|
||||
|
||||
cols_data['nulls'] = False
|
||||
if row['options'][1].split(" ")[1] == 'FIRST':
|
||||
cols_data['nulls'] = True
|
||||
|
||||
columns.append(cols_data)
|
||||
|
||||
# We need same data as string to display in properties window
|
||||
# If there are multiple columns then separate them with commas
|
||||
cols_str = row['attdef']
|
||||
if row['collnspname']:
|
||||
cols_str += ' COLLATE ' + row['collnspname']
|
||||
if row['opcname']:
|
||||
cols_str += ' ' + row['opcname']
|
||||
|
||||
# ASC/DESC and NULLS works only with btree indexes
|
||||
if 'amname' in data and data['amname'] == 'btree':
|
||||
# Append sort order
|
||||
cols_str += ' ' + row['options'][0]
|
||||
# Append nulls value
|
||||
cols_str += ' ' + row['options'][1]
|
||||
|
||||
cols.append(cols_str)
|
||||
|
||||
# Push as collection
|
||||
data['columns'] = columns
|
||||
# Push as string
|
||||
data['columns_csv'] = ', '.join(cols)
|
||||
|
||||
return data
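A short sketch of what get_column_details adds to the passed dictionary; conn and idx are assumed to exist, and the column values are invented:

data = {'amname': 'btree'}               # access method decides sort/nulls
data = get_column_details(conn, idx, data)
# data['columns']     -> list of dicts such as
#                        {'colname': 'name', 'collspcname': None,
#                         'op_class': None, 'sort_order': False,
#                         'nulls': False}
# data['columns_csv'] -> display string such as 'name ASC NULLS LAST'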
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_include_details(conn, idx, data, template_path=None):
|
||||
"""
|
||||
This function will fetch the list of include details for the index
|
||||
supported with Postgres 11+
|
||||
|
||||
:param conn: Connection object
|
||||
:param idx: Index ID
|
||||
:param data: data
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([template_path, 'include_details.sql']), idx=idx
|
||||
)
|
||||
status, rset = conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
# Push as collection
|
||||
data['include'] = [col['colname'] for col in rset['rows']]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_sql(conn, data, did, tid, idx, datlastsysoid,
|
||||
mode=None, template_path=None):
|
||||
"""
|
||||
This function will generate sql from model data.
|
||||
|
||||
:param conn: Connection Object
|
||||
:param data: Data
|
||||
:param did: Database ID
|
||||
:param tid: Table ID
|
||||
:param idx: Index ID
|
||||
:param datlastsysoid:
|
||||
:param mode:
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
name = data['name'] if 'name' in data else None
|
||||
if idx is not None:
|
||||
SQL = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, idx=idx,
|
||||
datlastsysoid=datlastsysoid)
|
||||
|
||||
status, res = conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(_('Could not find the index in the table.'))
|
||||
|
||||
old_data = dict(res['rows'][0])
|
||||
|
||||
# If name is not present in data then
|
||||
# we will fetch it from old data, we also need schema & table name
|
||||
if 'name' not in data:
|
||||
name = data['name'] = old_data['name']
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=conn
|
||||
)
|
||||
else:
|
||||
required_args = {
|
||||
'name': 'Name',
|
||||
'columns': 'Columns'
|
||||
}
|
||||
for arg in required_args:
|
||||
err = False
|
||||
if arg == 'columns' and len(data['columns']) < 1:
|
||||
err = True
|
||||
|
||||
if arg not in data:
|
||||
err = True
|
||||
# Check if we have at least one column
|
||||
if err:
|
||||
return _('-- definition incomplete'), name
|
||||
|
||||
# If the request for new object which do not have did
|
||||
SQL = render_template(
|
||||
"/".join([template_path, 'create.sql']),
|
||||
data=data, conn=conn, mode=mode
|
||||
)
|
||||
SQL += "\n"
|
||||
SQL += render_template(
|
||||
"/".join([template_path, 'alter.sql']),
|
||||
data=data, conn=conn
|
||||
)
|
||||
|
||||
return SQL, name
|
||||
|
||||
|
||||
@get_template_path
|
||||
def get_reverse_engineered_sql(conn, schema, table, did, tid, idx,
|
||||
datlastsysoid,
|
||||
template_path=None):
|
||||
"""
|
||||
This function will return reverse engineered sql for the specified index.
|
||||
|
||||
:param conn: Connection Object
|
||||
:param schema: Schema
|
||||
:param table: Table
|
||||
:param did: Database ID
:param tid: Table ID
|
||||
:param idx: Index ID
|
||||
:param datlastsysoid:
|
||||
:param template_path: Optional template path
|
||||
:return:
|
||||
"""
|
||||
SQL = render_template("/".join([template_path, 'properties.sql']),
|
||||
did=did, tid=tid, idx=idx,
|
||||
datlastsysoid=datlastsysoid)
|
||||
|
||||
status, res = conn.execute_dict(SQL)
|
||||
if not status:
|
||||
raise Exception(res)
|
||||
|
||||
if len(res['rows']) == 0:
|
||||
raise ObjectGone(_('Could not find the index in the table.'))
|
||||
|
||||
data = dict(res['rows'][0])
|
||||
# Adding parent into data dict, will be using it while creating sql
|
||||
data['schema'] = schema
|
||||
data['table'] = table
|
||||
|
||||
# Add column details for current index
|
||||
data = get_column_details(conn, idx, data, 'create')
|
||||
|
||||
# Add Include details of the index
|
||||
if conn.manager.version >= 110000:
|
||||
data = get_include_details(conn, idx, data)
|
||||
|
||||
SQL, name = get_sql(conn, data, did, tid, None, datlastsysoid)
|
||||
|
||||
sql_header = u"-- Index: {0}\n\n-- ".format(data['name'])
|
||||
|
||||
sql_header += render_template("/".join([template_path, 'delete.sql']),
|
||||
data=data, conn=conn)
|
||||
|
||||
SQL = sql_header + '\n\n' + SQL
|
||||
|
||||
return SQL
|
|
@@ -26,8 +26,8 @@
|
|||
{% if data.columns|length > 0 %}{% if loop.index !=1 %},{% endif %}
|
||||
|
||||
{% if data.name %}CONSTRAINT {{conn|qtIdent(data.name)}} {% endif %}UNIQUE ({% for c in data.columns%}
|
||||
{% if loop.index != 1 %}, {% endif %}{{conn|qtIdent(c.column)}}{% endfor %})
|
||||
{% if data.include|length > 0 %}
|
||||
{% if loop.index != 1 %}, {% endif %}{{conn|qtIdent(c.column)}}{% endfor %}){% if data.include|length > 0 %}
|
||||
|
||||
INCLUDE({% for col in data.include %}{% if loop.index != 1 %}, {% endif %}{{conn|qtIdent(col)}}{% endfor %}){% endif %}
|
||||
{% if data.fillfactor %}
|
||||
|
||||
|
@@ -89,8 +89,10 @@
|
|||
INCLUDE({% for col in data.include %}{% if loop.index != 1 %}, {% endif %}{{conn|qtIdent(col)}}{% endfor %})
|
||||
{% endif %}{% if data.fillfactor %}
|
||||
WITH (FILLFACTOR={{data.fillfactor}}){% endif %}{% if data.spcname and data.spcname != "pg_default" %}
|
||||
USING INDEX TABLESPACE {{ conn|qtIdent(data.spcname) }}{% endif %}
|
||||
{% if data.indconstraint %}WHERE ({{data.indconstraint}}){% endif%}{% if data.condeferrable %}
|
||||
|
||||
USING INDEX TABLESPACE {{ conn|qtIdent(data.spcname) }}{% endif %}{% if data.indconstraint %}
|
||||
|
||||
WHERE ({{data.indconstraint}}){% endif%}{% if data.condeferrable %}
|
||||
|
||||
DEFERRABLE{% if data.condeferred %}
|
||||
INITIALLY DEFERRED{% endif%}
|
||||
|
|
|
@@ -17,6 +17,7 @@ CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|
|
|||
ON {{ conn|qtIdent(data.schema, data.table) }}
|
||||
{% if data.tgdeferrable %}
|
||||
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{% if data.tgoldtable or data.tgnewtable %}
|
||||
REFERENCING{% if data.tgnewtable %} NEW TABLE AS {{ conn|qtIdent(data.tgnewtable) }}{% endif %}{% if data.tgoldtable %} OLD TABLE AS {{ conn|qtIdent(data.tgoldtable) }}{% endif %}
|
||||
|
|
|
@@ -17,6 +17,7 @@ CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|
|
|||
ON {{ conn|qtIdent(data.schema, data.table) }}
|
||||
{% if data.tgdeferrable %}
|
||||
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
|
||||
|
||||
{% endif %}
|
||||
FOR EACH{% if data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %}
|
||||
{% if data.whenclause %}
|
||||
|
|
|
@@ -17,6 +17,7 @@ CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|
|
|||
ON {{ conn|qtIdent(data.schema, data.table) }}
|
||||
{% if data.tgdeferrable %}
|
||||
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
|
||||
|
||||
{% endif %}
|
||||
{% if data.tgoldtable or data.tgnewtable %}
|
||||
REFERENCING{% if data.tgnewtable %} NEW TABLE AS {{ conn|qtIdent(data.tgnewtable) }}{% endif %}{% if data.tgoldtable %} OLD TABLE AS {{ conn|qtIdent(data.tgoldtable) }}{% endif %}
|
||||
|
|
|
@@ -17,6 +17,7 @@ CREATE{% if data.is_constraint_trigger %} CONSTRAINT{% endif %} TRIGGER {{ conn|
|
|||
ON {{ conn|qtIdent(data.schema, data.table) }}
|
||||
{% if data.tgdeferrable %}
|
||||
DEFERRABLE{% if data.tginitdeferred %} INITIALLY DEFERRED{% endif %}
|
||||
|
||||
{% endif %}
|
||||
FOR EACH{% if data.is_row_trigger %} ROW{% else %} STATEMENT{% endif %}
|
||||
{% if data.whenclause %}
|
||||
|
|
|
@@ -55,15 +55,6 @@ class TestUtils(BaseTestGenerator):
|
|||
subject.table_template_path, 'tables/sql/#gpdb#10#')
|
||||
self.assertEqual(
|
||||
subject.data_type_template_path, 'datatype/sql/#gpdb#10#')
|
||||
self.assertEqual(
|
||||
subject.check_constraint_template_path,
|
||||
'check_constraint/sql/#gpdb#10#')
|
||||
self.assertEqual(
|
||||
subject.exclusion_constraint_template_path,
|
||||
'exclusion_constraint/sql/#gpdb#10#')
|
||||
self.assertEqual(
|
||||
subject.foreign_key_template_path,
|
||||
'foreign_key/sql/#gpdb#10#')
|
||||
self.assertEqual(
|
||||
subject.index_template_path,
|
||||
'indexes/sql/#gpdb#10#')
|
||||
|
|
|
@@ -19,6 +19,10 @@ from pgadmin.browser.collection import CollectionNodeModule
|
|||
from pgadmin.browser.utils import PGChildNodeView
|
||||
from pgadmin.utils.ajax import make_json_response, internal_server_error, \
|
||||
make_response as ajax_response, gone
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
triggers import utils as trigger_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
|
||||
import trigger_definition
|
||||
from pgadmin.utils.driver import get_driver
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.utils import IS_PY2
|
||||
|
@@ -192,9 +196,6 @@ class TriggerView(PGChildNodeView):
|
|||
- This function is used to return modified SQL for the selected
|
||||
Trigger node
|
||||
|
||||
* get_sql(data, scid, tid, trid)
|
||||
- This function will generate sql from model data
|
||||
|
||||
* sql(gid, sid, did, scid, tid, trid):
|
||||
- This function will generate sql to show it in sql pane for the
|
||||
selected Trigger node.
|
||||
|
@@ -210,13 +211,6 @@ class TriggerView(PGChildNodeView):
|
|||
* get_trigger_functions(gid, sid, did, scid, tid, trid):
|
||||
- This function will return list of trigger functions available
|
||||
via AJAX response
|
||||
|
||||
* _column_details(tid, clist)::
|
||||
- This function will fetch the columns for trigger
|
||||
|
||||
* _trigger_definition(data):
|
||||
- This function will set additional trigger definitions in
|
||||
AJAX response
|
||||
"""
|
||||
|
||||
node_type = blueprint.node_type
|
||||
|
@@ -280,28 +274,9 @@ class TriggerView(PGChildNodeView):
|
|||
# We need the parent's name, e.g. table name and schema name
|
||||
# when we create new trigger in update we can fetch it using
|
||||
# property sql
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_parent.sql']),
|
||||
tid=kwargs['tid'])
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
|
||||
for row in rset['rows']:
|
||||
self.schema = row['schema']
|
||||
self.table = row['table']
|
||||
|
||||
# Here we are storing trigger definition
|
||||
# We will use it to check trigger type definition
|
||||
self.trigger_definition = {
|
||||
'TRIGGER_TYPE_ROW': (1 << 0),
|
||||
'TRIGGER_TYPE_BEFORE': (1 << 1),
|
||||
'TRIGGER_TYPE_INSERT': (1 << 2),
|
||||
'TRIGGER_TYPE_DELETE': (1 << 3),
|
||||
'TRIGGER_TYPE_UPDATE': (1 << 4),
|
||||
'TRIGGER_TYPE_TRUNCATE': (1 << 5),
|
||||
'TRIGGER_TYPE_INSTEAD': (1 << 6)
|
||||
}
|
||||
schema, table = trigger_utils.get_parent(self.conn, kwargs['tid'])
|
||||
self.schema = schema
|
||||
self.table = table
|
||||
|
||||
return f(*args, **kwargs)
|
||||
|
||||
|
@@ -458,93 +433,6 @@ class TriggerView(PGChildNodeView):
|
|||
status=200
|
||||
)
|
||||
|
||||
def _column_details(self, tid, clist):
|
||||
"""
|
||||
This function will fetch the list of columns for the trigger
|
||||
|
||||
Args:
|
||||
tid: Table OID
|
||||
clist: List of columns
|
||||
|
||||
Returns:
|
||||
Updated properties data with column
|
||||
"""
|
||||
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'get_columns.sql']),
|
||||
tid=tid, clist=clist)
|
||||
status, rset = self.conn.execute_2darray(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=rset)
|
||||
# 'tgattr' contains list of columns from table used in trigger
|
||||
columns = []
|
||||
|
||||
for row in rset['rows']:
|
||||
columns.append(row['name'])
|
||||
|
||||
return columns
|
||||
|
||||
def _trigger_definition(self, data):
|
||||
"""
|
||||
This function will set the trigger definition
|
||||
|
||||
Args:
|
||||
data: Properties data
|
||||
|
||||
Returns:
|
||||
Updated properties data with trigger definition
|
||||
"""
|
||||
|
||||
# Fires event definition
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_BEFORE']:
|
||||
data['fires'] = 'BEFORE'
|
||||
elif data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_INSTEAD']:
|
||||
data['fires'] = 'INSTEAD OF'
|
||||
else:
|
||||
data['fires'] = 'AFTER'
|
||||
|
||||
# Trigger of type definition
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_ROW']:
|
||||
data['is_row_trigger'] = True
|
||||
else:
|
||||
data['is_row_trigger'] = False
|
||||
|
||||
# Event definition
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_INSERT']:
|
||||
data['evnt_insert'] = True
|
||||
else:
|
||||
data['evnt_insert'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_DELETE']:
|
||||
data['evnt_delete'] = True
|
||||
else:
|
||||
data['evnt_delete'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_UPDATE']:
|
||||
data['evnt_update'] = True
|
||||
else:
|
||||
data['evnt_update'] = False
|
||||
|
||||
if data['tgtype'] & self.trigger_definition['TRIGGER_TYPE_TRUNCATE']:
|
||||
data['evnt_truncate'] = True
|
||||
else:
|
||||
data['evnt_truncate'] = False
|
||||
|
||||
return data
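The tgtype decoding above is a plain bit-mask check against the flag values defined earlier in this view; a small worked example with an invented tgtype value:

# tgtype = 23 has bits 0, 1, 2 and 4 set.
tgtype = 23
assert tgtype & (1 << 0)        # TRIGGER_TYPE_ROW     -> is_row_trigger True
assert tgtype & (1 << 1)        # TRIGGER_TYPE_BEFORE  -> fires = 'BEFORE'
assert tgtype & (1 << 2)        # TRIGGER_TYPE_INSERT  -> evnt_insert True
assert tgtype & (1 << 4)        # TRIGGER_TYPE_UPDATE  -> evnt_update True
assert not tgtype & (1 << 3)    # TRIGGER_TYPE_DELETE  -> evnt_delete False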
|
||||
|
||||
def _format_args(self, args):
|
||||
"""
|
||||
This function will format arguments.
|
||||
|
||||
Args:
|
||||
args: Arguments
|
||||
|
||||
Returns:
|
||||
Formatted arguments for the function
|
||||
"""
|
||||
formatted_args = ["'{0}'".format(arg) for arg in args]
|
||||
return ', '.join(formatted_args)
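For example, the formatter above simply single-quotes each argument and joins them with a comma; an equivalent standalone illustration (argument values invented):

args = ['42', 'abc']
formatted = ', '.join("'{0}'".format(a) for a in args)   # "'42', 'abc'"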
|
||||
|
||||
@check_precondition
|
||||
def properties(self, gid, sid, did, scid, tid, trid):
|
||||
"""
|
||||
|
@@ -579,18 +467,10 @@ class TriggerView(PGChildNodeView):
|
|||
|
||||
# Making copy of output for future use
|
||||
data = dict(res['rows'][0])
|
||||
data = self.get_trigger_function_schema(data)
|
||||
data = trigger_utils.get_trigger_function_and_columns(
|
||||
self.conn, data, tid, self.blueprint.show_system_objects)
|
||||
|
||||
if len(data['custom_tgargs']) > 1:
|
||||
# We know that trigger has more than 1 argument, let's join them
|
||||
# and convert it to string
|
||||
data['tgargs'] = self._format_args(data['custom_tgargs'])
|
||||
|
||||
if len(data['tgattr']) >= 1:
|
||||
columns = ', '.join(data['tgattr'].split(' '))
|
||||
data['columns'] = self._column_details(tid, columns)
|
||||
|
||||
data = self._trigger_definition(data)
|
||||
data = trigger_definition(data)
|
||||
|
||||
return ajax_response(
|
||||
response=data,
|
||||
|
@@ -763,7 +643,10 @@ class TriggerView(PGChildNodeView):
|
|||
data['schema'] = self.schema
|
||||
data['table'] = self.table
|
||||
|
||||
SQL, name = self.get_sql(scid, tid, trid, data)
|
||||
SQL, name = trigger_utils.get_sql(
|
||||
self.conn, data, tid, trid, self.datlastsysoid,
|
||||
self.blueprint.show_system_objects)
|
||||
|
||||
if not isinstance(SQL, (str, unicode)):
|
||||
return SQL
|
||||
SQL = SQL.strip('\n').strip(' ')
|
||||
|
@@ -842,7 +725,9 @@ class TriggerView(PGChildNodeView):
|
|||
data['table'] = self.table
|
||||
|
||||
try:
|
||||
sql, name = self.get_sql(scid, tid, trid, data)
|
||||
sql, name = trigger_utils.get_sql(
|
||||
self.conn, data, tid, trid, self.datlastsysoid,
|
||||
self.blueprint.show_system_objects)
|
||||
if not isinstance(sql, (str, unicode)):
|
||||
return sql
|
||||
sql = sql.strip('\n').strip(' ')
|
||||
|
@@ -856,95 +741,6 @@ class TriggerView(PGChildNodeView):
|
|||
except Exception as e:
|
||||
return internal_server_error(errormsg=str(e))
|
||||
|
||||
def get_trigger_function_schema(self, data):
|
||||
"""
|
||||
This function will return trigger function with schema name
|
||||
"""
|
||||
# If language is 'edbspl' then trigger function should be
|
||||
# 'Inline EDB-SPL' else we will find the trigger function
|
||||
# with schema name.
|
||||
if data['lanname'] == 'edbspl':
|
||||
data['tfunction'] = 'Inline EDB-SPL'
|
||||
else:
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'get_triggerfunctions.sql']),
|
||||
tgfoid=data['tgfoid'],
|
||||
show_system_objects=self.blueprint.show_system_objects
|
||||
)
|
||||
|
||||
status, result = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=result)
|
||||
|
||||
# Update the trigger function which we have fetched with schema
|
||||
# name
|
||||
if 'rows' in result and len(result['rows']) > 0 and \
|
||||
'tfunctions' in result['rows'][0]:
|
||||
data['tfunction'] = result['rows'][0]['tfunctions']
|
||||
return data
|
||||
|
||||
def get_sql(self, scid, tid, trid, data):
|
||||
"""
|
||||
This function will generate sql from model data
|
||||
"""
|
||||
if trid is not None:
|
||||
SQL = render_template("/".join([self.template_path,
|
||||
'properties.sql']),
|
||||
tid=tid, trid=trid,
|
||||
datlastsysoid=self.datlastsysoid)
|
||||
|
||||
status, res = self.conn.execute_dict(SQL)
|
||||
if not status:
|
||||
return internal_server_error(errormsg=res)
|
||||
if len(res['rows']) == 0:
|
||||
return gone(
|
||||
gettext("""Could not find the trigger in the table.""")
|
||||
)
|
||||
|
||||
old_data = dict(res['rows'][0])
|
||||
|
||||
# If name is not present in data then
|
||||
# we will fetch it from old data, we also need schema & table name
|
||||
if 'name' not in data:
|
||||
data['name'] = old_data['name']
|
||||
|
||||
self.trigger_name = data['name']
|
||||
self.lanname = old_data['lanname']
|
||||
self.is_trigger_enabled = old_data['is_enable_trigger']
|
||||
|
||||
old_data = self.get_trigger_function_schema(old_data)
|
||||
|
||||
if len(old_data['custom_tgargs']) > 1:
|
||||
# We know that trigger has more than 1 argument, let's join
|
||||
# them
|
||||
old_data['tgargs'] = \
|
||||
self._format_args(old_data['custom_tgargs'])
|
||||
|
||||
if len(old_data['tgattr']) > 1:
|
||||
columns = ', '.join(old_data['tgattr'].split(' '))
|
||||
old_data['columns'] = self._column_details(tid, columns)
|
||||
|
||||
old_data = self._trigger_definition(old_data)
|
||||
|
||||
SQL = render_template(
|
||||
"/".join([self.template_path, 'update.sql']),
|
||||
data=data, o_data=old_data, conn=self.conn
|
||||
)
|
||||
else:
|
||||
required_args = {
|
||||
'name': 'Name',
|
||||
'tfunction': 'Trigger function'
|
||||
}
|
||||
|
||||
for arg in required_args:
|
||||
if arg not in data:
|
||||
return gettext('-- definition incomplete')
|
||||
|
||||
# If the request for new object which do not have did
|
||||
SQL = render_template("/".join([self.template_path, 'create.sql']),
|
||||
data=data, conn=self.conn)
|
||||
return SQL, data['name'] if 'name' in data else old_data['name']
|
||||
|
||||
    @check_precondition
    def sql(self, gid, sid, did, scid, tid, trid):
        """
@@ -959,51 +755,9 @@ class TriggerView(PGChildNodeView):
        trid: Trigger ID
        """

        SQL = render_template("/".join([self.template_path,
                                        'properties.sql']),
                              tid=tid, trid=trid,
                              datlastsysoid=self.datlastsysoid)

        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)
        if len(res['rows']) == 0:
            return gone(
                gettext("""Could not find the trigger in the table."""))

        data = dict(res['rows'][0])
        # Adding parent into data dict, will be using it while creating sql
        data['schema'] = self.schema
        data['table'] = self.table

        data = self.get_trigger_function_schema(data)

        if len(data['custom_tgargs']) > 1:
            # We know that trigger has more than 1 argument, let's join them
            data['tgargs'] = self._format_args(data['custom_tgargs'])

        if len(data['tgattr']) >= 1:
            columns = ', '.join(data['tgattr'].split(' '))
            data['columns'] = self._column_details(tid, columns)

        data = self._trigger_definition(data)

        SQL, name = self.get_sql(scid, tid, None, data)

        sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])

        sql_header += render_template("/".join([self.template_path,
                                                'delete.sql']),
                                      data=data, conn=self.conn)

        SQL = sql_header + '\n\n' + SQL.strip('\n')

        # If trigger is disabled then add sql code for the same
        if data['is_enable_trigger'] != 'O':
            SQL += '\n\n'
            SQL += render_template("/".join([self.template_path,
                                             'enable_disable_trigger.sql']),
                                   data=data, conn=self.conn)

        SQL = trigger_utils.get_reverse_engineered_sql(
            self.conn, self.schema, self.table, tid, trid,
            self.datlastsysoid, self.blueprint.show_system_objects)

        return ajax_response(response=SQL)

@@ -0,0 +1,253 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

""" Implements Utility class for Triggers. """

from flask import render_template
from flask_babelex import gettext as _
from pgadmin.utils.ajax import internal_server_error
from pgadmin.utils.exception import ObjectGone
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
    import trigger_definition
from functools import wraps


def get_template_path(f):
    """
    This function will behave as a decorator which will prepare
    the template path based on database server version.
    """

    @wraps(f)
    def wrap(*args, **kwargs):
        # Here args[0] will hold the connection object
        conn_obj = args[0]
        if 'template_path' not in kwargs:
            kwargs['template_path'] = 'triggers/sql/{0}/#{1}#'.format(
                conn_obj.manager.server_type, conn_obj.manager.version)

        return f(*args, **kwargs)
    return wrap
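
A minimal sketch (not part of this commit) of how the decorator derives the
template path. DummyManager and DummyConn below are hypothetical stand-ins
for the real connection object and its manager, and the decorator defined
above is assumed to be in scope:

    from collections import namedtuple

    DummyManager = namedtuple('DummyManager', ['server_type', 'version'])

    class DummyConn(object):
        manager = DummyManager(server_type='pg', version=110000)

    @get_template_path
    def show_path(conn, template_path=None):
        return template_path

    print(show_path(DummyConn()))  # -> 'triggers/sql/pg/#110000#'

Callers therefore normally omit template_path and let the decorator derive
it from the connection passed as the first positional argument.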


@get_template_path
def get_parent(conn, tid, template_path=None):
    """
    This function will return the parent of the given table.
    :param conn: Connection Object
    :param tid: Table oid
    :param template_path: Optional template path
    :return:
    """

    SQL = render_template("/".join([template_path,
                                    'get_parent.sql']), tid=tid)
    status, rset = conn.execute_2darray(SQL)
    if not status:
        raise Exception(rset)

    schema = ''
    table = ''
    if 'rows' in rset and len(rset['rows']) > 0:
        schema = rset['rows'][0]['schema']
        table = rset['rows'][0]['table']

    return schema, table
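
A caller-side sketch, assuming an open connection object `conn` and a table
OID `tid`; this exact call site is illustrative, not copied from the commit:

    # template_path is intentionally omitted; the decorator derives it from
    # conn. schema/table come back as empty strings when no row is found,
    # and an Exception is raised if the query itself fails.
    schema, table = trigger_utils.get_parent(conn, tid)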


@get_template_path
def get_column_details(conn, tid, clist, template_path=None):
    """
    This function will fetch the list of columns for the trigger.
    :param conn:
    :param tid:
    :param clist:
    :param template_path:
    :return:
    """

    SQL = render_template("/".join([template_path,
                                    'get_columns.sql']),
                          tid=tid, clist=clist)
    status, rset = conn.execute_2darray(SQL)
    if not status:
        return internal_server_error(errormsg=rset)

    columns = []
    for row in rset['rows']:
        columns.append(row['name'])

    return columns


@get_template_path
def get_trigger_function_and_columns(conn, data, tid,
                                     show_system_objects,
                                     template_path=None):
    """
    This function will return trigger function with schema name.
    :param conn: Connection Object
    :param data: Data
    :param tid: Table ID
    :param show_system_objects: show system object
    :param template_path: Optional Template Path
    :return:
    """
    # If language is 'edbspl' then trigger function should be
    # 'Inline EDB-SPL' else we will find the trigger function
    # with schema name.
    if data['lanname'] == 'edbspl':
        data['tfunction'] = 'Inline EDB-SPL'
    else:
        SQL = render_template("/".join(
            [template_path, 'get_triggerfunctions.sql']),
            tgfoid=data['tgfoid'],
            show_system_objects=show_system_objects
        )

        status, result = conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=result)

        # Update the trigger function which we have fetched with
        # schema name
        if 'rows' in result and len(result['rows']) > 0 and \
                'tfunctions' in result['rows'][0]:
            data['tfunction'] = result['rows'][0]['tfunctions']

    if len(data['custom_tgargs']) > 1:
        # We know that trigger has more than 1 argument, let's join them
        # and convert it to string
        formatted_args = ["'{0}'".format(arg)
                          for arg in data['custom_tgargs']]
        formatted_args = ', '.join(formatted_args)

        data['tgargs'] = formatted_args

    if len(data['tgattr']) >= 1:
        columns = ', '.join(data['tgattr'].split(' '))
        data['columns'] = get_column_details(conn, tid, columns)

    return data
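
A hedged illustration of what this helper adds to the dict. The field names
are taken from the code above; the literal example values are made up:

    # `data` is the properties.sql row for the trigger. After the call the
    # same dict also carries presentation-ready fields, roughly:
    #   data['tfunction']  e.g. 'public.audit_func' or 'Inline EDB-SPL'
    #   data['tgargs']     e.g. "'arg1', 'arg2'" (only when >1 custom arg)
    #   data['columns']    e.g. ['col1', 'col2'] (via get_column_details)
    data = trigger_utils.get_trigger_function_and_columns(
        conn, data, tid, show_system_objects)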


@get_template_path
def get_sql(conn, data, tid, trid, datlastsysoid,
            show_system_objects, template_path=None):
    """
    This function will generate sql from model data.

    :param conn: Connection Object
    :param data: Data
    :param tid: Table ID
    :param trid: Trigger ID
    :param datlastsysoid:
    :param show_system_objects: Show System Object value True or False
    :param template_path: Optional template path
    :return:
    """
    name = data['name'] if 'name' in data else None
    if trid is not None:
        sql = render_template("/".join([template_path, 'properties.sql']),
                              tid=tid, trid=trid,
                              datlastsysoid=datlastsysoid)

        status, res = conn.execute_dict(sql)
        if not status:
            raise Exception(res)

        if len(res['rows']) == 0:
            raise ObjectGone(_('Could not find the trigger in the table.'))

        old_data = dict(res['rows'][0])
        # If name is not present in data then
        # we will fetch it from old data, we also need schema & table name
        if 'name' not in data:
            name = data['name'] = old_data['name']

        old_data = get_trigger_function_and_columns(
            conn, old_data, tid, show_system_objects)

        old_data = trigger_definition(old_data)

        SQL = render_template(
            "/".join([template_path, 'update.sql']),
            data=data, o_data=old_data, conn=conn
        )
    else:
        required_args = {
            'name': 'Name',
            'tfunction': 'Trigger function'
        }

        for arg in required_args:
            if arg not in data:
                return _('-- definition incomplete')

        # If the request for new object which do not have did
        SQL = render_template("/".join([template_path, 'create.sql']),
                              data=data, conn=conn)
    return SQL, name
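
A minimal caller-side sketch, assuming view-level attributes such as
self.conn, self.datlastsysoid and self.blueprint; the surrounding view code
here is illustrative, not copied from the commit:

    # get_sql returns a (SQL, name) tuple. If the trigger has been dropped
    # it raises ObjectGone, which Flask renders as an HTTP 410 response via
    # the exception's get_response(); a failed query raises a plain
    # Exception instead.
    sql, name = trigger_utils.get_sql(
        self.conn, data, tid, trid, self.datlastsysoid,
        self.blueprint.show_system_objects)
    sql = sql.strip('\n').strip(' ')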


@get_template_path
def get_reverse_engineered_sql(conn, schema, table, tid, trid,
                               datlastsysoid, show_system_objects,
                               template_path=None):
    """
    This function will return reverse engineered sql for specified trigger.

    :param conn: Connection Object
    :param schema: Schema
    :param table: Table
    :param tid: Table ID
    :param trid: Trigger ID
    :param datlastsysoid:
    :param show_system_objects: Show System Object value True or False
    :param template_path: Optional template path
    :return:
    """
    SQL = render_template("/".join([template_path, 'properties.sql']),
                          tid=tid, trid=trid,
                          datlastsysoid=datlastsysoid)

    status, res = conn.execute_dict(SQL)
    if not status:
        raise Exception(res)

    if len(res['rows']) == 0:
        raise ObjectGone(_('Could not find the trigger in the table.'))

    data = dict(res['rows'][0])

    # Adding parent into data dict, will be using it while creating sql
    data['schema'] = schema
    data['table'] = table

    data = \
        get_trigger_function_and_columns(conn, data, tid, show_system_objects)

    data = trigger_definition(data)

    SQL, name = get_sql(conn, data, tid, None, datlastsysoid,
                        show_system_objects)

    sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])

    sql_header += render_template("/".join([template_path, 'delete.sql']),
                                  data=data, conn=conn)

    SQL = sql_header + '\n\n' + SQL.strip('\n')

    # If trigger is disabled then add sql code for the same
    if data['is_enable_trigger'] != 'O':
        SQL += '\n\n'
        SQL += render_template("/".join([template_path,
                                         'enable_disable_trigger.sql']),
                               data=data, conn=conn)
    return SQL

File diff suppressed because it is too large
@@ -12,7 +12,7 @@ from werkzeug.http import HTTP_STATUS_CODES
from flask_babelex import gettext as _
from flask import request

from pgadmin.utils.ajax import service_unavailable
from pgadmin.utils.ajax import service_unavailable, gone


class ConnectionLost(HTTPException):
@@ -105,3 +105,26 @@ class CryptKeyMissing(HTTPException):
    def __repr__(self):
        return "Crypt key is missing."


class ObjectGone(HTTPException):
    """
    Exception raised when the referenced object is no longer present in the
    database; rendered as an HTTP 410 (Gone) response.
    """

    def __init__(self, error_msg):
        self.error_msg = error_msg
        HTTPException.__init__(self)

    @property
    def name(self):
        return HTTP_STATUS_CODES.get(410, 'Gone')

    def get_response(self, environ=None):
        return gone(self.error_msg)

    def __str__(self):
        return self.error_msg

    def __repr__(self):
        return self.error_msg
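
A short sketch (hypothetical call site, fetch_trigger_row is an invented
helper name) of how utility code can raise this exception instead of
returning a response object directly:

    from pgadmin.utils.exception import ObjectGone

    def fetch_trigger_row(conn, sql):
        status, res = conn.execute_dict(sql)
        if not status:
            raise Exception(res)
        if len(res['rows']) == 0:
            # Propagates up to Flask, whose error handling calls
            # get_response() and returns the 410 (Gone) payload.
            raise ObjectGone('Could not find the trigger in the table.')
        return res['rows'][0]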