Implementation of Table, Column, Index, Trigger, Constraints and Rule Node.

- Table, Column, Index and Trigger (Author:- Murtuza Zabuawala)
 - Constraints Primary Key/Unique/Check/Foreign Key/Exclusion
     with integration into Table node (Author:- Harshal Dhumal)
 - Rule (Author:- Surinder Kumar)
 - Vacuum Control (Initial patch by Surinder Kumar and further enhancement by Murtuza)
pull/3/head
Akshay Joshi 2016-05-20 17:15:52 +05:30
parent 6895da9cbc
commit 4dbe058832
238 changed files with 18920 additions and 3 deletions

File diff suppressed because it is too large.


@@ -0,0 +1,906 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
""" Implements Column Node """
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.browser.server_groups.servers.databases.schemas.utils \
import DataTypeReader
from pgadmin.browser.collection import CollectionNodeModule
import pgadmin.browser.server_groups.servers.databases as database
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
parse_priv_to_db
from functools import wraps
import json
class ColumnsModule(CollectionNodeModule):
"""
class ColumnsModule(CollectionNodeModule)
A module class for Column node derived from CollectionNodeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ColumnsModule and its base module.
* get_nodes(gid, sid, did, scid, tid)
- Method is used to generate the browser collection node.
* node_inode()
- Method is overridden from its base class to make the node a leaf node.
* script_load()
- Load the module script for the column node, when any of the database
nodes are initialized.
"""
NODE_TYPE = 'column'
COLLECTION_LABEL = gettext("Columns")
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the ColumnsModule and its base module.
Args:
*args:
**kwargs:
"""
self.min_ver = None
self.max_ver = None
super(ColumnsModule, self).__init__(*args, **kwargs)
def get_nodes(self, gid, sid, did, scid, **kwargs):
"""
Generate the collection node
"""
assert('tid' in kwargs or 'vid' in kwargs)
yield self.generate_browser_collection_node(
kwargs['tid'] if 'tid' in kwargs else kwargs['vid']
)
@property
def script_load(self):
"""
Load the module script for the column node, when any of the database
nodes are initialized.
"""
return database.DatabaseModule.NODE_TYPE
@property
def node_inode(self):
"""
Load the module node as a leaf node
"""
return False
blueprint = ColumnsModule(__name__)
class ColumnsView(PGChildNodeView, DataTypeReader):
"""
This class is responsible for generating routes for Column node
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the ColumnsView and its base view.
* module_js()
- This property defines whether javascript exists for this node.
Override this property for your own logic.
* check_precondition()
- This function behaves as a decorator which checks the database
connection before running the view; it also attaches the
manager, conn & template_path properties to self
* list()
- This function is used to list all the Column nodes within that
collection.
* nodes()
- This function will used to create all the child node within that
collection, Here it will create all the Column node.
* properties(gid, sid, did, scid, tid, clid)
- This function will show the properties of the selected Column node
* create(gid, sid, did, scid, tid)
- This function will create the new Column object
* update(gid, sid, did, scid, tid, clid)
- This function will update the data for the selected Column node
* delete(gid, sid, did, scid, tid, clid)
- This function will drop the Column object
* msql(gid, sid, did, scid, tid, clid)
- This function is used to return modified SQL for the selected
Column node
* get_sql(data, scid, tid)
- This function will generate sql from model data
* sql(gid, sid, did, scid):
- This function will generate sql to show in the sql pane for the
selected Column node.
* dependency(gid, sid, did, scid):
- This function will generate the dependency list to show in the
dependency pane for the selected Column node.
* dependent(gid, sid, did, scid):
- This function will generate the dependents list to show in the
dependents pane for the selected Column node.
"""
node_type = blueprint.node_type
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [
# Here we specify the type as string because system columns
# can have '-' in their IDs (negative attnum)
{'type': 'string', 'id': 'clid'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}],
})
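# An illustrative sketch of the resulting routing (the exact URL prefix is
# generated by PGChildNodeView / the framework): GET, PUT and DELETE on
# .../obj/<gid>/<sid>/<did>/<scid>/<tid>/<clid> map to properties/update/delete,
# while GET and POST on .../obj/<gid>/<sid>/<did>/<scid>/<tid>/ map to list/create.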
def check_precondition(f):
"""
This function behaves as a decorator which checks the database
connection before running the view; it also attaches the
manager, conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
driver = get_driver(PG_DEFAULT_DRIVER)
self.manager = driver.connection_manager(
kwargs['sid']
)
self.conn = self.manager.connection(did=kwargs['did'])
self.qtIdent = driver.qtIdent
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
gettext(
"Connection to the server has been lost!"
)
)
ver = self.manager.version
# we will set template path for sql scripts
if ver >= 90200:
self.template_path = 'column/sql/9.2_plus'
else:
self.template_path = 'column/sql/9.1_plus'
# Allowed ACL for column 'Select/Update/Insert/References'
self.acl = ['a', 'r', 'w', 'x']
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
self.schema = row['schema']
self.table = row['table']
return f(*args, **kwargs)
return wrap
@check_precondition
def list(self, gid, sid, did, scid, tid):
"""
This function is used to list all the column nodes within that collection.
Args:
gid: Server group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
Returns:
JSON of available column nodes
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid,
show_sys_objects=self.blueprint.show_system_objects)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return ajax_response(
response=res['rows'],
status=200
)
@check_precondition
def nodes(self, gid, sid, did, scid, tid):
"""
This function is used to create all the child nodes within that collection.
Here it will create all the column nodes.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
Returns:
JSON of available column child nodes
"""
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid,
show_sys_objects=self.blueprint.show_system_objects)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon="icon-column",
datatype=row['datatype'] # We need datatype somewhere in
)) # exclusion constraint.
return make_json_response(
data=res,
status=200
)
def _formatter(self, scid, tid, clid, data):
"""
Args:
scid: schema oid
tid: table oid
clid: position of column in table
data: dict of query result
Returns:
It will return formatted output of collections
"""
# To check if column is primary key
if 'attnum' in data and 'indkey' in data:
# Current column
attnum = str(data['attnum'])
# Single/List of primary key column(s)
indkey = str(data['indkey'])
# We will check if column is in primary column(s)
if attnum in indkey.split(" "):
data['is_pk'] = True
else:
data['is_pk'] = False
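# For example, with indkey "1 3" (a two-column primary key on attnums 1
# and 3), a column whose attnum is 3 gets is_pk = True, while a column
# whose attnum is 2 gets is_pk = False.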
# Find length & precision of column data type
fulltype = self.get_full_type(
data['typnspname'], data['typname'],
data['isdup'], data['attndims'], data['atttypmod']
)
import re
# If we have length & precision both
matchObj = re.search(r'(\d+),(\d+)', fulltype)
if matchObj:
data['attlen'] = matchObj.group(1)
data['attprecision'] = matchObj.group(2)
else:
# If we have length only
matchObj = re.search(r'(\d+)', fulltype)
if matchObj:
data['attlen'] = matchObj.group(1)
data['attprecision'] = None
else:
data['attlen'] = None
data['attprecision'] = None
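# Illustration: a full type of "numeric(10,2)" yields attlen='10' and
# attprecision='2'; "character varying(255)" yields attlen='255' and
# attprecision=None; a plain "integer" leaves both as None.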
# We need to fetch inherited tables for each table
SQL = render_template("/".join([self.template_path,
'get_inherited_tables.sql']),
tid=tid)
status, inh_res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=inh_res)
for row in inh_res['rows']:
if row['attrname'] == data['name']:
data['is_inherited'] = True
data['tbls_inherited'] = row['inhrelname']
# We need to format variables according to client js collection
if 'attoptions' in data and data['attoptions'] is not None:
spcoptions = []
for spcoption in data['attoptions']:
k, v = spcoption.split('=')
spcoptions.append({'name': k, 'value': v})
data['attoptions'] = spcoptions
# Need to format security labels according to client js collection
if 'seclabels' in data and data['seclabels'] is not None:
seclabels = []
for seclbls in data['seclabels']:
k, v = seclbls.split('=')
seclabels.append({'provider': k, 'label': v})
data['seclabels'] = seclabels
# We need to parse & convert ACL coming from database to json format
SQL = render_template("/".join([self.template_path, 'acl.sql']),
tid=tid, clid=clid)
status, acl = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=acl)
# We will get privileges from the acl sql, so we don't need
# them from the properties sql
data['attacl'] = []
for row in acl['rows']:
priv = parse_priv_from_db(row)
data.setdefault(row['deftype'], []).append(priv)
# We receive this request when in edit mode,
# so we will send filtered types related to the current type
present_type = data['cltype']
type_id = data['atttypid']
SQL = render_template("/".join([self.template_path,
'is_referenced.sql']),
tid=tid, clid=clid)
status, is_reference = self.conn.execute_scalar(SQL)
edit_types_list = list()
# We will need present type in edit mode
edit_types_list.append(present_type)
if int(is_reference) == 0:
SQL = render_template("/".join([self.template_path,
'edit_mode_types.sql']),
type_id=type_id)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:
edit_types_list.append(row['typname'])
else:
edit_types_list.append(present_type)
data['edit_types'] = edit_types_list
# Manual data type formatting:
# if the data type has () in it then we need to remove it,
# e.g. bit(1), because we need to match the name with the combobox
isArray = False
if data['cltype'].endswith('[]'):
isArray = True
data['cltype'] = data['cltype'].rstrip('[]')
idx = data['cltype'].find('(')
if idx and data['cltype'].endswith(')'):
data['cltype'] = data['cltype'][:idx]
if isArray:
data['cltype'] += "[]"
return data
@check_precondition
def properties(self, gid, sid, did, scid, tid, clid):
"""
This function will show the properties of the selected column node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
Returns:
JSON of the selected column node
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
# Making copy of output for future use
data = dict(res['rows'][0])
data = self._formatter(scid, tid, clid, data)
return ajax_response(
response=data,
status=200
)
def _cltype_formatter(self, type):
"""
Args:
type: Type string
Returns:
The type with '[]' removed; we need to append '[]' after the
length/precision, so we set the hasSqrBracket flag for the
sql template
"""
if '[]' in type:
type = type.replace('[]', '')
self.hasSqrBracket = True
else:
self.hasSqrBracket = False
return type
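# For instance, _cltype_formatter('character varying[]') returns
# 'character varying' and sets self.hasSqrBracket = True, so the sql
# templates can re-append '[]' after the length/precision part.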
@check_precondition
def create(self, gid, sid, did, scid, tid):
"""
This function will create a new column object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
"""
data = request.form if request.form else json.loads(
request.data.decode()
)
for k, v in data.items():
try:
data[k] = json.loads(v)
except (ValueError, TypeError):
data[k] = v
required_args = {
'name': 'Name',
'cltype': 'Type'
}
for arg in required_args:
if arg not in data:
return make_json_response(
status=410,
success=0,
errormsg=gettext(
"Couldn't find the required parameter (%s)." %
required_args[arg]
)
)
# Parse privilege data coming from client according to database format
if 'attacl' in data:
data['attacl'] = parse_priv_to_db(data['attacl'], self.acl)
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
# check type for '[]' in it
data['cltype'] = self._cltype_formatter(data['cltype'])
data['hasSqrBracket'] = self.hasSqrBracket
try:
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data, conn=self.conn)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
# we need the oid to add the object to the browser tree
SQL = render_template("/".join([self.template_path,
'get_position.sql']),
tid=tid, data=data)
status, clid = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=clid)
return jsonify(
node=self.blueprint.generate_browser_node(
clid,
scid,
data['name'],
icon="icon-column"
)
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, clid):
"""
This function will drop the column object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
"""
# We will first fetch the column name for the current request
# so that we can create the template for dropping the column
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = dict(res['rows'][0])
# We will add table & schema as well
data['schema'] = self.schema
data['table'] = self.table
SQL = render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Column is dropped"),
data={
'id': clid,
'tid': tid
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def update(self, gid, sid, did, scid, tid, clid):
"""
This function will update the existing column object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
"""
data = request.form if request.form else json.loads(request.data.decode())
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
# check type for '[]' in it
if 'cltype' in data:
data['cltype'] = self._cltype_formatter(data['cltype'])
data['hasSqrBracket'] = self.hasSqrBracket
try:
SQL = self.get_sql(scid, tid, clid, data)
if SQL and SQL.strip('\n') and SQL.strip(' '):
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info="Column updated",
data={
'id': clid,
'tid': tid,
'scid': scid
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': clid,
'tid': tid,
'scid': scid
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def msql(self, gid, sid, did, scid, tid, clid=None):
"""
This function generates the modified sql for the column object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID (When working with existing column)
"""
data = dict()
for k, v in request.args.items():
try:
data[k] = json.loads(v)
except ValueError:
data[k] = v
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
# check type for '[]' in it
if 'cltype' in data:
data['cltype'] = self._cltype_formatter(data['cltype'])
data['hasSqrBracket'] = self.hasSqrBracket
try:
SQL = self.get_sql(scid, tid, clid, data)
if SQL and SQL.strip('\n') and SQL.strip(' '):
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, scid, tid, clid, data):
"""
This function will generate sql from model data
"""
if clid is not None:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
old_data = dict(res['rows'][0])
# We will add table & schema as well
old_data = self._formatter(scid, tid, clid, old_data)
# If name is not present in data then
# we will fetch it from old data, we also need schema & table name
if 'name' not in data:
data['name'] = old_data['name']
# Convert acl coming from client in db parsing format
key = 'attacl'
if key in data and data[key] is not None:
if 'added' in data[key]:
data[key]['added'] = parse_priv_to_db(
data[key]['added'], self.acl
)
if 'changed' in data[key]:
data[key]['changed'] = parse_priv_to_db(
data[key]['changed'], self.acl
)
if 'deleted' in data[key]:
data[key]['deleted'] = parse_priv_to_db(
data[key]['deleted'], self.acl
)
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data, conn=self.conn
)
else:
required_args = [
'name',
'cltype'
]
for arg in required_args:
if arg not in data:
return gettext('-- incomplete definition')
# We will convert privileges coming from the client
# into the server side format
if 'attacl' in data:
data['attacl'] = parse_priv_to_db(data['attacl'],
self.acl)
# If the request is for a new object which does not have an id yet
SQL = render_template("/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn)
return SQL
@check_precondition
def sql(self, gid, sid, did, scid, tid, clid):
"""
This function generates the reverse engineered sql for the column object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']), tid=tid, clid=clid
, show_sys_objects=self.blueprint.show_system_objects)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = dict(res['rows'][0])
# We do not want to display length as -1 in create query
if 'attlen' in data and data['attlen'] == -1:
data['attlen'] = ''
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
# check type for '[]' in it
if 'cltype' in data:
data['cltype'] = self._cltype_formatter(data['cltype'])
data['hasSqrBracket'] = self.hasSqrBracket
# We will add table & schema as well
data = self._formatter(scid, tid, clid, data)
SQL = self.get_sql(scid, tid, None, data)
sql_header = "-- Column: {0}\n\n-- ".format(self.qtIdent(self.conn,
data['schema'],
data['table'],
data['name']))
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
SQL = sql_header + '\n\n' + SQL
return SQL
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def dependents(self, gid, sid, did, scid, tid, clid):
"""
This function get the dependents and return ajax response
for the column node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
"""
# Specific condition for column which we need to append
where = "WHERE dep.refobjid={0}::OID AND dep.refobjsubid={1}".format(
tid, clid
)
dependents_result = self.get_dependents(
self.conn, clid, where=where
)
# Specific sql to run against the column to fetch dependents
SQL = render_template("/".join([self.template_path,
'depend.sql']), where=where)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
for row in res['rows']:
ref_name = row['refname']
if ref_name is None:
continue
dep_type = ''
dep_str = row['deptype']
if dep_str == 'a':
dep_type = 'auto'
elif dep_str == 'n':
dep_type = 'normal'
elif dep_str == 'i':
dep_type = 'internal'
dependents_result.append({'type': 'sequence', 'name': ref_name, 'field': dep_type})
return ajax_response(
response=dependents_result,
status=200
)
@check_precondition
def dependencies(self, gid, sid, did, scid, tid, clid):
"""
This function get the dependencies and return ajax response
for the column node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
clid: Column ID
"""
# Specific condition for column which we need to append
dependencies_result = self.get_dependencies(
self.conn, clid
)
return ajax_response(
response=dependencies_result,
status=200
)
ColumnsView.register_node_view(blueprint)

Two binary image files added (400 B and 435 B); contents not shown.


@@ -0,0 +1,586 @@
define(
['jquery', 'underscore', 'underscore.string', 'pgadmin', 'pgadmin.browser',
'backform', 'alertify', 'pgadmin.browser.collection'],
function($, _, S, pgAdmin, pgBrowser, Backform, alertify) {
if (!pgBrowser.Nodes['coll-column']) {
var databases = pgAdmin.Browser.Nodes['coll-column'] =
pgAdmin.Browser.Collection.extend({
node: 'column',
label: '{{ _('Columns') }}',
type: 'coll-column',
sqlAlterHelp: 'sql-altertable.html',
sqlCreateHelp: 'sql-altertable.html',
columns: ['name', 'atttypid', 'description']
});
};
// Switch Cell for Primary Key selection
var SwitchDepCell = Backgrid.BooleanCell.extend({
initialize: function() {
Backgrid.BooleanCell.prototype.initialize.apply(this, arguments);
Backgrid.Extension.DependentCell.prototype.initialize.apply(this, arguments);
},
dependentChanged: function () {
var model = this.model,
column = this.column,
editable = this.column.get("editable"),
input = this.$el.find('input[type=checkbox]').first(),
is_editable = _.isFunction(editable) ? !!editable.apply(column, [model]) : !!editable;
if (is_editable) {
this.$el.addClass("editable");
input.prop('disabled', false);
} else {
this.$el.removeClass("editable");
input.prop('disabled', true);
}
this.delegateEvents();
return this;
},
remove: Backgrid.Extension.DependentCell.prototype.remove
});
// This Node model will be used for variable control for column
var VariablesModel = Backform.VariablesModel = pgAdmin.Browser.Node.Model.extend({
defaults: {
name: null,
value: null
},
schema: [{
id: 'name', label: '{{ _('Name') }}', cell: 'select2',
type: 'text', disabled: false, node: 'column',
options: [['n_distinct', 'n_distinct'],
['n_distinct_inherited','n_distinct_inherited']],
select2: {placeholder: "Select variable"},
cellHeaderClasses:'width_percent_50'
},{
id: 'value', label: '{{ _('Value') }}',
type: 'text', disabled: false,
cellHeaderClasses:'width_percent_50'
}],
validate: function() {
var err = {},
errmsg = null;
if (_.isUndefined(this.get('value')) ||
_.isNull(this.get('value')) ||
String(this.get('value')).replace(/^\s+|\s+$/g, '') == '') {
errmsg = '{{ _('Please provide input for variable.')}}';
this.errorModel.set('value', errmsg);
return errmsg;
} else {
this.errorModel.unset('value');
}
return null;
}
});
if (!pgBrowser.Nodes['column']) {
pgAdmin.Browser.Nodes['column'] = pgAdmin.Browser.Node.extend({
parent_type: ['table', 'view', 'mview'],
collection_type: ['coll-table', 'coll-view', 'coll-mview'],
type: 'column',
label: '{{ _('Column') }}',
hasSQL: true,
canDrop: function(itemData, item, data){
if (pgBrowser.Nodes['schema'].canChildDrop.apply(this, [itemData, item, data])) {
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// Check if menu is allowed ?
if(_.indexOf(parents, 'catalog') > -1 ||
_.indexOf(parents, 'view') > -1 ||
_.indexOf(parents, 'mview') > -1) {
return false;
} else if(_.indexOf(parents, 'table') > -1) {
return true;
}
} else {
return false;
}
},
hasDepends: true,
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([{
name: 'create_column_on_coll', node: 'coll-column', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Column...') }}',
icon: 'wcTabIcon icon-column', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_column', node: 'column', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Column...') }}',
icon: 'wcTabIcon icon-column', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_column_onTable', node: 'table', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Column...') }}',
icon: 'wcTabIcon icon-column', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_column_onView', node: 'view', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Column...') }}',
icon: 'wcTabIcon icon-column', data: {action: 'create', check: true},
enable: 'canCreate'
}
]);
},
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
attowner: undefined,
atttypid: undefined,
attnum: undefined,
cltype: undefined,
collspcname: undefined,
attacl: undefined,
description: undefined,
parent_tbl: undefined,
min_val: undefined,
max_val: undefined,
edit_types: undefined,
is_primary_key: false,
inheritedfrom: undefined,
attstattarget:undefined
},
schema: [{
id: 'name', label: '{{ _('Name') }}', cell: 'string',
type: 'text', disabled: 'inSchemaWithColumnCheck',
cellHeaderClasses:'width_percent_30',
editable: 'editable_check_for_table'
},{
// Need to show this field only when creating new table [in SubNode control]
id: 'is_primary_key', label: '{{ _('Is primary key?') }}', cell: SwitchDepCell,
type: 'switch', deps:['name'],
options: {
onText: 'Yes', offText: 'No', onColor: 'success',
offColor: 'primary', size: 'small'},
cellHeaderClasses:'width_percent_5',
visible: function(m) {
return _.isUndefined(m.top.node_info['table'] || m.top.node_info['view'] || m.top.node_info['mview']);
},
disabled: function(m){
// If primary key already exist then disable.
if (m.top && !_.isUndefined(m.top.get('oid')) &&
m.top.get('primary_key').length > 0 &&
!_.isUndefined(m.top.get('primary_key').first().get('oid'))) {
return true;
}
var name = m.get('name');
if(!m.inSchemaWithColumnCheck.apply(this, [m]) &&
(_.isUndefined(name) || _.isNull(name) || name == '')) {
return true;
}
return false;
},
editable: function(m){
var name = m.get('name');
// If HeaderCell then allow True
if(m instanceof Backbone.Collection) {
return true;
}
// If primary key already exist then disable.
if (m.top && !_.isUndefined(m.top.get('oid')) &&
m.top.get('primary_key').length > 0 &&
!_.isUndefined(m.top.get('primary_key').first().get('oid'))) {
return false;
}
if(!m.inSchemaWithColumnCheck.apply(this, [m]) &&
!_.isUndefined(name) && !_.isNull(name) && name !== '') {
return true;
}
return false;
}
},{
id: 'attnum', label:'{{ _('Position') }}', cell: 'string',
type: 'text', disabled: 'inSchema', mode: ['properties']
},{
id: 'cltype', label:'{{ _('Data type') }}', cell: 'node-ajax-options',
type: 'text', disabled: 'inSchemaWithColumnCheck',
control: 'node-ajax-options', url: 'get_types', node: 'table',
cellHeaderClasses:'width_percent_30', first_empty: true,
select2: { allowClear: false }, group: '{{ _('Definition') }}',
transform: function(data, cell) {
/* 'transform' function will be called by control, and cell both.
* The way, we use the transform in cell, and control is different.
* Because - options are shared using 'column' object in backgrid,
* hence - the cell is passed as second parameter, while the control
* uses (this) as a object.
*/
var control = cell || this,
m = control.model;
/* We need different data in create mode & in edit mode
* if we are in create mode then return data as it is
* if we are in edit mode then we need to filter data
*/
control.model.datatypes = data;
var edit_types = m.get('edit_types'),
result = [];
// If called from Table, We will check if in edit mode
// then send edit_types only
if( !_.isUndefined(m.top) && !m.top.isNew() ) {
_.each(data, function(t) {
if (_.indexOf(edit_types, t.value) != -1) {
result.push(t);
}
});
// There may be a case where the user adds a new column to an existing collection;
// we will not have edit types then
return result.length > 0 ? result : data;
}
// If called from Column
if(m.isNew()) {
return data;
} else {
//edit mode
_.each(data, function(t) {
if (_.indexOf(edit_types, t.value) != -1) {
result.push(t);
}
});
return result;
}
},
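// Illustration (hypothetical values): for an existing column whose
// edit_types is ['integer', 'bigint'], only those two entries of the
// full datatype list are offered; a brand new column sees the full list.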
editable: 'editable_check_for_table'
},{
// Need to show this field only when creating new table [in SubNode control]
id: 'inheritedfrom', label: '{{ _('Inherited from table') }}',
type: 'text', disabled: true, editable: false,
cellHeaderClasses:'width_percent_30',
visible: function(m) {
return _.isUndefined(m.top.node_info['table'] || m.top.node_info['view'] || m.top.node_info['mview']);
}
},{
id: 'attlen', label:'{{ _('Length') }}', cell: 'string',
deps: ['cltype'], type: 'int', group: '{{ _('Definition') }}',
disabled: function(m) {
var of_type = m.get('cltype'),
flag = true;
_.each(m.datatypes, function(o) {
if ( of_type == o.value ) {
if(o.length)
{
m.set('min_val', o.min_val, {silent: true});
m.set('max_val', o.max_val, {silent: true});
flag = false;
}
}
});
flag && setTimeout(function() {
m.set('attlen', null);
},10);
return flag;
}
},{
id: 'attprecision', label:'{{ _('Precision') }}', cell: 'string',
deps: ['cltype'], type: 'int', group: '{{ _('Definition') }}',
disabled: function(m) {
var of_type = m.get('cltype'),
flag = true;
_.each(m.datatypes, function(o) {
if ( of_type == o.value ) {
if(o.precision)
{
m.set('min_val', o.min_val, {silent: true});
m.set('max_val', o.max_val, {silent: true});
flag = false;
}
}
});
flag && setTimeout(function() {
m.set('attprecision', null);
},10);
return flag;
}
},{
id: 'collspcname', label:'{{ _('Collation') }}', cell: 'string',
type: 'text', control: 'node-ajax-options', url: 'get_collations',
group: '{{ _('Definition') }}', node: 'collation',
deps: ['cltype'], disabled: function(m) {
var of_type = m.get('cltype'),
flag = true;
_.each(m.datatypes, function(o) {
if ( of_type == o.value ) {
if(o.is_collatable)
{
flag = false;
}
}
});
if (flag) {
setTimeout(function(){
m.set('collspcname', "");
}, 10);
}
return flag;
}
},{
id: 'defval', label:'{{ _('Default Value') }}', cell: 'string',
type: 'text', disabled: 'inSchemaWithColumnCheck',
group: '{{ _('Definition') }}'
},{
id: 'attnotnull', label:'{{ _('Not NULL?') }}', cell: 'string',
type: 'switch', disabled: 'inSchemaWithColumnCheck',
group: '{{ _('Definition') }}'
},{
id: 'attstattarget', label:'{{ _('Statistics') }}', cell: 'string',
type: 'text', disabled: 'inSchemaWithColumnCheck', mode: ['properties', 'edit'],
group: '{{ _('Definition') }}'
},{
id: 'attstorage', label:'{{ _('Storage') }}', group: '{{ _('Definition') }}',
type: 'text', mode: ['properties', 'edit'],
cell: 'string', disabled: 'inSchemaWithColumnCheck', first_empty: true,
control: 'select2', select2: { placeholder: "Select storage",
allowClear: false,
width: "100%"
},
options: [
{label: "PLAIN", value: "p"},
{label: "MAIN", value: "m"},
{label: "EXTERNAL", value: "e"},
{label: "EXTENDED", value: "x"},
]
},{
id: 'is_pk', label:'{{ _('Primary key?') }}',
type: 'switch', disabled: true, mode: ['properties']
},{
id: 'is_fk', label:'{{ _('Foreign key?') }}',
type: 'switch', disabled: true, mode: ['properties']
},{
id: 'is_inherited', label:'{{ _('Inherited?') }}',
type: 'switch', disabled: true, mode: ['properties']
},{
id: 'tbls_inherited', label:'{{ _('Inherited from table(s)') }}',
type: 'text', disabled: true, mode: ['properties'], deps: ['is_inherited'],
visible: function(m) {
if (!_.isUndefined(m.get('is_inherited')) && m.get('is_inherited')) {
return true;
} else {
return false;
}
}
},{
id: 'is_sys_column', label:'{{ _('System Column?') }}', cell: 'string',
type: 'switch', disabled: true, mode: ['properties']
},{
id: 'description', label:'{{ _('Comment') }}', cell: 'string',
type: 'multiline', mode: ['properties', 'create', 'edit'],
disabled: 'inSchema'
},{
id: 'attacl', label: 'Privileges', type: 'collection',
group: '{{ _('Security') }}', control: 'unique-col-collection',
model: pgAdmin.Browser.Node.PrivilegeRoleModel.extend({
privileges: ['a','r','w','x']}),
mode: ['edit'], canAdd: true, canDelete: true,
uniqueCol : ['grantee']
},{
id: 'attoptions', label: 'Variables', type: 'collection',
group: '{{ _('Security') }}', control: 'unique-col-collection',
model: VariablesModel, uniqueCol : ['name'],
mode: ['edit', 'create'], canAdd: true, canEdit: false,
canDelete: true
},{
id: 'seclabels', label: '{{ _('Security Labels') }}',
model: pgAdmin.Browser.SecurityModel,
editable: false, type: 'collection',
group: '{{ _('Security') }}', mode: ['edit', 'create'],
min_version: 90100, canAdd: true,
canEdit: false, canDelete: true, control: 'unique-col-collection'
}
],
validate: function(keys) {
var err = {},
changedAttrs = this.changed,
msg = undefined;
// Nothing to validate
if (keys && keys.length == 0) {
this.errorModel.clear();
return null;
} else {
this.errorModel.clear();
}
if (_.isUndefined(this.get('name'))
|| String(this.get('name')).replace(/^\s+|\s+$/g, '') == '') {
msg = '{{ _('Column name can not be empty.') }}';
this.errorModel.set('name', msg);
return msg;
}
if (_.isUndefined(this.get('cltype'))
|| String(this.get('cltype')).replace(/^\s+|\s+$/g, '') == '') {
msg = '{{ _('Column type can not be empty.') }}';
this.errorModel.set('cltype', msg);
return msg;
}
if (!_.isUndefined(this.get('cltype'))
&& !_.isUndefined(this.get('attlen'))
&& !_.isNull(this.get('attlen'))
&& this.get('attlen') !== '') {
// Validation for Length field
if (this.get('attlen') < this.get('min_val'))
msg = '{{ _('Length should not be less than: ') }}' + this.get('min_val');
if (this.get('attlen') > this.get('max_val'))
msg = '{{ _('Length should not be greater than: ') }}' + this.get('max_val');
// If we have any error set then throw it to user
if(msg) {
this.errorModel.set('attlen', msg)
return msg;
}
}
if (!_.isUndefined(this.get('cltype'))
&& !_.isUndefined(this.get('attprecision'))
&& !_.isNull(this.get('attprecision'))
&& this.get('attprecision') !== '') {
// Validation for precision field
if (this.get('attprecision') < this.get('min_val'))
msg = '{{ _('Precision should not be less than: ') }}' + this.get('min_val');
if (this.get('attprecision') > this.get('max_val'))
msg = '{{ _('Precision should not be greater than: ') }}' + this.get('max_val');
// If we have any error set then throw it to user
if(msg) {
this.errorModel.set('attprecision', msg)
return msg;
}
}
return null;
},
isInhertedColumn: function() {
},
// We will check if we are under schema node & in 'create' mode
inSchema: function() {
if(this.node_info && 'catalog' in this.node_info)
{
return true;
}
return false;
},
// We will check if we are under schema node & in 'create' mode
inSchemaWithModelCheck: function(m) {
if(this.node_info && 'schema' in this.node_info)
{
// We will disable control if it's in 'edit' mode
if (m.isNew()) {
return false;
} else {
return true;
}
}
return true;
},
// Checks whether to enable/disable control
inSchemaWithColumnCheck: function(m) {
var node_info = this.node_info || m.node_info || m.top.node_info;
// disable all fields if column is listed under view or mview
if ('view' in node_info || 'mview' in node_info) {
if (this && _.has(this, 'name') && (this.name != 'defval')) {
return true;
}
}
if(node_info && 'schema' in node_info)
{
// We will disable the control if it is a system column.
// The inheritedfrom check is useful when we use this schema in the table node;
// if inheritedfrom has a value then we should disable it
if(!_.isUndefined(m.get('inheritedfrom'))) {
return true;
}
// i.e. its position is less than 1
if (m.isNew()) {
return false;
}
// if we are in edit mode
if (!_.isUndefined(m.get('attnum')) && m.get('attnum') > 0 ) {
return false;
} else {
return true;
}
}
return true;
},
editable_check_for_table: function(arg) {
if (arg instanceof Backbone.Collection) {
return !arg.model.prototype.inSchemaWithColumnCheck.apply(
this, [arg.top]
);
} else {
return !arg.inSchemaWithColumnCheck.apply(
this, [arg]
);
}
}
}),
// Below function will enable right click menu for creating column
canCreate: function(itemData, item, data) {
// If check is false then we will allow the create menu
if (data && data.check == false)
return true;
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
// If it is a schema then allow the user to create a column
if (_.indexOf(['schema'], d._type) > -1) {
return true;
}
else if (_.indexOf(['view', 'coll-view',
'mview',
'coll-mview'], d._type) > -1) {
parents.push(d._type);
break;
}
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// If node is under catalog then do not allow 'create' menu
if (_.indexOf(parents, 'catalog') > -1 ||
_.indexOf(parents, 'coll-view') > -1 ||
_.indexOf(parents, 'coll-mview') > -1 ||
_.indexOf(parents, 'mview') > -1 ||
_.indexOf(parents, 'view') > -1) {
return false;
} else {
return true;
}
}
});
}
return pgBrowser.Nodes['column'];
});


@@ -0,0 +1,13 @@
{% macro APPLY(conn, schema_name, table_object, column_object, role, privs, with_grant_privs) -%}
{% if privs %}
GRANT {% for p in privs %}{% if loop.index != 1 %}, {% endif %}{{p}}({{conn|qtIdent(column_object)}}){% endfor %}
ON {{ conn|qtIdent(schema_name, table_object) }} TO {{ conn|qtIdent(role) }};
{% endif %}
{% if with_grant_privs %}
GRANT {% for p in with_grant_privs %}{% if loop.index != 1 %}, {% endif %}{{p}}({{conn|qtIdent(column_object)}}){% endfor %}
ON {{ conn|qtIdent(schema_name, table_object) }} TO {{ conn|qtIdent(role) }} WITH GRANT OPTION;
{% endif %}
{%- endmacro %}
{% macro RESETALL(conn, schema_name, table_object, column_object, role) -%}
REVOKE ALL({{ conn|qtIdent(column_object) }}) ON {{ conn|qtIdent(schema_name, table_object) }} FROM {{ conn|qtIdent(role) }};
{%- endmacro %}
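{# Illustrative rendering with hypothetical arguments, e.g.
   APPLY(conn, 'public', 'emp', 'ename', 'alice', ['SELECT', 'UPDATE'], [])
   produces roughly: GRANT SELECT(ename), UPDATE(ename) ON public.emp TO alice; #}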


@@ -0,0 +1,6 @@
{% macro APPLY(conn, type, schema_name, parent_object, child_object, provider, label) -%}
SECURITY LABEL FOR {{ conn|qtIdent(provider) }} ON {{ type }} {{ conn|qtIdent(schema_name, parent_object, child_object) }} IS {{ label|qtLiteral }};
{%- endmacro %}
{% macro DROP(conn, type, schema_name, parent_object, child_object, provider) -%}
SECURITY LABEL FOR {{ conn|qtIdent(provider) }} ON {{ type }} {{ conn|qtIdent(schema_name, parent_object, child_object) }} IS NULL;
{%- endmacro %}
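{# Illustrative rendering with hypothetical arguments, e.g.
   APPLY(conn, 'COLUMN', 'public', 'emp', 'ename', 'sepgsql', 'my_label')
   produces roughly:
   SECURITY LABEL FOR sepgsql ON COLUMN public.emp.ename IS 'my_label'; #}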


@@ -0,0 +1,133 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements Constraint Node"""
from flask.ext.babel import gettext
from flask import render_template, make_response
from pgadmin.browser.collection import CollectionNodeModule
import pgadmin.browser.server_groups.servers.databases as database
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response
from .type import ConstraintRegistry
class ConstraintsModule(CollectionNodeModule):
"""
class ConstraintsModule(CollectionNodeModule)
A module class for Constraint node derived from CollectionNodeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ConstraintsModule and its base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.
* node_inode()
- Method is overridden from its base class to make the node a leaf node.
* script_load()
- Load the module script for the constraint node, when any of the database
nodes are initialized.
"""
NODE_TYPE = 'constraints'
COLLECTION_LABEL = gettext("Constraints")
def __init__(self, *args, **kwargs):
self.min_ver = None
self.max_ver = None
super(ConstraintsModule, self).__init__(*args, **kwargs)
def get_nodes(self, gid, sid, did, scid, tid):
"""
Generate the collection node
"""
yield self.generate_browser_collection_node(tid)
@property
def script_load(self):
"""
Load the module script for constraints, when any of the database
nodes are initialized.
"""
return database.DatabaseModule.NODE_TYPE
blueprint = ConstraintsModule(__name__)
@blueprint.route('/nodes/<int:gid>/<int:sid>/<int:did>/<int:scid>/<int:tid>/')
def nodes(**kwargs):
"""
Returns all constraints as tree nodes.
Args:
**kwargs:
Returns:
"""
cmd = {"cmd": "nodes"}
res = []
for name in ConstraintRegistry.registry:
module = (ConstraintRegistry.registry[name])['nodeview']
view = module(**cmd)
res = res + view.get_nodes(**kwargs)
return make_json_response(
data=res,
status=200
)
@blueprint.route('/obj/<int:gid>/<int:sid>/<int:did>/<int:scid>/<int:tid>/')
def proplist(**kwargs):
"""
Returns all constraints with their properties.
Args:
**kwargs:
Returns:
"""
cmd = {"cmd": "obj"}
res = []
for name in ConstraintRegistry.registry:
module = (ConstraintRegistry.registry[name])['nodeview']
view = module(**cmd)
res = res + view.get_node_list(**kwargs)
return ajax_response(
response=res,
status=200
)
@blueprint.route('/module.js')
def module_js():
"""
This property defines whether javascript exists for this node.
"""
return make_response(
render_template(
"constraints/js/constraints.js",
_=gettext,
constraints=[
(ConstraintRegistry.registry[n])['blueprint'].NODE_TYPE \
for n in ConstraintRegistry.registry
]
),
200, {'Content-Type': 'application/x-javascript'}
)


@@ -0,0 +1,833 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements the Check Constraint Module."""
import json
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext as _
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.browser.collection import CollectionNodeModule
import pgadmin.browser.server_groups.servers.databases as database
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from functools import wraps
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
class CheckConstraintModule(CollectionNodeModule):
"""
class CheckConstraintModule(CollectionNodeModule):
This class represents The Check Constraint Module.
Methods:
-------
* __init__(*args, **kwargs)
- Initialize the Check Constraint Module.
* get_nodes(gid, sid, did, scid)
- Generate the Check Constraint collection node.
* node_inode(gid, sid, did, scid)
- Returns Check Constraint node as leaf node.
* script_load()
- Load the module script for the Check Constraint, when any of the
database nodes are initialized.
"""
NODE_TYPE = 'check_constraints'
COLLECTION_LABEL = _("Check Constraints")
def __init__(self, *args, **kwargs):
super(CheckConstraintModule, self).__init__(*args, **kwargs)
self.min_ver = None
self.max_ver = None
def get_nodes(self, gid, sid, did, scid, doid):
"""
Generate the Check Constraint collection node.
"""
yield self.generate_browser_collection_node(doid)
@property
def node_inode(self):
"""
Returns Check Constraint node as leaf node.
"""
return False
@property
def script_load(self):
"""
Load the module script for the Check Constraint, when any of the
database nodes are initialized.
"""
return database.DatabaseModule.NODE_TYPE
@property
def csssnippets(self):
"""
Returns a snippet of css to include in the page
"""
return [
render_template(
"check_constraint/css/check_constraint.css",
node_type=self.node_type
)
]
blueprint = CheckConstraintModule(__name__)
class CheckConstraintView(PGChildNodeView):
"""
class CheckConstraintView(PGChildNodeView):
This class inherits PGChildNodeView to get the different routes for
the module.
The class is responsible to Create, Read, Update and Delete operations for
the Check Constraint.
Methods:
-------
* module_js():
- Load JS file (check_constraint.js) for this module.
* check_precondition(f):
- Works as a decorator.
- Checks database connection status.
- Attach connection object and template path.
* list(gid, sid, did, scid, tid, cid=None):
- List the Check Constraints.
* nodes(gid, sid, did, scid, tid, cid=None):
- Returns all the Check Constraints to generate Nodes in the browser.
* properties(gid, sid, did, scid, tid, cid):
- Returns the Check Constraint properties.
* create(gid, sid, did, scid, tid, cid=None):
- Creates a new Check Constraint object.
* update(gid, sid, did, scid, tid, cid):
- Updates the Check Constraint object.
* delete(gid, sid, did, scid, tid, cid):
- Drops the Check Constraint object.
* sql(gid, sid, did, scid, tid, cid=None):
- Returns the SQL for the Check Constraint object.
* msql(gid, sid, did, scid, tid, cid=None):
- Returns the modified SQL.
* get_sql(gid, sid, data, scid, tid, cid=None):
- Generates the SQL statements to create/update the Check Constraint
object.
* dependents(gid, sid, did, scid, tid, cid):
- Returns the dependents for the Check Constraint object.
* dependencies(gid, sid, did, scid, tid, cid):
- Returns the dependencies for the Check Constraint object.
* validate_check_constraint(gid, sid, did, scid, tid, cid):
- Validate check constraint.
"""
node_type = blueprint.node_type
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [
{'type': 'int', 'id': 'cid'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'delete': [{'delete': 'delete'}],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}],
'validate': [{'get': 'validate_check_constraint'}],
})
def module_js(self):
"""
Load JS file (check_constraint.js) for this module.
"""
return make_response(
render_template(
"check_constraint/js/check_constraint.js",
_=_
),
200, {'Content-Type': 'application/x-javascript'}
)
def check_precondition(f):
"""
Works as a decorator.
Checks database connection status.
Attach connection object and template path.
"""
@wraps(f)
def wrap(*args, **kwargs):
self = args[0]
driver = get_driver(PG_DEFAULT_DRIVER)
self.manager = driver.connection_manager(kwargs['sid'])
self.conn = self.manager.connection(did=kwargs['did'])
self.qtIdent = driver.qtIdent
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
_("Connection to the server has been lost!")
)
ver = self.manager.version
# we will set template path for sql scripts
if ver >= 90200:
self.template_path = 'check_constraint/sql/9.2_plus'
elif ver >= 90100:
self.template_path = 'check_constraint/sql/9.1_plus'
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
self.schema = rset['rows'][0]['schema']
self.table = rset['rows'][0]['table']
return f(*args, **kwargs)
return wrap
def end_transaction(self):
"""
End database transaction.
Returns:
"""
SQL = "END;"
self.conn.execute_scalar(SQL)
@check_precondition
def list(self, gid, sid, did, scid, tid, cid=None):
"""
List the Check Constraints.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Id
"""
try:
res = self.get_node_list(gid, sid, did, scid, tid, cid)
return ajax_response(
response=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_node_list(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all check constraints
nodes within that collection as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Check constraint ID
Returns:
"""
SQL = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid)
status, res = self.conn.execute_dict(SQL)
return res['rows']
@check_precondition
def nodes(self, gid, sid, did, scid, tid, cid=None):
"""
Returns all the Check Constraints.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check constraint Id.
"""
try:
res = self.get_nodes(gid, sid, did, scid, tid, cid)
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_nodes(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all check constraints as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Check constraint ID
Returns:
"""
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
tid=tid)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:
if "convalidated" in row and row["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon=icon,
valid=valid
))
return res
@check_precondition
def properties(self, gid, sid, did, scid, tid, cid):
"""
Returns the Check Constraint properties.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
return ajax_response(
response=data,
status=200
)
@check_precondition
def create(self, gid, sid, did, scid, tid, cid=None):
"""
This function will create a check constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Check constraint ID
Returns:
"""
required_args = ['consrc']
data = request.form if request.form else json.loads(request.data.decode())
for k, v in data.items():
try:
data[k] = json.loads(v)
except (ValueError, TypeError):
data[k] = v
for arg in required_args:
if arg not in data or data[arg] == '':
return make_json_response(
status=400,
success=0,
errormsg=_(
"Couldn't find the required parameter (%s)." % arg
)
)
data['schema'] = self.schema
data['table'] = self.table
try:
if 'name' not in data or data['name'] == "":
SQL = "BEGIN;"
# Start transaction.
status, res = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
# The below SQL will execute CREATE DDL only
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data
)
status, msg = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=msg)
if 'name' not in data or data['name'] == "":
sql = render_template(
"/".join([self.template_path,
'get_oid_with_transaction.sql'],
),
tid=tid)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
self.end_transaction()
data['name'] = res['rows'][0]['name']
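# At this point data['name'] holds the constraint name assigned by the
# server (PostgreSQL typically auto-generates one such as
# "<table>_<column>_check"), so the new browser node can be labelled with it.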
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
icon = "icon-check_constraints_bad"
valid = False
else:
icon = "icon-check_constraints"
valid = True
sql = render_template("/".join([self.template_path, 'alter.sql']),
data=data,
conn=self.conn)
sql = sql.strip('\n').strip(' ')
if sql != '':
status, result = self.conn.execute_scalar(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=result)
return jsonify(
node=self.blueprint.generate_browser_node(
res['rows'][0]['oid'],
tid,
data['name'],
icon=icon,
valid=valid
)
)
except Exception as e:
self.end_transaction()
return make_json_response(
status=400,
success=0,
errormsg=e
)
@check_precondition
def delete(self, gid, sid, did, scid, tid, cid):
"""
Drops the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
SQL = render_template("/".join([self.template_path,
'delete.sql']),
data=data)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("Check Constraint dropped"),
data={
'id': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def update(self, gid, sid, did, scid, tid, cid):
"""
Updates the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
data = request.form if request.form else json.loads(request.data.decode())
try:
data['schema'] = self.schema
data['table'] = self.table
SQL = self.get_sql(gid, sid, data, scid, tid, cid)
SQL = SQL.strip('\n').strip(' ')
if SQL != "":
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_name.sql']),
cid=cid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
if "convalidated" in res['rows'][0] and res['rows'][0]["convalidated"]:
icon = 'icon-check_constraints_bad'
valid = False
else:
icon = 'icon-check_constraints'
valid = True
return make_json_response(
success=1,
info="Check Constraint updated",
data={
'id': cid,
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did,
'icon': icon,
'valid': valid
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': cid,
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def sql(self, gid, sid, did, scid, tid, cid=None):
"""
Returns the SQL for the Check Constraint object.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data)
SQL += "\n"
SQL += render_template(
"/".join([self.template_path, 'alter.sql']),
data=data)
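# Prepend a header naming the constraint, with the reverse-engineered DROP statement
# rendered as a SQL comment above the CREATE/ALTER script.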
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
SQL = sql_header + SQL
return ajax_response(response=SQL)
@check_precondition
def msql(self, gid, sid, did, scid, tid, cid=None):
"""
Returns the modified SQL.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
Returns:
Check Constraint object in json format.
"""
data = {}
for k, v in request.args.items():
try:
data[k] = json.loads(v)
except ValueError:
data[k] = v
data['schema'] = self.schema
data['table'] = self.table
try:
sql = self.get_sql(gid, sid, data, scid, tid, cid)
sql = sql.strip('\n').strip(' ')
return make_json_response(
data=sql,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, gid, sid, data, scid, tid, cid=None):
"""
Generates the SQL statements to create/update the Check Constraint.
Args:
gid: Server Group Id
sid: Server Id
data: Data for the Check Constraint
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
try:
if cid is not None:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, cid=cid)
status, res = self.conn.execute_dict(SQL)
if not status:
return False, internal_server_error(errormsg=res)
old_data = res['rows'][0]
required_args = ['name']
for arg in required_args:
if arg not in data:
data[arg] = old_data[arg]
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data, conn=self.conn
)
else:
required_args = ['consrc']
for arg in required_args:
if arg not in data:
return _('-- definition incomplete')
elif isinstance(data[arg], list) and len(data[arg]) < 1:
return _('-- definition incomplete')
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data)
SQL += "\n"
SQL += render_template("/".join([self.template_path, 'alter.sql']),
data=data)
return SQL
except Exception as e:
return False, internal_server_error(errormsg=str(e))
@check_precondition
def dependents(self, gid, sid, did, scid, tid, cid):
"""
This function get the dependents and return ajax response
for the Check Constraint node.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
dependents_result = self.get_dependents(self.conn, cid)
return ajax_response(
response=dependents_result,
status=200
)
@check_precondition
def dependencies(self, gid, sid, did, scid, tid, cid):
"""
This function get the dependencies and return ajax response
for the Check Constraint node.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
"""
dependencies_result = self.get_dependencies(self.conn, cid)
return ajax_response(
response=dependencies_result,
status=200
)
@check_precondition
def validate_check_constraint(self, gid, sid, did, scid, tid, cid):
"""
Validate check constraint.
Args:
gid: Server Group Id
sid: Server Id
did: Database Id
scid: Schema Id
tid: Table Id
cid: Check Constraint Id
Returns:
"""
data = {}
try:
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'get_name.sql']), cid=cid)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
data['name'] = res
sql = render_template("/".join([self.template_path, 'validate.sql']), data=data)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("Check constraint updated."),
data={
'id': cid,
'tid': tid,
'scid': scid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
constraint = ConstraintRegistry(
'check_constraint', CheckConstraintModule, CheckConstraintView
)
CheckConstraintView.register_node_view(blueprint)
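The registration call above follows the same pattern used by every constraint type in this change: a constraint type name is paired with its module and view classes so the table node can discover and expose them. A minimal sketch of that idea follows; the class and method names below are hypothetical illustrations, not the actual ConstraintRegistry API.
# Toy sketch of a constraint registry (hypothetical names, not pgAdmin's real API).
class ToyConstraintRegistry:
    _registry = {}

    def __init__(self, name, module_class, view_class):
        # Registering records the module/view pair under the constraint type name.
        ToyConstraintRegistry._registry[name] = {
            'module': module_class,
            'view': view_class,
        }

    @classmethod
    def registered(cls):
        # A parent node could iterate over this mapping to build its child collections.
        return dict(cls._registry)

# Usage mirrors the calls at the end of each constraint file in this diff, e.g.:
# ToyConstraintRegistry('check_constraint', CheckConstraintModule, CheckConstraintView)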


@ -0,0 +1,15 @@
.icon-check_bad, .icon-check_constraints_bad {
background-image: url('{{ url_for('NODE-%s.static' % node_type, filename='img/check-constraints-bad.png' )}}') !important;
background-repeat: no-repeat;
align-content: center;
vertical-align: middle;
height: 1.3em;
}
.icon-check, .icon-check_constraints {
background-image: url('{{ url_for('NODE-%s.static' % node_type, filename='img/check-constraints.png' )}}') !important;
background-repeat: no-repeat;
align-content: center;
vertical-align: middle;
height: 1.3em;
}


@ -0,0 +1,216 @@
// Check Constraint Module: Node
define(
[
'jquery', 'underscore', 'underscore.string', 'pgadmin', 'pgadmin.browser',
'alertify', 'pgadmin.browser.collection'
],
function($, _, S, pgAdmin, pgBrowser, Alertify) {
// Check Constraint Node
if (!pgBrowser.Nodes['check_constraints']) {
pgAdmin.Browser.Nodes['check_constraints'] = pgBrowser.Node.extend({
type: 'check_constraints',
label: '{{ _('Check') }}',
collection_type: 'coll-constraints',
sqlAlterHelp: 'ddl-alter.html',
sqlCreateHelp: 'ddl-constraints.html',
hasSQL: true,
hasDepends: true,
parent_type: ['table'],
Init: function() {
// Avoid multiple registration of menus
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([{
name: 'create_check_constraints_on_coll', node: 'coll-constraints', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 5, label: '{{ _('Check...') }}',
icon: 'wcTabIcon icon-check_constraints', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'validate_check_constraint', node: 'check_constraints', module: this,
applies: ['object', 'context'], callback: 'validate_check_constraint',
category: 'validate', priority: 4, label: '{{ _('Validate check constraint') }}',
icon: 'fa fa-link', enable : 'is_not_valid', data: {action: 'edit', check: true}
}
]);
},
is_not_valid: function(itemData, item, data) {
if (this.canCreate(itemData, item, data)) {
return (itemData && !itemData.valid);
} else {
return false;
}
},
callbacks: {
validate_check_constraint: function(args) {
var input = args || {},
obj = this,
t = pgBrowser.tree,
i = input.item || t.selected(),
d = i && i.length == 1 ? t.itemData(i) : undefined;
if (!d) {
return false;
}
var data = d;
$.ajax({
url: obj.generate_url(i, 'validate', d, true),
type:'GET',
success: function(res) {
if (res.success == 1) {
Alertify.success("{{ _('" + res.info + "') }}");
t.removeIcon(i);
data.valid = true;
data.icon = 'icon-check_constraints';
t.addIcon(i, {icon: data.icon});
setTimeout(function() {t.deselect(i);}, 10);
setTimeout(function() {t.select(i);}, 100);
}
},
error: function(xhr, status, error) {
try {
var err = $.parseJSON(xhr.responseText);
if (err.success == 0) {
msg = S('{{ _(' + err.errormsg + ')}}').value();
Alertify.error("{{ _('" + err.errormsg + "') }}");
}
} catch (e) {}
t.unload(i);
}
});
return false;
}
},
canDrop: pgBrowser.Nodes['schema'].canChildDrop,
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
oid: undefined,
description: undefined,
consrc: undefined,
connoinherit: undefined,
convalidated: true
},
// Check Constraint Schema
schema: [{
id: 'name', label: '{{ _('Name') }}', type:'text', cell:'string',
disabled: 'isDisabled'
},{
id: 'oid', label:'{{ _('OID') }}', cell: 'string',
type: 'text' , mode: ['properties']
},{
id: 'comment', label: '{{ _('Comment') }}', type: 'multiline', cell:
'string', mode: ['properties', 'create', 'edit'],
deps:['name'], disabled:function(m) {
var name = m.get('name');
if (!(name && name != '')) {
setTimeout(function(){
m.set('comment', null);
},10);
return true;
} else {
return false;
}
}
},{
id: 'consrc', label: '{{ _('Check') }}', type: 'multiline', cell:
'string', group: '{{ _('Definition') }}', mode: ['properties',
'create', 'edit'], disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
}, editable: false
},{
id: 'connoinherit', label: '{{ _('No Inherit') }}', type:
'switch', cell: 'boolean', group: '{{ _('Definition') }}', mode:
['properties', 'create', 'edit'], min_version: 90200,
disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
}
},{
id: 'convalidated', label: "{{ _("Don't validate") }}", type: 'switch', cell:
'boolean', group: '{{ _('Definition') }}', min_version: 90200,
disabled: function(m) {
if ((_.isFunction(m.isNew) && !m.isNew()) ||
(_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid')))) {
return !m.get("convalidated");
} else {
return false;
}
},
mode: ['properties', 'create', 'edit']
}],
// Client Side Validation
validate: function() {
var err = {},
errmsg;
if (_.isUndefined(this.get('consrc')) || String(this.get('consrc')).replace(/^\s+|\s+$/g, '') == '') {
err['consrc'] = '{{ _('Check can not be empty!') }}';
errmsg = errmsg || err['consrc'];
}
this.errorModel.clear().set(err);
if (_.size(err)) {
this.trigger('on-status', {msg: errmsg});
return errmsg;
}
return null;
},
isDisabled: function(m){
if ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) ||
(_.isFunction(m.isNew) && !m.isNew())) {
var server = (this.node_info || m.top.node_info).server;
if (server.version < 90200)
{
return true;
}
}
return false;
}
}),
// Below function will enable right click menu for creating check constraint.
canCreate: function(itemData, item, data) {
// If check is false then , we will allow create menu
if (data && data.check == false)
return true;
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
// If it is schema then allow user to create table
if (_.indexOf(['schema'], d._type) > -1)
return true;
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// If node is under catalog then do not allow 'create' menu
if (_.indexOf(parents, 'catalog') > -1) {
return false;
} else {
return true;
}
}
});
}
return pgBrowser.Nodes['check_constraints'];
});
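For reference, the validate callback above relies on the JSON contract produced by make_json_response in the Python view: a top-level success flag plus info and data. A small hedged sketch of that handling is shown below; the field names come from the Python view earlier in this diff, while the numeric ids are made-up placeholders.
# Hypothetical illustration of the response shape the validate callback expects.
example_response = {
    "success": 1,
    "info": "Check constraint updated.",
    "data": {"id": 1234, "tid": 1230, "scid": 2200, "did": 1},
}

def on_validate_success(res):
    # Mirrors the JS handler: only mark the node valid when success == 1.
    if res.get("success") == 1:
        return {"valid": True, "icon": "icon-check_constraints"}
    return None

assert on_validate_success(example_response) == {
    "valid": True, "icon": "icon-check_constraints"
}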


@ -0,0 +1,820 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements Exclusion constraint Node"""
import json
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext as _
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
import pgadmin.browser.server_groups.servers.databases as database
from functools import wraps
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
class ExclusionConstraintModule(ConstraintTypeModule):
"""
class ExclusionConstraintModule(ConstraintTypeModule)
A module class for Exclusion constraint node derived from ConstraintTypeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the ExclusionConstraintModule and its base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.
* node_inode()
- Method is overridden from its base class to make the node as leaf node.
* script_load()
- Load the module script for exclusion constraint, when any of the database nodes is
initialized.
"""
NODE_TYPE = 'exclusion_constraint'
COLLECTION_LABEL = _("Exclusion constraints")
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the ExclusionConstraintModule and its base module.
Args:
*args:
**kwargs:
Returns:
"""
self.min_ver = None
self.max_ver = None
super(ExclusionConstraintModule, self).__init__(*args, **kwargs)
def get_nodes(self, gid, sid, did, scid, tid):
"""
Generate the collection node
"""
pass
@property
def node_inode(self):
"""
Override this property to make the node a leaf node.
Returns: False as this is the leaf node
"""
return False
@property
def script_load(self):
"""
Load the module script for exclusion_constraint, when any of the table node is
initialized.
Returns: node type of the server module.
"""
return database.DatabaseModule.NODE_TYPE
blueprint = ExclusionConstraintModule(__name__)
class ExclusionConstraintView(PGChildNodeView):
"""
class ExclusionConstraintView(PGChildNodeView)
A view class for Exclusion constraint node derived from PGChildNodeView. This class is
responsible for all the stuff related to view like creating, updating Exclusion constraint
node, showing properties, showing sql in sql pane.
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the ExclusionConstraintView and its base view.
* module_js()
- This property defines (if javascript) exists for this node.
Override this property for your own logic
* check_precondition()
- This function behaves as a decorator which checks the
database connection before running the view; it also attaches the
manager, conn & template_path properties to self
* end_transaction()
- To end any existing database transaction.
* list()
- This function returns Exclusion constraint nodes within that
collection as http response.
* get_list()
- This function is used to list all the exclusion constraint nodes within that
collection and return them as a list.
* nodes()
- This function returns child node within that collection.
Here return all Exclusion constraint node as http response.
* get_nodes()
- returns all Exclusion constraint nodes' list.
* properties()
- This function will show the properties of the selected Exclusion.
* update()
- This function will update the data for the selected Exclusion.
* msql()
- This function is used to return modified SQL for the selected Exclusion.
* get_sql()
- This function will generate sql from model data.
* sql():
- This function will generate sql to show it in sql pane for the selected Exclusion.
* get_access_methods():
- Returns access methods for exclusion constraint.
* get_oper_class():
- Returns operator classes for selected access method.
* get_operator():
- Returns operators for selected column.
"""
node_type = 'exclusion_constraint'
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [{'type': 'int', 'id': 'exid'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'delete': [{'delete': 'delete'}],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'module.js': [{}, {}, {'get': 'module_js'}]
})
def module_js(self):
"""
This property defines (if javascript) exists for this node.
Override this property for your own logic.
"""
return make_response(
render_template(
"exclusion_constraint/js/exclusion_constraint.js",
_=_
),
200, {'Content-Type': 'application/x-javascript'}
)
def check_precondition(f):
"""
This function behaves as a decorator which checks the
database connection before running the view; it also attaches the
manager, conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
kwargs['sid']
)
self.conn = self.manager.connection(did=kwargs['did'])
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
_(
"Connection to the server has been lost!"
)
)
ver = self.manager.version
if ver >= 90200:
self.template_path = 'exclusion_constraint/sql/9.2_plus'
elif ver >= 90100:
self.template_path = 'exclusion_constraint/sql/9.1_plus'
# We need parent's name eg table name and schema name
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
self.schema = row['schema']
self.table = row['table']
return f(*args, **kwargs)
return wrap
def end_transaction(self):
SQL = render_template(
"/".join([self.template_path, 'end.sql']))
# End transaction if any.
self.conn.execute_scalar(SQL)
@check_precondition
def properties(self, gid, sid, did, scid, tid, exid=None):
"""
This function is used to list all the Exclusion constraint
nodes within that collection.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
try:
sql = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid, cid=exid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
result = res['rows'][0]
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=exid,
colcnt=result['indnatts'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
columns = []
for row in res['rows']:
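# Decode the per-column option bits: bit 0x01 marks a descending column and bit 0x02
# marks NULLS FIRST (assumption based on how the flags are consumed below).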
if row['options'] & 1:
order = False
nulls_order = True if (row['options'] & 2) else False
else:
order = True
nulls_order = True if (row['options'] & 2) else False
columns.append({"column": row['coldef'].strip('"'),
"oper_class": row['opcname'],
"order": order,
"nulls_order": nulls_order,
"operator": row['oprname'],
"col_type": row['datatype']
})
result['columns'] = columns
return ajax_response(
response=result,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def list(self, gid, sid, did, scid, tid, exid=None):
"""
This function returns all exclusion constraints
nodes within that collection as a http response.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
try:
res = self.get_node_list(gid, sid, did, scid, tid, exid)
return ajax_response(
response=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_node_list(self, gid, sid, did, scid, tid, exid=None):
"""
This function returns all exclusion constraints
nodes within that collection as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid)
status, res = self.conn.execute_dict(SQL)
return res['rows']
@check_precondition
def nodes(self, gid, sid, did, scid, tid, exid=None):
"""
This function returns all Exclusion constraint nodes as a
http response.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
try:
res = self.get_nodes(gid, sid, did, scid, tid, exid)
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_nodes(self, gid, sid, did, scid, tid, exid=None):
"""
This function returns all Exclusion constraint nodes as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']),
tid=tid)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon="icon-exclusion_constraint"
))
return res
@check_precondition
def create(self, gid, sid, did, scid, tid, exid=None):
"""
This function will create a Exclusion constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
required_args = ['columns']
data = request.form if request.form else json.loads(request.data.decode())
for k, v in data.items():
try:
data[k] = json.loads(v)
except (ValueError, TypeError):
data[k] = v
for arg in required_args:
if arg not in data:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Couldn't find required parameter (%s)." % str(arg)
)
)
elif isinstance(data[arg], list) and len(data[arg]) < 1:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Couldn't find required parameter (%s)." % str(arg)
)
)
data['schema'] = self.schema
data['table'] = self.table
try:
if 'name' not in data or data['name'] == "":
SQL = render_template(
"/".join([self.template_path, 'begin.sql']))
# Start transaction.
status, res = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
# The below SQL will execute CREATE DDL only
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn
)
status, res = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
if 'name' not in data or data['name'] == "":
sql = render_template(
"/".join([self.template_path,
'get_oid_with_transaction.sql']),
tid=tid)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
self.end_transaction()
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
if 'name' in data and data['name'] != '':
sql = render_template("/".join([self.template_path, 'alter.sql']), data=data, conn=self.conn)
sql = sql.strip('\n').strip(' ')
if sql != '':
status, result = self.conn.execute_scalar(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=result)
return jsonify(
node=self.blueprint.generate_browser_node(
res['rows'][0]['oid'],
tid,
data['name'],
icon="icon-exclusion_constraint"
)
)
except Exception as e:
self.end_transaction()
return make_json_response(
status=400,
success=0,
errormsg=str(e)
)
@check_precondition
def update(self, gid, sid, did, scid, tid, exid=None):
"""
This function will update the data for the selected
Exclusion constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
data = request.form if request.form else json.loads(request.data.decode())
try:
data['schema'] = self.schema
data['table'] = self.table
sql = self.get_sql(data, tid, exid)
sql = sql.strip('\n').strip(' ')
if sql != "":
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_oid.sql']), name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info="Exclusion constraint updated",
data={
'id': res['rows'][0]['oid'],
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did,
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': exid,
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, exid=None):
"""
This function will delete an existing Exclusion.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
# Below code will decide if it's simple drop or drop with cascade call
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
try:
sql = render_template("/".join([self.template_path, 'get_name.sql']),
cid=exid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'delete.sql']),
data=data,
cascade=cascade)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("Exclusion constraint dropped."),
data={
'id': exid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def msql(self, gid, sid, did, scid, tid, exid=None):
"""
This function returns modified SQL for the selected
Exclusion constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
data = {}
for k, v in request.args.items():
try:
data[k] = json.loads(v)
except ValueError:
data[k] = v
data['schema'] = self.schema
data['table'] = self.table
try:
sql = self.get_sql(data, tid, exid)
sql = sql.strip('\n').strip(' ')
return make_json_response(
data=sql,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, data, tid, exid=None):
"""
This function will generate sql from model data.
Args:
data: Contains the data of the selected Exclusion constraint.
tid: Table ID.
exid: Exclusion constraint ID
Returns:
"""
if exid is not None:
sql = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid,
cid=exid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
old_data = res['rows'][0]
required_args = ['name']
for arg in required_args:
if arg not in data:
data[arg] = old_data[arg]
sql = render_template("/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data)
else:
required_args = ['columns']
for arg in required_args:
if arg not in data:
return _('-- definition incomplete')
elif isinstance(data[arg], list) and len(data[arg]) < 1:
return _('-- definition incomplete')
sql = render_template("/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn)
sql += "\n"
sql += render_template("/".join([self.template_path, 'alter.sql']),
data=data, conn=self.conn)
return sql
@check_precondition
def sql(self, gid, sid, did, scid, tid, exid=None):
"""
This function generates sql to show in the sql pane for the selected
Exclusion constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
exid: Exclusion constraint ID
Returns:
"""
try:
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid, conn=self.conn, cid=exid)
status, result = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=result)
data = result['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=exid,
colcnt=data['indnatts'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
columns = []
for row in res['rows']:
if row['options'] & 1:
order = False
nulls_order = True if (row['options'] & 2) else False
else:
order = True
nulls_order = True if (row['options'] & 2) else False
columns.append({"column": row['coldef'].strip('"'),
"oper_class": row['opcname'],
"order": order,
"nulls_order": nulls_order,
"operator": row['oprname']
})
data['columns'] = columns
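# When no access method was recorded, default to btree so the generated DDL still
# names one (assumption about the intent of this fallback).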
if not data['amname'] or data['amname'] == '':
data['amname'] = 'btree'
SQL = render_template(
"/".join([self.template_path, 'create.sql']), data=data)
SQL += "\n"
SQL += render_template(
"/".join([self.template_path, 'alter.sql']),
data=data, conn=self.conn)
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
SQL = sql_header + SQL
return ajax_response(response=SQL)
except Exception as e:
return internal_server_error(errormsg=str(e))
constraint = ConstraintRegistry(
'exclusion_constraint', ExclusionConstraintModule, ExclusionConstraintView
)
ExclusionConstraintView.register_node_view(blueprint)
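Both properties() and sql() above decode the per-column options bitmask the same way. A small standalone sketch of that decoding follows, written under the assumption (taken from the code above) that bit 0x01 marks a descending column and bit 0x02 marks NULLS FIRST.
# Hedged sketch of the option-bit decoding used by the exclusion constraint views.
def decode_index_options(options):
    descending = bool(options & 1)   # assumption: bit 0x01 = DESC
    nulls_first = bool(options & 2)  # assumption: bit 0x02 = NULLS FIRST
    # The views expose 'order' as True for ascending columns.
    return {"order": not descending, "nulls_order": nulls_first}

# Example: options == 3 decodes to a descending column with NULLS FIRST.
print(decode_index_options(3))  # {'order': False, 'nulls_order': True}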


@ -0,0 +1,915 @@
define(
['jquery', 'underscore', 'underscore.string', 'pgadmin',
'pgadmin.browser', 'alertify', 'pgadmin.browser.collection'],
function($, _, S, pgAdmin, pgBrowser, Alertify) {
var ExclusionConstraintColumnModel = pgBrowser.Node.Model.extend({
defaults: {
column: undefined,
oper_class: undefined,
order: undefined,
nulls_order: undefined,
operator:undefined,
col_type:undefined
},
toJSON: function () {
var d = pgBrowser.Node.Model.prototype.toJSON.apply(this, arguments);
delete d.col_type;
return d;
},
schema: [{
id: 'column', label:'{{ _('Column') }}', type:'text', editable: false,
cell:'string'
},{
id: 'oper_class', label:'{{ _('Operator class') }}', type:'text',
node: 'table', url: 'get_oper_class', first_empty: true,
editable: function(m) {
if (m instanceof Backbone.Collection) {
return true;
}
if ((_.has(m.collection, 'handler') &&
!_.isUndefined(m.collection.handler) &&
!_.isUndefined(m.collection.handler.get('oid')))) {
return false;
}
if (m.collection) {
var indexType = m.collection.handler.get('amname')
return (indexType == 'btree' || _.isUndefined(indexType) ||
_.isNull(indexType) || indexType == '');
} else {
return true;
}
},
select2: {
allowClear: true, width: 'style',
placeholder: '{{ _("Select the operator class") }}'
}, cell: Backgrid.Extension.Select2Cell.extend({
initialize: function () {
Backgrid.Extension.Select2Cell.prototype.initialize.apply(this, arguments);
var self = this,
url = self.column.get('url') || self.defaults.url,
m = self.model,
indextype = self.model.collection.handler.get('amname');
if (url && (indextype == 'btree' || _.isUndefined(indextype) ||
_.isNull(indextype) || indextype == '')) {
var node = this.column.get('schema_node'),
eventHandler = m.top || m,
node_info = this.column.get('node_info'),
full_url = node.generate_url.apply(
node, [
null, url, this.column.get('node_data'),
this.column.get('url_with_id') || false, node_info
]),
data = [];
indextype = 'btree';
if (this.column.get('version_compatible')) {
eventHandler.trigger('pgadmin:view:fetching', m, self.column);
$.ajax({
async: false,
data : {indextype:indextype},
url: full_url,
success: function(res) {
data = res.data;
},
error: function() {
eventHandler.trigger('pgadmin:view:fetch:error', m, self.column);
}
});
eventHandler.trigger('pgadmin:view:fetched', m, self.column);
}
/*
* Transform the data
*/
transform = self.column.get('transform') || self.defaults.transform;
if (transform && _.isFunction(transform)) {
// We will transform the data later, when rendering.
// It will allow us to generate different data based on the
// dependencies.
self.column.set('options', transform.bind(self, data));
} else {
self.column.set('options', data);
}
} else {
self.column.set('options', []);
}
}
})
},{
id: 'order', label:'{{ _('DESC') }}', type: 'switch',
options: {
onText: 'ASC',
offText: 'DESC',
},editable: function(m) {
if (m instanceof Backbone.Collection) {
return true;
}
if ((_.has(m.collection, 'handler') &&
!_.isUndefined(m.collection.handler) &&
!_.isUndefined(m.collection.handler.get('oid')))) {
return false;
}
return true;
}
},{
id: 'nulls_order', label:'{{ _('NULLs order') }}', type:"switch",
options: {
onText: 'FIRST',
offText: 'LAST',
},editable: function(m) {
if (m instanceof Backbone.Collection) {
return true;
}
if ((_.has(m.collection, 'handler') &&
!_.isUndefined(m.collection.handler) &&
!_.isUndefined(m.collection.handler.get('oid')))) {
return false;
}
return true;
}
},{
id: 'operator', label:'{{ _('Operator') }}', type: 'text',
node: 'table', url: 'get_operator',
editable: function(m) {
if (m instanceof Backbone.Collection) {
return true;
}
if ((_.has(m.collection, 'handler') &&
!_.isUndefined(m.collection.handler) &&
!_.isUndefined(m.collection.handler.get('oid')))) {
return false;
}
return true;
},
select2: {
allowClear: false, width: 'style',
}, cell: Backgrid.Extension.Select2Cell.extend({
initialize: function () {
Backgrid.Extension.Select2Cell.prototype.initialize.apply(this, arguments);
var self = this,
url = self.column.get('url') || self.defaults.url,
m = self.model,
col_type = self.model.get('col_type');
self.column.set('options', []);
if (url && !_.isUndefined(col_type) && !_.isNull(col_type) && col_type != '') {
var node = this.column.get('schema_node'),
eventHandler = m.top || m,
node_info = this.column.get('node_info'),
full_url = node.generate_url.apply(
node, [
null, url, this.column.get('node_data'),
this.column.get('url_with_id') || false, node_info
]),
data = [];
if (this.column.get('version_compatible')) {
eventHandler.trigger('pgadmin:view:fetching', m, self.column);
$.ajax({
async: false,
data : {col_type:col_type},
url: full_url,
success: function(res) {
data = res.data;
},
error: function() {
eventHandler.trigger('pgadmin:view:fetch:error', m, self.column);
}
});
eventHandler.trigger('pgadmin:view:fetched', m, self.column);
}
/*
* Transform the data
*/
transform = self.column.get('transform') || self.defaults.transform;
if (transform && _.isFunction(transform)) {
// We will transform the data later, when rendering.
// It will allow us to generate different data based on the
// dependencies.
self.column.set('options', transform.bind(self, data));
} else {
self.column.set('options', data);
}
}
}
})
}
]
});
var ExclusionConstraintColumnControl = Backform.ExclusionConstraintColumnControl =
Backform.UniqueColCollectionControl.extend({
initialize: function(opts) {
Backform.UniqueColCollectionControl.prototype.initialize.apply(
this, arguments
);
var self = this,
node = 'exclusion_constraint',
headerSchema = [{
id: 'column', label:'', type:'text',
node: 'column', control: Backform.NodeListByNameControl.extend({
initialize: function() {
// Here we will decide if we need to call URL
// Or fetch the data from parent columns collection
if(self.model.handler) {
Backform.Select2Control.prototype.initialize.apply(this, arguments);
// Do not listen for any event(s) for existing constraint.
if (_.isUndefined(self.model.get('oid'))) {
var tableCols = self.model.top.get('columns');
this.listenTo(tableCols, 'remove' , this.removeColumn);
this.listenTo(tableCols, 'change:name', this.resetColOptions);
this.listenTo(tableCols, 'change:cltype', this.resetColOptions);
}
this.custom_options();
} else {
Backform.NodeListByNameControl.prototype.initialize.apply(this, arguments);
}
},
removeColumn: function (m) {
var that = this;
setTimeout(function () {
that.custom_options();
that.render.apply(that);
}, 50);
},
resetColOptions: function(m) {
var that = this;
if (m.previous('name') == self.headerData.get('column')) {
/*
* Table column name has changed so update
* column name in exclude constraint as well.
*/
self.headerData.set(
{"column": m.get('name')});
self.headerDataChanged();
}
setTimeout(function () {
that.custom_options();
that.render.apply(that);
}, 50);
},
custom_options: function() {
// We will add all the columns entered by user in table model
var columns = self.model.top.get('columns'),
added_columns_from_tables = [],
col_types = [];
if (columns.length > 0) {
_.each(columns.models, function(m) {
var col = m.get('name');
if(!_.isUndefined(col) && !_.isNull(col)) {
added_columns_from_tables.push({
label: col, value: col, image:'icon-column'
});
col_types.push({name:col, type:m.get('cltype')});
}
});
}
// Set the values in to options so that user can select
this.field.set('options', added_columns_from_tables);
self.field.set('col_types', col_types);
},
remove: function () {
if(self.model.handler) {
tableCols = self.model.top.get('columns');
this.stopListening(tableCols, 'remove' , this.removeColumn);
this.stopListening(tableCols, 'change:name' , this.resetColOptions);
this.stopListening(tableCols, 'change:cltype' , this.resetColOptions);
Backform.Select2Control.prototype.remove.apply(this, arguments);
} else {
Backform.NodeListByNameControl.prototype.remove.apply(this, arguments);
}
},
template: _.template([
'<div class="<%=Backform.controlsClassName%> <%=extraClasses.join(\' \')%>">',
' <select class="pgadmin-node-select form-control" name="<%=name%>" style="width:100%;" value="<%-value%>" <%=disabled ? "disabled" : ""%> <%=required ? "required" : ""%> >',
' <% if (first_empty) { %>',
' <option value="" <%="" === rawValue ? "selected" : "" %>><%- empty_value %></option>',
' <% } %>',
' <% for (var i=0; i < options.length; i++) { %>',
' <% var option = options[i]; %>',
' <option <% if (option.image) { %> data-image=<%= option.image %> <% } %> value=<%= formatter.fromRaw(option.value) %> <%=option.value === rawValue ? "selected=\'selected\'" : "" %>><%-option.label%></option>',
' <% } %>',
' </select>',
'</div>'].join("\n"))
}),
transform: function(rows) {
// This will only get called in case of NodeListByNameControl.
var that = this,
node = that.field.get('schema_node'),
res = [],
col_types = [],
filter = that.field.get('filter') || function() { return true; };
filter = filter.bind(that);
_.each(rows, function(r) {
if (filter(r)) {
var l = (_.isFunction(node['node_label']) ?
(node['node_label']).apply(node, [r, that.model, that]) :
r.label),
image = (_.isFunction(node['node_image']) ?
(node['node_image']).apply(
node, [r, that.model, that]
) :
(node['node_image'] || ('icon-' + node.type)));
res.push({
'value': r.label,
'image': image,
'label': l
});
col_types.push({name:r.label, type:r.datatype});
}
});
self.field.set('col_types', col_types);
return res;
},
canAdd: function(m) {
return !((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
},
select2: {
allowClear: false, width: 'style',
placeholder: 'Select column'
}, first_empty: !self.model.isNew(),
disabled: function(m) {
return !_.isUndefined(self.model.get('oid'));
}
}],
headerDefaults = {column: null},
gridCols = ['column', 'oper_class', 'order', 'nulls_order', 'operator'];
self.headerData = new (Backbone.Model.extend({
defaults: headerDefaults,
schema: headerSchema
}))({});
var headerGroups = Backform.generateViewSchema(
self.field.get('node_info'), self.headerData, 'create',
node, self.field.get('node_data')
),
fields = [];
_.each(headerGroups, function(o) {
fields = fields.concat(o.fields);
});
self.headerFields = new Backform.Fields(fields);
self.gridSchema = Backform.generateGridColumnsFromModel(
self.field.get('node_info'), self.field.get('model'), 'edit', gridCols, self.field.get('schema_node')
);
self.controls = [];
self.listenTo(self.headerData, "change", self.headerDataChanged);
self.listenTo(self.headerData, "select2", self.headerDataChanged);
self.listenTo(self.collection, "add", self.onAddorRemoveColumns);
self.listenTo(self.collection, "remove", self.onAddorRemoveColumns);
},
generateHeader: function(data) {
var header = [
'<div class="subnode-header-form">',
' <div class="container-fluid">',
' <div class="row">',
' <div class="col-xs-4">',
' <label class="control-label"><%-column_label%></label>',
' </div>',
' <div class="col-xs-4" header="column"></div>',
' <div class="col-xs-4">',
' <button class="btn-sm btn-default add" <%=canAdd ? "" : "disabled=\'disabled\'"%> ><%-add_label%></buttton>',
' </div>',
' </div>',
' </div>',
'</div>',].join("\n")
_.extend(data, {
column_label: '{{ _('Column')}}',
add_label: '{{ _('ADD')}}'
});
var self = this,
headerTmpl = _.template(header),
$header = $(headerTmpl(data)),
controls = this.controls;
this.headerFields.each(function(field) {
var control = new (field.get("control"))({
field: field,
model: self.headerData
});
$header.find('div[header="' + field.get('name') + '"]').append(
control.render().$el
);
controls.push(control);
});
// We should not show add but in properties mode
if (data.mode == 'properties') {
$header.find("button.add").remove();
}
self.$header = $header;
return $header;
},
events: _.extend(
{}, Backform.UniqueColCollectionControl.prototype.events,
{'click button.add': 'addColumns'}
),
showGridControl: function(data) {
var self = this,
titleTmpl = _.template("<div class='subnode-header'></div>"),
$gridBody =
$("<div class='pgadmin-control-group backgrid form-group col-xs-12 object subnode'></div>").append(
titleTmpl({label: data.label})
);
$gridBody.append(self.generateHeader(data));
var gridColumns = _.clone(this.gridSchema.columns);
// Insert Delete Cell into Grid
if (data.disabled == false && data.canDelete) {
gridColumns.unshift({
name: "pg-backform-delete", label: "",
cell: Backgrid.Extension.DeleteCell,
editable: false, cell_priority: -1
});
}
if (self.grid) {
self.grid.remove();
self.grid = null;
}
// Initialize a new Grid instance
var grid = self.grid = new Backgrid.Grid({
columns: gridColumns,
collection: self.collection,
className: "backgrid table-bordered"
});
self.$grid = grid.render().$el;
$gridBody.append(self.$grid);
setTimeout(function() {
self.headerData.set({
'column': self.$header.find(
'div[header="column"] select'
).val()
}, {silent:true}
);
}, 10);
// Render node grid
return $gridBody;
},
headerDataChanged: function() {
var self = this, val,
data = this.headerData.toJSON(),
inSelected = false,
checkVars = ['column'];
if (!self.$header) {
return;
}
if (self.control_data.canAdd) {
self.collection.each(function(m) {
if (!inSelected) {
_.each(checkVars, function(v) {
if (!inSelected) {
val = m.get(v);
inSelected = ((
(_.isUndefined(val) || _.isNull(val)) &&
(_.isUndefined(data[v]) || _.isNull(data[v]))
) ||
(val == data[v]));
}
});
}
});
}
else {
inSelected = true;
}
self.$header.find('button.add').prop('disabled', inSelected);
},
addColumns: function(ev) {
ev.preventDefault();
var self = this,
column = self.headerData.get('column');
if (!column || column == '') {
return false;
}
var coll = self.model.get(self.field.get('name')),
m = new (self.field.get('model'))(
self.headerData.toJSON(), {
silent: true, top: self.model.top,
collection: coll, handler: coll
}),
col_types =self.field.get('col_types') || [];
for(var i=0; i < col_types.length; i++) {
var col_type = col_types[i];
if (col_type['name'] == m.get('column')) {
m.set({'col_type':col_type['type']});
break;
}
}
coll.add(m);
var idx = coll.indexOf(m);
// idx may not be always > -1 because our UniqueColCollection may
// remove 'm' if duplicate value found.
if (idx > -1) {
self.$grid.find('.new').removeClass('new');
var newRow = self.grid.body.rows[idx].$el;
newRow.addClass("new");
$(newRow).pgMakeVisible('backform-tab');
} else {
delete m;
}
return false;
},
onAddorRemoveColumns: function() {
var self = this;
// Wait for collection to be updated before checking for the button to be
// enabled, or not.
setTimeout(function() {
self.collection.trigger('pgadmin:columns:updated', self.collection);
self.headerDataChanged();
}, 10);
},
remove: function() {
/*
* Stop listening the events registered by this control.
*/
this.stopListening(this.headerData, "change", this.headerDataChanged);
this.stopListening(this.headerData, "select2", this.headerDataChanged);
this.stopListening(this.collection, "add", this.onAddorRemoveColumns);
this.stopListening(this.collection, "remove", this.onAddorRemoveColumns);
// Remove header controls.
_.each(this.controls, function(controls) {
controls.remove();
});
ExclusionConstraintColumnControl.__super__.remove.apply(this, arguments);
// Remove the header model
delete (this.headerData);
}
});
// Extend the browser's node class for exclusion constraint node
if (!pgBrowser.Nodes['exclusion_constraint']) {
pgAdmin.Browser.Nodes['exclusion_constraint'] = pgBrowser.Node.extend({
type: 'exclusion_constraint',
label: '{{ _('Exclusion constraint') }}',
collection_type: 'coll-constraints',
sqlAlterHelp: 'ddl-alter.html',
sqlCreateHelp: 'ddl-constraints.html',
hasSQL: true,
parent_type: 'table',
canDrop: true,
canDropCascade: true,
hasDepends: true,
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([{
name: 'create_exclusion_constraint_on_coll', node: 'coll-constraints', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Exclusion constraint...') }}',
icon: 'wcTabIcon icon-exclusion_constraint', data: {action: 'create', check: true},
enable: 'canCreate'
}]);
},
is_not_valid: function(node) {
return (node && !node.valid);
},
// Define the model for exclusion constraint node
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
oid: undefined,
comment: undefined,
spcname: "pg_default",
amname: "gist",
fillfactor: undefined,
condeferrable: undefined,
condeferred: undefined,
columns: []
},
// Define the schema for the exclusion constraint node
schema: [{
id: 'name', label: '{{ _('Name') }}', type: 'text',
mode: ['properties', 'create', 'edit'], editable: true,
},{
id: 'oid', label:'{{ _('OID') }}', cell: 'string',
type: 'text' , mode: ['properties']
},{
id: 'comment', label:'{{ _('Comment') }}', cell: 'string',
type: 'multiline', mode: ['properties', 'create', 'edit'],
deps:['name'], disabled:function(m) {
var name = m.get('name');
if (!(name && name != '')) {
setTimeout(function(){
m.set('comment', null);
},10);
return true;
} else {
return false;
}
}
},{
id: 'spcname', label: '{{ _('Tablespace') }}',
type: 'text', group: '{{ _('Definition') }}',
control: 'node-list-by-name', node: 'tablespace',
select2:{allowClear:false},
filter: function(m) {
// Don't show pg_global tablespace in selection.
if (m.label == "pg_global") return false;
else return true;
}
},{
id: 'amname', label: '{{ _('Access method') }}',
type: 'text', group: '{{ _('Definition') }}',
url:"get_access_methods", node: 'table',
control: Backform.NodeAjaxOptionsControl.extend({
// When access method changes we need to clear columns collection
onChange: function() {
Backform.NodeAjaxOptionsControl.prototype.onChange.apply(this, arguments);
var self = this,
// current access method
current_am = self.model.get('amname'),
// previous access method
previous_am = self.model.previous('amname'),
column_collection = self.model.get('columns');
if (column_collection.length > 0 && current_am != previous_am) {
var msg = '{{ _('Changing access method will clear columns collection') }}';
Alertify.confirm(msg, function (e) {
// User clicks Ok, lets clear collection
column_collection.reset();
setTimeout(function() {
column_collection.trigger('pgadmin:columns:updated', column_collection);
}, 10);
}, function() {
// User clicks Cancel set previous value again in combo box
setTimeout(function(){
self.model.set('amname', previous_am);
}, 10);
});
}
}
}),
select2:{allowClear:true},
disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
}
},{
id: 'fillfactor', label: '{{ _('Fill factor') }}',
type: 'int', group: '{{ _('Definition') }}', allowNull: true
},{
id: 'condeferrable', label: '{{ _('Deferrable') }}',
type: 'switch', group: '{{ _('Definition') }}', deps: ['index'],
disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
}
},{
id: 'condeferred', label: '{{ _('Deferred') }}',
type: 'switch', group: '{{ _('Definition') }}',
deps: ['condeferrable'],
disabled: function(m) {
if((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew())) {
return true;
}
// Disable if condeferred is false or unselected.
if(m.get('condeferrable') == true) {
return false;
} else {
setTimeout(function(){
m.set('condeferred', false);
},10);
return true;
}
}
},{
id: 'constraint', label:'{{ _('Constraint') }}', cell: 'string',
type: 'multiline', mode: ['create', 'edit'], editable: false,
group: '{{ _('Definition') }}', disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
}
},{
id: 'columns', label: '{{ _('Columns') }}',
type: 'collection', group: '{{ _('Columns') }}', disabled: false,
deps:['amname'], canDelete: true, editable: false,
canAdd: function(m) {
// We can't update columns of existing exclusion constraint.
return !((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
},
control: ExclusionConstraintColumnControl,
model: ExclusionConstraintColumnModel,
disabled: function(m) {
return ((_.has(m, 'handler') &&
!_.isUndefined(m.handler) &&
!_.isUndefined(m.get('oid'))) || (_.isFunction(m.isNew) && !m.isNew()));
},
cell: Backgrid.StringCell.extend({
initialize: function() {
Backgrid.StringCell.prototype.initialize.apply(this, arguments);
var self = this;
// Do not listen for any event(s) for existing constraint.
if (_.isUndefined(self.model.get('oid'))) {
var tableCols = self.model.top.get('columns');
self.listenTo(tableCols, 'remove' , self.removeColumn);
self.listenTo(tableCols, 'change:name', self.resetColOptions);
self.listenTo(tableCols, 'change:cltype', self.removeColumnWithType);
}
this.model.get('columns').on('pgadmin:columns:updated', function() {
self.render.apply(self);
});
},
removeColumnWithType: function(m){
var self = this,
cols = self.model.get('columns'),
removedCols = cols.where(
{col_type: m.previous('cltype')}
);
cols.remove(removedCols);
setTimeout(function () {
self.render();
}, 10);
setTimeout(function () {
constraints = self.model.top.get("exclude_constraint");
var removed = [];
constraints.each(function(constraint) {
if (constraint.get("columns").length == 0) {
removed.push(constraint);
}
});
constraints.remove(removed);
},100);
},
removeColumn: function(m){
var self = this,
removedCols = self.model.get('columns').where(
{column: m.get('name')}
);
self.model.get('columns').remove(removedCols);
setTimeout(function () {
self.render();
}, 10);
setTimeout(function () {
constraints = self.model.top.get("exclude_constraint");
var removed = [];
constraints.each(function(constraint) {
if (constraint.get("columns").length == 0) {
removed.push(constraint);
}
});
constraints.remove(removed);
},100);
},
resetColOptions : function(m) {
var self = this,
updatedCols = self.model.get('columns').where(
{"column": m.previous('name')}
);
if (updatedCols.length > 0) {
/*
* Table column name has changed so update
* column name in foreign key as well.
*/
updatedCols[0].set(
{"column": m.get('name')});
}
setTimeout(function () {
self.render();
}, 10);
},
formatter: {
fromRaw: function (rawValue, model) {
return rawValue.pluck("column").toString();
},
toRaw: function (val, model) {
return val;
}
},
render: function() {
return Backgrid.StringCell.prototype.render.apply(this, arguments);
},
remove: function() {
var tableCols = this.model.top.get('columns'),
cols = this.model.get('columns');
if (cols) {
cols.off('pgadmin:columns:updated');
}
this.stopListening(tableCols, 'remove' , this.removeColumn);
this.stopListening(tableCols, 'change:name' , this.resetColOptions);
this.stopListening(tableCols, 'change:cltype' , this.removeColumnWithType);
Backgrid.StringCell.prototype.remove.apply(this, arguments);
}
}),
}],
validate: function() {
this.errorModel.clear();
var columns = this.get('columns');
if ((_.isUndefined(columns) || _.isNull(columns) || columns.length < 1)) {
var msg = '{{ _('Please specify columns for Exclude constraint.') }}';
this.errorModel.set('columns', msg);
return msg;
}
return null;
}
}),
canCreate: function(itemData, item, data) {
// If check is false then , we will allow create menu
if (data && data.check == false)
return true;
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
// If it is schema then allow user to create table
if (_.indexOf(['schema'], d._type) > -1)
return true;
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// If node is under catalog then do not allow 'create' menu
if (_.indexOf(parents, 'catalog') > -1) {
return false;
} else {
return true;
}
}
});
}
return pgBrowser.Nodes['exclusion_constraint'];
});


@ -0,0 +1,12 @@
.icon-foreign_key {
background-image: url('{{ url_for('NODE-foreign_key.static', filename='img/foreign_key.png') }}') !important;
background-repeat: no-repeat;
align-content: center;
vertical-align: middle;
height: 1.3em;
}
.icon-foreign_key_no_validate {
background-image: url('{{ url_for('NODE-foreign_key.static', filename='img/foreign_key_no_validate.png') }}') !important;
border-radius: 10px
}


@ -0,0 +1,878 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements Primary key constraint Node"""
import json
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext as _
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
import pgadmin.browser.server_groups.servers.databases as database
from functools import wraps
from pgadmin.browser.server_groups.servers.databases.schemas.tables.constraints.type \
import ConstraintRegistry, ConstraintTypeModule
class IndexConstraintModule(ConstraintTypeModule):
"""
class IndexConstraintModule(ConstraintTypeModule)
A module class for index constraint (primary key/unique) nodes derived from ConstraintTypeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the IndexConstraintModule and its base module.
* get_nodes(gid, sid, did)
- Method is used to generate the browser collection node.
* node_inode()
- Method is overridden from its base class to make the node as leaf node.
* script_load()
- Load the module script for index constraint, when any of the database nodes is
initialized.
"""
NODE_TYPE = 'index_constraint'
COLLECTION_LABEL = _('Index constraint')
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the IndexConstraintModule and its base module.
Args:
*args:
**kwargs:
Returns:
"""
self.min_ver = None
self.max_ver = None
super(IndexConstraintModule, self).__init__(*args, **kwargs)
def get_nodes(self, gid, sid, did, scid, tid):
"""
Generate the collection node
"""
pass
@property
def node_inode(self):
"""
Override this property to make the node a leaf node.
Returns: False as this is the leaf node
"""
return False
@property
def script_load(self):
"""
Load the module script for index_constraint, when any of the table nodes is
initialized.
Returns: node type of the server module.
"""
return database.DatabaseModule.NODE_TYPE
class PrimaryKeyConstraintModule(IndexConstraintModule):
"""
class PrimaryKeyConstraintModule(IndexConstraintModule)
A module class for the catalog schema node derived from IndexConstraintModule.
"""
NODE_TYPE = 'primary_key'
COLLECTION_LABEL = _("Primary key")
primary_key_blueprint = PrimaryKeyConstraintModule(__name__)
class UniqueConstraintModule(IndexConstraintModule):
"""
class UniqueConstraintModule(IndexConstraintModule)
A module class for the catalog schema node derived from IndexConstraintModule.
"""
NODE_TYPE = 'unique_constraint'
COLLECTION_LABEL = _("Unique constraint")
unique_constraint_blueprint = UniqueConstraintModule(__name__)
class IndexConstraintView(PGChildNodeView):
"""
class IndexConstraintView(PGChildNodeView)
A view class for Primary key constraint node derived from PGChildNodeView. This class is
responsible for all the stuff related to view like creating, updating Primary key constraint
node, showing properties, showing sql in sql pane.
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the IndexConstraintView and its base view.
* module_js()
- This property defines (if javascript) exists for this node.
Override this property for your own logic
* check_precondition()
- This function behaves as a decorator which checks the
database connection before running the view; it also attaches the
manager, conn & template_path properties to self
* list()
- This function returns primary key constraint nodes within that
collection as http response.
* get_list()
- This function is used to list all the index constraint nodes within that
collection and return them as a list.
* nodes()
- This function returns child node within that collection.
Here return all primary key constraint node as http response.
* get_nodes()
- returns all primary key constraint nodes' list.
* properties()
- This function will show the properties of the selected primary key.
* update()
- This function will update the data for the selected primary key.
* msql()
- This function is used to return modified SQL for the selected primary key.
* get_sql()
- This function will generate sql from model data.
* sql():
- This function will generate sql to show it in sql pane for the selected primary key.
* get_indices():
- This function returns indices for current table.
"""
node_type = 'index_constraint'
node_label = _('Index constraint')
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [{'type': 'int', 'id': 'cid'}]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'delete': [{'delete': 'delete'}],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}]
})
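# Note (illustrative only, based on the PGChildNodeView routing conventions
# assumed above): the 'operations' dict maps the generated URLs to handler
# methods, roughly:
#   GET    .../obj/<gid>/<sid>/<did>/<scid>/<tid>/<cid>  -> properties()
#   PUT    .../obj/.../<cid>                              -> update()
#   DELETE .../obj/.../<cid>                              -> delete()
#   GET    .../obj/<gid>/<sid>/<did>/<scid>/<tid>         -> list()
#   POST   .../obj/<gid>/<sid>/<did>/<scid>/<tid>         -> create()
# The exact URL prefix is built by register_node_view() and is not shown here.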
def module_js(self):
"""
This property defines whether javascript exists for this node.
Override this property for your own logic.
"""
return make_response(
render_template(
"index_constraint/js/index_constraint.js",
_=_,
node_type=self.node_type,
node_label=self.node_label
),
200, {'Content-Type': 'application/x-javascript'}
)
def check_precondition(f):
"""
This function works as a decorator which checks the database
connection before running the view; it also attaches the
manager, conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
kwargs['sid']
)
self.conn = self.manager.connection(did=kwargs['did'])
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
_(
"Connection to the server has been lost!"
)
)
self.template_path = 'index_constraint/sql'
# We need the parent's name, i.e. the table name and the schema name
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
self.schema = row['schema']
self.table = row['table']
return f(*args, **kwargs)
return wrap
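# Once the decorator above has run, the wrapped handlers can rely on
# self.manager, self.conn, self.template_path, self.schema and self.table
# being populated (this simply restates the contract implemented above;
# no additional attributes are assumed).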
def end_transaction(self):
SQL = render_template(
"/".join([self.template_path, 'end.sql']))
# End transaction if any.
self.conn.execute_scalar(SQL)
@check_precondition
def properties(self, gid, sid, did, scid, tid, cid=None):
"""
This function is used to show the properties of the selected
primary key constraint.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
sql = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid,
cid=cid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
result = res['rows'][0]
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=cid,
colcnt=result['indnatts'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
columns = []
for row in res['rows']:
columns.append({"column": row['column'].strip('"')})
result['columns'] = columns
return ajax_response(
response=result,
status=200
)
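# A minimal sketch of the JSON this returns (hypothetical values, shown only
# for illustration; the exact keys come from properties.sql):
#   {"name": "dept_pkey", "indnatts": 1,
#    "columns": [{"column": "id"}], ...}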
@check_precondition
def list(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all primary key constraint
nodes within that collection as an http response.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
try:
res = self.get_node_list(gid, sid, did, scid, tid, cid)
return ajax_response(
response=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_node_list(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all primary key constraint
nodes within that collection as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
SQL = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(SQL)
return res['rows']
@check_precondition
def nodes(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all primary key constraint nodes as an
http response.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
try:
res = self.get_nodes(gid, sid, did, scid, tid, cid)
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_nodes(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns all primary key constraint nodes as a list.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
res = []
SQL = render_template("/".join([self.template_path, 'nodes.sql']),
tid=tid,
constraint_type=self.constraint_type)
status, rset = self.conn.execute_2darray(SQL)
for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon="icon-%s" % self.node_type
))
return res
@check_precondition
def create(self, gid, sid, did, scid, tid, cid=None):
"""
This function will create a primary key.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
required_args = [
[u'columns', u'index'] # At least one of these must be present.
]
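# Illustrative request payload (hypothetical example; either 'columns' or
# 'index' must be supplied, other keys follow the client-side model):
#   {"name": "dept_pkey", "columns": [{"column": "id"}],
#    "spcname": "pg_default"}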
data = request.form if request.form else json.loads(request.data.decode())
for k, v in data.items():
try:
data[k] = json.loads(v)
except (ValueError, TypeError):
data[k] = v
for arg in required_args:
if isinstance(arg, list):
for param in arg:
if (param in data and
(not isinstance(data[param], list) or
(isinstance(data[param], list) and
len(data[param]) > 0))):
break
else:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Couldn't find at least one required parameter (%s)." % str(param)
)
)
elif arg not in data:
return make_json_response(
status=400,
success=0,
errormsg=_(
"Couldn't find the required parameter (%s)." % arg
)
)
data['schema'] = self.schema
data['table'] = self.table
try:
if 'name' not in data or data['name'] == "":
SQL = render_template(
"/".join([self.template_path, 'begin.sql']))
# Start transaction.
status, res = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
# The below SQL will execute CREATE DDL only
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn,
constraint_name=self.constraint_name
)
status, msg = self.conn.execute_scalar(SQL)
if not status:
self.end_transaction()
return internal_server_error(errormsg=msg)
if 'name' not in data or data['name'] == "":
sql = render_template(
"/".join([self.template_path,
'get_oid_with_transaction.sql'],
),
constraint_type=self.constraint_type,
tid=tid)
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
self.end_transaction()
data['name'] = res['rows'][0]['name']
else:
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'alter.sql']),
data=data,
conn=self.conn)
sql = sql.strip('\n').strip(' ')
if sql != '':
status, result = self.conn.execute_scalar(sql)
if not status:
self.end_transaction()
return internal_server_error(errormsg=result)
return jsonify(
node=self.blueprint.generate_browser_node(
res['rows'][0]['oid'],
tid,
data['name'],
icon="icon-%s" % self.node_type
)
)
except Exception as e:
self.end_transaction()
return make_json_response(
status=400,
success=0,
errormsg=e
)
@check_precondition
def update(self, gid, sid, did, scid, tid, cid=None):
"""
This function will update the data for the selected
primary key.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
data = request.form if request.form else json.loads(request.data.decode())
try:
data['schema'] = self.schema
data['table'] = self.table
sql = self.get_sql(data, tid, cid)
sql = sql.strip('\n').strip(' ')
if sql != "":
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
sql = render_template("/".join([self.template_path, 'get_oid.sql']),
tid=tid,
constraint_type=self.constraint_type,
name=data['name'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info="Constraint updated",
data={
'id': cid,
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': cid,
'tid': tid,
'scid': scid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, cid=None):
"""
This function will delete an existing primary key.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
# The code below decides whether this is a simple drop or a drop with cascade
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
try:
sql = render_template("/".join([self.template_path, 'get_name.sql']),
tid=tid,
constraint_type=self.constraint_type,
cid=cid)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
sql = render_template("/".join([self.template_path, 'delete.sql']),
data=data,
cascade=cascade)
status, res = self.conn.execute_scalar(sql)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=_("{0} dropped.".format(self.node_label)),
data={
'id': cid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def msql(self, gid, sid, did, scid, tid, cid=None):
"""
This function returns modified SQL for the selected
primary key.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
data = {}
for k, v in request.args.items():
try:
data[k] = json.loads(v)
except ValueError:
data[k] = v
data['schema'] = self.schema
data['table'] = self.table
try:
sql = self.get_sql(data, tid, cid)
sql = sql.strip('\n').strip(' ')
return make_json_response(
data=sql,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, data, tid, cid=None):
"""
This function will generate sql from model data.
Args:
data: Contains the data of the selected primary key constraint.
tid: Table ID.
cid: Primary key constraint ID
Returns:
"""
if cid is not None:
sql = render_template("/".join([self.template_path, 'properties.sql']),
tid=tid,
cid=cid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
old_data = res['rows'][0]
required_args = [u'name']
for arg in required_args:
if arg not in data:
data[arg] = old_data[arg]
sql = render_template("/".join([self.template_path, 'update.sql']),
data=data,
o_data=old_data)
else:
required_args = [
[u'columns', u'index'] # At least one of these must be present.
]
for arg in required_args:
if isinstance(arg, list):
for param in arg:
if (param in data and
((isinstance(data[param], str) and
data[param] != "") or
(isinstance(data[param], list) and
len(data[param]) > 0))):
break
else:
return _('-- definition incomplete')
elif arg not in data:
return _('-- definition incomplete')
sql = render_template("/".join([self.template_path, 'create.sql']),
data=data,
conn=self.conn,
constraint_name=self.constraint_name)
sql += "\n"
sql += render_template("/".join([self.template_path, 'alter.sql']),
data=data,
conn=self.conn)
return sql
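# For a new primary key on a hypothetical table public.dept(id), the rendered
# create.sql/alter.sql templates would typically produce something like the
# following (illustrative only; the actual output depends on the templates):
#   ALTER TABLE public.dept ADD CONSTRAINT dept_pkey PRIMARY KEY (id);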
@check_precondition
def sql(self, gid, sid, did, scid, tid, cid=None):
"""
This function generates sql to show in the sql pane for the selected
primary key.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
cid: Primary key constraint ID
Returns:
"""
try:
SQL = render_template(
"/".join([self.template_path, 'properties.sql']),
tid=tid,
conn=self.conn,
cid=cid,
constraint_type=self.constraint_type)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = res['rows'][0]
data['schema'] = self.schema
data['table'] = self.table
sql = render_template(
"/".join([self.template_path, 'get_constraint_cols.sql']),
cid=cid, colcnt=data['indnatts'])
status, res = self.conn.execute_dict(sql)
if not status:
return internal_server_error(errormsg=res)
columns = []
for row in res['rows']:
columns.append({"column": row['column'].strip('"')})
data['columns'] = columns
SQL = render_template(
"/".join([self.template_path, 'create.sql']),
data=data,
constraint_name=self.constraint_name)
SQL += "\n"
SQL += render_template(
"/".join([self.template_path, 'alter.sql']),
data=data, conn=self.conn)
sql_header = "-- Constraint: {0}\n\n-- ".format(data['name'])
sql_header += render_template(
"/".join([self.template_path, 'delete.sql']),
data=data)
sql_header += "\n"
SQL = sql_header + SQL
return ajax_response(response=SQL)
except Exception as e:
return internal_server_error(errormsg=str(e))
class PrimaryKeyConstraintView(IndexConstraintView):
node_type = 'primary_key'
node_label = _('Primary key')
constraint_name = "PRIMARY KEY"
constraint_type = "p"
class UniqueConstraintView(IndexConstraintView):
node_type = 'unique_constraint'
node_label = _('Unique constraint')
constraint_name = "UNIQUE"
constraint_type = "u"
primary_key_constraint = ConstraintRegistry(
'primary_key', PrimaryKeyConstraintModule, PrimaryKeyConstraintView
)
unique_constraint = ConstraintRegistry(
'unique_constraint', UniqueConstraintModule, UniqueConstraintView
)
PrimaryKeyConstraintView.register_node_view(primary_key_blueprint)
UniqueConstraintView.register_node_view(unique_constraint_blueprint)

View File

@ -0,0 +1,536 @@
define(
['jquery', 'underscore', 'underscore.string', 'pgadmin',
'pgadmin.browser', 'alertify', 'pgadmin.browser.collection'],
function($, _, S, pgAdmin, pgBrowser, alertify) {
// Extend the browser's node class for index constraint node
if (!pgBrowser.Nodes['{{node_type}}']) {
pgAdmin.Browser.Nodes['{{node_type}}'] = pgBrowser.Node.extend({
type: '{{node_type}}',
label: '{{ node_label }}',
collection_type: 'coll-constraints',
sqlAlterHelp: 'ddl-alter.html',
sqlCreateHelp: 'ddl-constraints.html',
hasSQL: true,
hasDepends: false,
parent_type: 'table',
canDrop: true,
canDropCascade: true,
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([{
name: 'create_{{node_type}}_on_coll', node: 'coll-constraints', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ node_label }}',
icon: 'wcTabIcon icon-{{node_type}}', data: {action: 'create', check: true},
enable: 'canCreate'
}
]);
},
canCreate: function(itemData, item, data) {
// If check is false then we will allow the create menu
if (data && data.check == false)
return true;
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
// If it is a schema node then allow the user to create the constraint
if (_.indexOf(['schema'], d._type) > -1) {
{% if node_type == 'primary_key' %}
// There should be only one primary key per table.
var children = t.children(arguments[1], false),
primary_key_found = false;
_.each(children, function(child){
data = pgBrowser.tree.itemData($(child));
if (!primary_key_found && data._type == "primary_key") {
primary_key_found = true;
}
});
return !primary_key_found;
{% else %}
return true;
{% endif %}
}
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// If node is under catalog then do not allow 'create' menu
if (_.indexOf(parents, 'catalog') > -1) {
return false;
} else {
return true;
}
},
// Define the model for index constraint node
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
oid: undefined,
comment: undefined,
spcname: "pg_default",
index: undefined,
fillfactor: undefined,
condeferrable: undefined,
condeferred: undefined,
columns: []
},
// Define the schema for the index constraint node
schema: [{
id: 'name', label: '{{ _('Name') }}', type: 'text',
mode: ['properties', 'create', 'edit'], editable:true,
cellHeaderClasses:'width_percent_40',
},{
id: 'oid', label:'{{ _('OID') }}', cell: 'string',
type: 'text' , mode: ['properties'], editable: false,
cellHeaderClasses:'width_percent_20',
},{
id: 'comment', label:'{{ _('Comment') }}', cell: 'string',
type: 'multiline', mode: ['properties', 'create', 'edit'],
deps:['name'], disabled:function(m) {
var name = m.get('name');
if (!(name && name != '')) {
setTimeout(function(){
m.set('comment', null);
},10);
return true;
} else {
return false;
}
}
},{
id: 'columns', label: '{{ _('Columns') }}',
type: 'collection', group: '{{ _('Definition') }}',
editable: false,
cell: Backgrid.StringCell.extend({
initialize: function() {
Backgrid.StringCell.prototype.initialize.apply(this, arguments);
var self = this,
collection = this.model.get('columns');
// Do not listen for any event(s) for existing constraint.
if (_.isUndefined(self.model.get('oid'))) {
var tableCols = self.model.top.get('columns');
self.listenTo(tableCols, 'remove' , self.removeColumn);
self.listenTo(tableCols, 'change:name', self.resetColOptions);
}
collection.on('pgadmin:multicolumn:updated', function() {
self.render.apply(self);
});
self.listenTo(collection, "add", self.render);
self.listenTo(collection, "remove", self.render);
},
removeColumn: function(m) {
var self = this,
removedCols = self.model.get('columns').where(
{column: m.get('name')}
);
self.model.get('columns').remove(removedCols);
setTimeout(function () {
self.render();
}, 10);
{% if node_type == 'primary_key' %}
var key = 'primary_key'
{% else %}
var key = 'unique_constraint'
{% endif %}
setTimeout(function () {
var constraints = self.model.top.get(key);
var removed = [];
constraints.each(function(constraint) {
if (constraint.get("columns").length == 0) {
removed.push(constraint);
}
});
constraints.remove(removed);
},100);
},
resetColOptions : function(m) {
var self = this,
updatedCols = self.model.get('columns').where(
{column: m.previous('name')}
);
if (updatedCols.length > 0) {
/*
* Table column name has changed so update
* column name in primary key as well.
*/
updatedCols[0].set(
{"column": m.get('name')},
{silent: true});
}
setTimeout(function () {
self.render();
}, 10);
},
formatter: {
fromRaw: function (rawValue, model) {
return rawValue.pluck("column").toString();
},
toRaw: function (val, model) {
return val;
}
},
render: function() {
return Backgrid.StringCell.prototype.render.apply(this, arguments);
},
remove: function() {
var tableCols = this.model.top.get('columns'),
primary_key_col = this.model.get('columns');
if (primary_key_col) {
primary_key_col.off('pgadmin:multicolumn:updated');
}
this.stopListening(tableCols, 'remove', this.removeColumn);
this.stopListening(tableCols, 'change:name', this.resetColOptions);
Backgrid.StringCell.prototype.remove.apply(this, arguments);
}
}),
canDelete: true, canAdd: true,
control: Backform.MultiSelectAjaxControl.extend({
formatter: {
fromRaw: function (rawData, model) {
var res = _.isObject(rawData) ?
rawData : JSON.parse(rawData);
return _.pluck(res, 'column');
},
toRaw: function (formattedData, model) {
return formattedData;
}
},
defaults: _.extend(
{},
Backform.NodeListByNameControl.prototype.defaults,
{
select2: {
multiple: true,
allowClear: true,
width: 'style',
placeholder: '{{ _('Select the column(s)') }}',
}
}
),
initialize: function() {
// Here we will decide whether we need to call the URL
// or fetch the data from the parent columns collection
var self = this;
if(this.model.handler) {
Backform.Select2Control.prototype.initialize.apply(this, arguments);
// Do not listen for any event(s) for existing constraint.
if (_.isUndefined(self.model.get('oid'))) {
var tableCols = self.model.top.get('columns');
self.listenTo(tableCols, 'remove' , self.resetColOptions);
self.listenTo(tableCols, 'change:name', self.resetColOptions);
}
self.custom_options();
} else {
Backform.MultiSelectAjaxControl.prototype.initialize.apply(this, arguments);
}
self.model.get('columns').on('pgadmin:multicolumn:updated', function() {
self.render.apply(self);
});
},
resetColOptions: function(m) {
var self = this;
setTimeout(function () {
self.custom_options();
self.render.apply(self);
}, 50);
},
custom_options: function() {
// We will add all the columns entered by the user in the table model
var columns = this.model.top.get('columns'),
added_columns_from_tables = [];
if (columns.length > 0) {
_.each(columns.models, function(m) {
var col = m.get('name');
if(!_.isUndefined(col) && !_.isNull(col)) {
added_columns_from_tables.push(
{label: col, value: col, image:'icon-column'}
);
}
});
}
// Set the values into options so that the user can select them
this.field.set('options', added_columns_from_tables);
},
onChange: function(e) {
var self = this,
model = this.model,
$el = $(e.target),
attrArr = this.field.get("name").split('.'),
name = attrArr.shift(),
path = attrArr.join('.'),
vals = this.getValueFromDOM(),
collection = model.get(name),
removed = [];
this.stopListening(this.model, "change:" + name, this.render);
/*
* Iterate through all the values, and find out how many are already
* present in the collection.
*/
collection.each(function(m) {
var column = m.get('column'),
idx = _.indexOf(vals, column);
if (idx > -1) {
vals.splice(idx, 1);
} else {
removed.push(column);
}
});
/*
* Adding new values
*/
_.each(vals, function(v) {
var m = new (self.field.get('model'))(
{column: v}, { silent: true,
top: self.model.top,
collection: collection,
handler: collection
});
collection.add(m);
});
/*
* Removing unwanted!
*/
_.each(removed, function(v) {
collection.remove(collection.where({column: v}));
});
this.listenTo(this.model, "change:" + name, this.render);
},
remove: function() {
if(this.model.handler) {
var self = this,
tableCols = self.model.top.get('columns');
self.stopListening(tableCols, 'remove' , self.resetColOptions);
self.stopListening(tableCols, 'change:name' , self.resetColOptions);
self.model.get('columns').off('pgadmin:multicolumn:updated');
Backform.Select2Control.prototype.remove.apply(this, arguments);
} else {
Backform.MultiSelectAjaxControl.prototype.remove.apply(this, arguments);
}
}
}),
deps: ['index'], node: 'column',
model: pgBrowser.Node.Model.extend({
defaults: {
column: undefined
},
validate: function() {
return null;
}
}),
transform : function(data){
var res = [];
if (data && _.isArray(data)) {
_.each(data, function(d) {
res.push({label: d.label, value: d.label, image:'icon-column'});
})
}
return res;
},
select2:{allowClear:false},
disabled: function(m) {
// If we are in table edit mode then
if (_.has(m, 'top') && !_.isUndefined(m.top)
&& !m.top.isNew()) {
// If OID is undefined then user is trying to add
// new constraint which should be allowed for Unique
return !_.isUndefined(m.get('oid'));
}
// We can't update columns of existing index constraint.
if (!m.isNew()) {
return true;
}
// Disable if index is selected.
var index = m.get('index');
if(_.isUndefined(index) || index == '') {
return false;
} else {
var col = m.get('columns');
col.reset();
return true;
}
}
},{
id: 'spcname', label: '{{ _('Tablespace') }}',
type: 'text', group: '{{ _('Definition') }}',
control: 'node-list-by-name', node: 'tablespace',
deps: ['index'],
select2:{allowClear:false},
filter: function(m) {
// Don't show pg_global tablespace in selection.
if (m.label == "pg_global") return false;
else return true;
},
disabled: function(m) {
// Disable if index is selected.
m = m.top || m;
var index = m.get('index');
if(_.isUndefined(index) || index == '') {
return false;
} else {
setTimeout(function(){
m.set('spcname', '');
},10);
return true;
}
}
},{
id: 'index', label: '{{ _('Index') }}',
type: 'text', group: '{{ _('Definition') }}',
control: Backform.NodeListByNameControl.extend({
initialize:function() {
if (_.isUndefined(this.model.top)) {
Backform.NodeListByNameControl.prototype.initialize.apply(this,arguments);
} else {
Backform.Control.prototype.initialize.apply(this,arguments);
}
}
}),
select2:{allowClear:true}, node: 'index',
disabled: function(m) {
// If we are in table edit mode then disable it
if (_.has(m, 'top') && !_.isUndefined(m.top)
&& !m.top.isNew()) {
return true;
}
// We can't update index of existing index constraint.
return !m.isNew();
},
// We will not show this field in Create Table mode
visible: function(m) {
return !_.isUndefined(m.top.node_info['table']);
}
},{
id: 'fillfactor', label: '{{ _('Fill factor') }}', deps: ['index'],
type: 'int', group: '{{ _('Definition') }}', allowNull: true,
disabled: function(m) {
// Disable if index is selected.
var index = m.get('index');
if(_.isUndefined(index) || index == '') {
return false;
} else {
setTimeout(function(){
m.set('fillfactor', null);
},10);
return true;
}
}
},{
id: 'condeferrable', label: '{{ _('Deferrable') }}',
type: 'switch', group: '{{ _('Definition') }}', deps: ['index'],
disabled: function(m) {
// If we are in table edit mode then
if (_.has(m, 'top') && !_.isUndefined(m.top)
&& !m.top.isNew()) {
// If OID is undefined then user is trying to add
// new constraint which should be allowed for Unique
return !_.isUndefined(m.get('oid'));
}
// We can't update condeferrable of existing index constraint.
if (!m.isNew()) {
return true;
}
// Disable if index is selected.
var index = m.get('index');
if(_.isUndefined(index) || index == '') {
return false;
} else {
setTimeout(function(){
m.set('condeferrable', false);
},10);
return true;
}
}
},{
id: 'condeferred', label: '{{ _('Deferred') }}',
type: 'switch', group: '{{ _('Definition') }}',
deps: ['condeferrable'],
disabled: function(m) {
// If we are in table edit mode then
if (_.has(m, 'top') && !_.isUndefined(m.top)
&& !m.top.isNew()) {
// If OID is undefined then user is trying to add
// new constraint which should be allowed for Unique
return !_.isUndefined(m.get('oid'));
}
// We can't update condeferred of existing index constraint.
if (!m.isNew()) {
return true;
}
// Disable if condeferrable is false or unselected.
if(m.get('condeferrable') == true) {
return false;
} else {
setTimeout(function(){
m.set('condeferred', false);
},10);
return true;
}
}
}
],
validate: function() {
this.errorModel.clear();
// Clear parent's error as well
if (_.has(this, 'top')) {
this.top.errorModel.clear();
}
var columns = this.get('columns'),
index = this.get('index');
if ((_.isUndefined(index) || String(index).replace(/^\s+|\s+$/g, '') == '') &&
(_.isUndefined(columns) || _.isNull(columns) || columns.length < 1)) {
var msg = '{{ _('Please specify columns for ') }}' + '{{ node_label }}';
this.errorModel.set('columns', msg);
return msg;
}
return null;
}
})
});
}
return pgBrowser.Nodes['{{node_type}}'];
});

Binary file not shown.


View File

@ -0,0 +1,54 @@
define(
[
'jquery', 'underscore', 'underscore.string', 'pgadmin', 'pgadmin.browser',
'pgadmin.browser.collection'{% for c in constraints %}, 'pgadmin.node.{{ c|safe }}'{%endfor%}
],
function($, _, S, pgAdmin, pgBrowser) {
if (!pgBrowser.Nodes['coll-constraints']) {
var databases = pgAdmin.Browser.Nodes['coll-constraints'] =
pgAdmin.Browser.Collection.extend({
node: 'constraints',
label: '{{ _('Constraints') }}',
type: 'coll-constraints',
columns: ['name', 'comment']
});
};
if (!pgBrowser.Nodes['constraints']) {
pgAdmin.Browser.Nodes['constraints'] = pgBrowser.Node.extend({
type: 'constraints',
label: '{{ _('Constraints') }}',
collection_type: 'coll-constraints',
parent_type: ['table'],
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([]);
},
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
oid: undefined,
comment: undefined
},
schema: [{
id: 'name', label: '{{ _('Name') }}', type: 'text',
mode: ['properties', 'create', 'edit']
},{
id: 'oid', label:'{{ _('Oid') }}', cell: 'string',
type: 'text' , mode: ['properties']
},{
id: 'comment', label:'{{ _('Comment') }}', cell: 'string',
type: 'multiline', mode: ['properties', 'create', 'edit']
}]
})
});
}
return pgBrowser.Nodes['constraints'];
});

View File

@ -0,0 +1,42 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from pgadmin.browser.collection import CollectionNodeModule
from flask import Blueprint
class ConstraintRegistry(object):
"""
ConstraintRegistry
It is a registry for the different types of constraints on tables.
Its job is to initialize each type of constraint blueprint and
register it with its respective NodeView.
"""
registry = dict()
def __init__(self, name, con_blueprint, con_nodeview):
if name not in ConstraintRegistry.registry:
blueprint = con_blueprint(name)
# TODO: register the view with the blueprint
con_nodeview.register_node_view(blueprint)
ConstraintRegistry.registry[name] = {
'blueprint': blueprint,
'nodeview': con_nodeview
}
class ConstraintTypeModule(CollectionNodeModule):
register = Blueprint.register
def __init__(self, *args, **kwargs):
super(ConstraintTypeModule, self).__init__(*args, **kwargs)
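# A minimal sketch of how a constraint type is expected to plug into this
# registry (it mirrors the primary key/unique registrations elsewhere in this
# change; 'MyConstraintModule' and 'MyConstraintView' are hypothetical names):
#
#   my_constraint = ConstraintRegistry(
#       'my_constraint', MyConstraintModule, MyConstraintView
#   )
#
# The registry instantiates the blueprint and calls register_node_view() on
# the view, so each constraint type only needs this single registration call.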

View File

@ -0,0 +1,874 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
""" Implements Index Node """
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
from pgadmin.browser.collection import CollectionNodeModule
import pgadmin.browser.server_groups.servers.databases as database
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.browser.server_groups.servers.utils import parse_priv_from_db, \
parse_priv_to_db
from functools import wraps
import json
class IndexesModule(CollectionNodeModule):
"""
class IndexesModule(CollectionNodeModule)
A module class for Index node derived from CollectionNodeModule.
Methods:
-------
* __init__(*args, **kwargs)
- Method is used to initialize the IndexesModule and its base module.
* get_nodes(gid, sid, did, scid, tid)
- Method is used to generate the browser collection node.
* node_inode()
- Method is overridden from its base class to make the node a leaf node.
* script_load()
- Load the module script when the database node is
initialized.
"""
NODE_TYPE = 'index'
COLLECTION_LABEL = gettext("Indexes")
def __init__(self, *args, **kwargs):
"""
Method is used to initialize the IndexesModule and its base module.
Args:
*args:
**kwargs:
"""
self.min_ver = None
self.max_ver = None
super(IndexesModule, self).__init__(*args, **kwargs)
def BackendSupported(self, manager, **kwargs):
"""
Load this module if 'vid' refers to a view; we will not load it
under a materialized view.
"""
if super(IndexesModule, self).BackendSupported(manager, **kwargs):
conn = manager.connection(did=kwargs['did'])
# If DB is not connected then return error to browser
if not conn.connected():
return precondition_required(
gettext(
"Connection to the server has been lost!"
)
)
if 'vid' not in kwargs:
return True
template_path = 'index/sql/9.1_plus'
SQL = render_template("/".join(
[template_path, 'backend_support.sql']), vid=kwargs['vid'])
status, res = conn.execute_scalar(SQL)
# check if any errors
if not status:
return internal_server_error(errormsg=res)
# Check whether vid refers to a view and not a materialized view;
# return True if so, otherwise False
return res
def get_nodes(self, gid, sid, did, scid, **kwargs):
"""
Generate the collection node
"""
assert('tid' in kwargs or 'vid' in kwargs)
yield self.generate_browser_collection_node(
kwargs['tid'] if 'tid' in kwargs else kwargs['vid']
)
@property
def script_load(self):
"""
Load the module script when the database node is
initialized.
"""
return database.DatabaseModule.NODE_TYPE
@property
def node_inode(self):
"""
Load the module node as a leaf node
"""
return False
blueprint = IndexesModule(__name__)
class IndexesView(PGChildNodeView):
"""
This class is responsible for generating routes for Index node
Methods:
-------
* __init__(**kwargs)
- Method is used to initialize the IndexesView and its base view.
* module_js()
- This property defines whether javascript exists for this node.
Override this property for your own logic
* check_precondition()
- This function works as a decorator which checks the database
connection before running the view; it also attaches the
manager, conn & template_path properties to self
* list()
- This function is used to list all the Index nodes within that
collection.
* nodes()
- This function is used to create all the child nodes within that
collection. Here it will create all the Index nodes.
* properties(gid, sid, did, scid, tid, idx)
- This function will show the properties of the selected Index node
* create(gid, sid, did, scid, tid)
- This function will create the new Index object
* update(gid, sid, did, scid, tid, idx)
- This function will update the data for the selected Index node
* delete(self, gid, sid, did, scid, tid, idx):
- This function will drop the Index object
* msql(gid, sid, did, scid, tid, idx)
- This function is used to return modified SQL for the selected
Index node
* get_sql(data, scid, tid)
- This function will generate sql from model data
* sql(gid, sid, did, scid):
- This function will generate sql to show it in sql pane for the
selected Index node.
* dependency(gid, sid, did, scid):
- This function will generate dependency list show it in dependency
pane for the selected Index node.
* dependent(gid, sid, did, scid):
- This function will generate dependent list to show it in dependent
pane for the selected Index node.
"""
node_type = blueprint.node_type
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [
{'type': 'int', 'id': 'idx'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'delete': [{'delete': 'delete'}],
'children': [{'get': 'children'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}],
'get_collations': [{'get': 'get_collations'},
{'get': 'get_collations'}],
'get_access_methods': [{'get': 'get_access_methods'},
{'get': 'get_access_methods'}],
'get_op_class': [{'get': 'get_op_class'},
{'get': 'get_op_class'}]
})
def check_precondition(f):
"""
This function works as a decorator which checks the database
connection before running the view; it also attaches the
manager, conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
kwargs['sid']
)
self.conn = self.manager.connection(did=kwargs['did'])
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
gettext(
"Connection to the server has been lost!"
)
)
# We need datlastsysoid to check if current index is system index
self.datlastsysoid = self.manager.db_info[kwargs['did']]['datlastsysoid']
# we will set template path for sql scripts
self.template_path = 'index/sql/9.1_plus'
# We need the parent's name, i.e. the table name and schema name;
# when we create a new index or update one, we can fetch it using
# the properties SQL
SQL = render_template("/".join([self.template_path,
'get_parent.sql']),
tid=kwargs['tid'])
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
self.schema = row['schema']
self.table = row['table']
return f(*args, **kwargs)
return wrap
@check_precondition
def get_collations(self, gid, sid, did, scid, tid, idx=None):
"""
This function will return the list of collations available
via an AJAX response
"""
res = [{'label': '', 'value': ''}]
try:
SQL = render_template("/".join([self.template_path,
'get_collations.sql']))
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
{'label': row['collation'],
'value': row['collation']}
)
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_access_methods(self, gid, sid, did, scid, tid, idx=None):
"""
This function will return the list of access methods available
via an AJAX response
"""
res = [{'label': '', 'value': ''}]
try:
SQL = render_template("/".join([self.template_path,
'get_am.sql']))
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
{'label': row['amname'],
'value': row['amname']}
)
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def get_op_class(self, gid, sid, did, scid, tid, idx=None):
"""
This function will return the list of operator classes (op_class)
for each access method, available via an AJAX response
"""
res = dict()
try:
# Fetching all the access methods
SQL = render_template("/".join([self.template_path,
'get_am.sql']))
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
# Fetching all the op_classes for each access method
SQL = render_template("/".join([self.template_path,
'get_op_class.sql']),
oid=row['oid'])
status, result = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=result)
op_class_list = [{'label': '', 'value': ''}]
for r in result['rows']:
op_class_list.append({'label': r['opcname'],
'value': r['opcname']})
# Append op_class list in main result as collection
res[row['amname']] = op_class_list
return make_json_response(
data=res,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
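# Illustrative response shape (hypothetical values): the result is keyed by
# access method, each value being a list of selectable operator classes, e.g.
#   {"btree": [{"label": "", "value": ""},
#              {"label": "text_ops", "value": "text_ops"}],
#    "hash": [...]}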
@check_precondition
def list(self, gid, sid, did, scid, tid):
"""
This function is used to list all the index nodes within that collection.
Args:
gid: Server group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
Returns:
JSON of available index nodes
"""
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return ajax_response(
response=res['rows'],
status=200
)
@check_precondition
def nodes(self, gid, sid, did, scid, tid):
"""
This function is used to create all the child nodes within that collection.
Here it will create all the index nodes.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
Returns:
JSON of available index child nodes
"""
res = []
SQL = render_template("/".join([self.template_path,
'nodes.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon="icon-index"
))
return make_json_response(
data=res,
status=200
)
def _column_details(self, idx, data):
"""
This function will fetch the list of column details for the index
Args:
idx: Index OID
data: Properties data
Returns:
Updated properties data with column details
"""
SQL = render_template("/".join([self.template_path,
'column_details.sql']), idx=idx)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
# 'attdef' comes quoted from the query, so we need to strip the quotes
# For 'options' we need true/false to render the switch ASC(false)/DESC(true)
columns = []
cols = []
cnt = 1
for row in rset['rows']:
# We need all data as collection for ColumnsModel
cols_data = {
'colname': row['attdef'].strip('"'),
'collspcname': row['collnspname'],
'op_class': row['opcname'],
}
if row['options'][0] == 'DESC':
cols_data['sort_order'] = True
columns.append(cols_data)
# We need the same data as a string to display in the properties window
# If there are multiple columns then separate them with commas
cols_str = row['attdef']
if row['collnspname']:
cols_str += ' COLLATE ' + row['collnspname']
if row['opcname']:
cols_str += ' ' + row['opcname']
if row['options'][0] == 'DESC':
cols_str += ' DESC'
cols.append(cols_str)
# Push as collection
data['columns'] = columns
# Push as string
data['cols'] = ', '.join(cols)
return data
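# Illustrative result for an index on (name DESC) (hypothetical values only):
#   data['columns'] = [{'colname': 'name', 'collspcname': None,
#                       'op_class': None, 'sort_order': True}]
#   data['cols']    = 'name DESC'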
@check_precondition
def properties(self, gid, sid, did, scid, tid, idx):
"""
This function will show the properties of the selected index node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
Returns:
JSON of the selected index node
"""
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
# Making copy of output for future use
data = dict(res['rows'][0])
# Add column details for current index
data = self._column_details(idx, data)
return ajax_response(
response=data,
status=200
)
@check_precondition
def create(self, gid, sid, did, scid, tid):
"""
This function will create a new index object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
"""
data = request.form if request.form else json.loads(
request.data.decode()
)
for k, v in data.items():
try:
data[k] = json.loads(v)
except (ValueError, TypeError):
data[k] = v
required_args = {
'name': 'Name',
'columns': 'Columns'
}
for arg in required_args:
err_msg = None
if arg == 'columns' and len(data['columns']) < 1:
err_msg = "You must provide one or more column to create index"
if arg not in data:
err_msg = "Couldn't find the required parameter (%s)." % \
required_args[arg]
# Check if we have at least one column
if err_msg is not None:
return make_json_response(
status=410,
success=0,
errormsg=gettext(err_msg)
)
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
try:
SQL = render_template("/".join([self.template_path,
'create.sql']),
data=data, conn=self.conn)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
# If the user chooses a concurrent index then we cannot run it along
# with the other ALTER statements, so we separate out the ALTER INDEX part
SQL = render_template("/".join([self.template_path,
'alter.sql']),
data=data, conn=self.conn)
SQL = SQL.strip('\n').strip(' ')
if SQL != '':
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
# We need the oid to add the object to the tree in the browser
SQL = render_template("/".join([self.template_path,
'get_oid.sql']),
tid=tid, data=data)
status, idx = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=idx)
return jsonify(
node=self.blueprint.generate_browser_node(
idx,
scid,
data['name'],
icon="icon-index"
)
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, idx):
"""
This function will drop the existing index object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
"""
# The code below decides whether this is a simple drop or a drop with cascade
if self.cmd == 'delete':
# This is a cascade operation
cascade = True
else:
cascade = False
try:
# We will first fetch the index name for the current request
# so that we can create the template for dropping the index
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = dict(res['rows'][0])
SQL = render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn, cascade=cascade)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Index is dropped"),
data={
'id': idx,
'tid': tid
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def update(self, gid, sid, did, scid, tid, idx):
"""
This function will update the existing index object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
"""
data = request.form if request.form else json.loads(request.data.decode())
data['schema'] = self.schema
data['table'] = self.table
try:
SQL = self.get_sql(scid, tid, idx, data)
if SQL and SQL.strip('\n') and SQL.strip(' '):
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info="Index updated",
data={
'id': idx,
'tid': tid,
'scid': scid
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': idx,
'tid': tid,
'scid': scid
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def msql(self, gid, sid, did, scid, tid, idx=None):
"""
This function generates the modified SQL for the index object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID (When working with existing index)
"""
data = dict()
for k, v in request.args.items():
try:
data[k] = json.loads(v)
except ValueError:
data[k] = v
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
try:
SQL = self.get_sql(scid, tid, idx, data)
if SQL and SQL.strip('\n') and SQL.strip(' '):
return make_json_response(
data=SQL,
status=200
)
except Exception as e:
return internal_server_error(errormsg=str(e))
def get_sql(self, scid, tid, idx, data):
"""
This function will generate SQL from the model data
"""
if idx is not None:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
old_data = dict(res['rows'][0])
# If name is not present in data then
# we will fetch it from the old data; we also need the schema & table name
if 'name' not in data:
data['name'] = old_data['name']
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data, conn=self.conn
)
else:
required_args = {
'name': 'Name',
'columns': 'Columns'
}
for arg in required_args:
err = False
if arg == 'columns' and len(data['columns']) < 1:
err = True
if arg not in data:
err = True
# Check if we have at least one column
if err:
return gettext('-- incomplete definition')
# The request is for a new object which does not yet have an index ID
SQL = render_template("/".join([self.template_path, 'create.sql']),
data=data, conn=self.conn)
SQL += "\n"
SQL += render_template("/".join([self.template_path, 'alter.sql']),
data=data, conn=self.conn)
return SQL
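# For a new btree index the rendered create.sql/alter.sql templates would
# typically produce something like the following (illustrative only; the
# exact output depends on the templates and the options chosen):
#   CREATE INDEX idx_dept_name ON public.dept USING btree (name);
#   ALTER INDEX public.idx_dept_name SET (fillfactor=90);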
@check_precondition
def sql(self, gid, sid, did, scid, tid, idx):
"""
This function generates the reverse engineered SQL for the index object
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
"""
try:
SQL = render_template("/".join([self.template_path,
'properties.sql']),
tid=tid, idx=idx,
datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
data = dict(res['rows'][0])
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema
data['table'] = self.table
# Add column details for current index
data = self._column_details(idx, data)
SQL = self.get_sql(scid, tid, None, data)
sql_header = "-- Index: {0}\n\n-- ".format(data['name'])
sql_header += render_template("/".join([self.template_path,
'delete.sql']),
data=data, conn=self.conn)
SQL = sql_header + '\n\n' + SQL
return ajax_response(response=SQL)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def dependents(self, gid, sid, did, scid, tid, idx):
"""
This function gets the dependents and returns an ajax response
for the index node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
"""
dependents_result = self.get_dependents(
self.conn, idx
)
return ajax_response(
response=dependents_result,
status=200
)
@check_precondition
def dependencies(self, gid, sid, did, scid, tid, idx):
"""
This function gets the dependencies and returns an ajax response
for the index node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
scid: Schema ID
tid: Table ID
idx: Index ID
"""
dependencies_result = self.get_dependencies(
self.conn, idx
)
return ajax_response(
response=dependencies_result,
status=200
)
IndexesView.register_node_view(blueprint)

Binary file not shown.


Binary file not shown.


View File

@ -0,0 +1,409 @@
define(
['jquery', 'underscore', 'underscore.string', 'pgadmin', 'pgadmin.browser',
'backform', 'alertify', 'pgadmin.browser.collection'],
function($, _, S, pgAdmin, pgBrowser, Backform, alertify) {
if (!pgBrowser.Nodes['coll-index']) {
var databases = pgAdmin.Browser.Nodes['coll-index'] =
pgAdmin.Browser.Collection.extend({
node: 'index',
label: '{{ _('Indexes') }}',
type: 'coll-index',
sqlAlterHelp: 'sql-alterindex.html',
sqlCreateHelp: 'sql-createindex.html',
columns: ['name', 'description']
});
};
// Model to create column collection control
var ColumnModel = pgAdmin.Browser.Node.Model.extend({
defaults: {
colname: undefined,
collspcname: undefined,
op_class: undefined,
sort_order: false,
nulls: false
},
schema: [
{
id: 'colname', label:'{{ _('Column') }}', cell: 'string',
type: 'text', disabled: 'inSchema', editable: false,
control: 'node-list-by-name', node: 'column'
},{
id: 'collspcname', label:'{{ _('Collation') }}', cell: 'string',
type: 'text', disabled: 'inSchema', editable: false,
control: 'node-ajax-options', url: 'get_collations', node: 'index'
},{
id: 'op_class', label:'{{ _('Operator class') }}', cell: 'string',
type: 'text', disabled: 'checkAccessMethod', editable: false,
control: 'node-ajax-options', url: 'get_op_class', node: 'index',
deps: ['amname'], transform: function(data) {
/* We need to extract data from the collection according
* to the access method selected by the user; if none is selected,
* send the btree-related op_class options
*/
var amname = this.model.handler.get('amname'),
options = data['btree'];
if(_.isUndefined(amname))
return options;
_.each(data, function(v, k) {
if(amname === k) {
options = v;
}
});
return options;
}
},{
id: 'sort_order', label:'{{ _('Sort order') }}', cell: 'switch',
type: 'switch', disabled: 'checkAccessMethod', editable: false,
deps: ['amname'],
options: {
'onText': 'DESC', 'offText': 'ASC',
'onColor': 'success', 'offColor': 'default',
'size': 'small'
}
},{
id: 'nulls', label:'{{ _('NULLs') }}', cell: 'switch',
type: 'switch', disabled: 'checkAccessMethod', editable: false,
deps: ['amname', 'sort_order'],
options: {
'onText': 'FIRST', 'offText': 'LAST',
'onColor': 'success', 'offColor': 'default',
'size': 'small'
}
}
],
validate: function() {
this.errorModel.clear();
if (_.isUndefined(this.get('colname'))
|| String(this.get('colname')).replace(/^\s+|\s+$/g, '') == '') {
var msg = '{{ _('Column Name cannot be empty.') }}';
this.errorModel.set('colname', msg);
return msg;
}
},
// We will check if we are under schema node
inSchema: function() {
if(this.node_info && 'catalog' in this.node_info) {
return true;
}
return false;
},
// We will check if we are under schema node & in 'create' mode
inSchemaWithModelCheck: function(m) {
if(this.node_info && 'schema' in this.node_info) {
// We will disable control if it's in 'edit' mode
if (m.isNew()) {
return false;
} else {
return true;
}
}
return true;
},
// We will check if we are under the schema node and check the access method
checkAccessMethod: function(m) {
// If the access method is empty or btree then do not disable the field
var parent_model = m.handler;
if(!m.inSchema.apply(this, [m]) &&
(_.isUndefined(parent_model.get('amname')) ||
_.isNull(parent_model.get('amname')) ||
String(parent_model.get('amname')).replace(/^\s+|\s+$/g, '') == '' ||
parent_model.get('amname') === 'btree')) {
// We need to set nulls to true if sort_order is set to DESC;
// NULLS FIRST is the default for DESC
if(m.get('sort_order') == true) {
setTimeout(function() { m.set('nulls', true) }, 10);
} else {
setTimeout(function() { m.set('nulls', false) }, 10);
}
return false;
}
return true;
},
});
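// Note: the ColumnModel defined above is reused as the row model of the
// 'columns' collection control declared in the index node schema below
// (see the 'columns' field with model: ColumnModel).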
if (!pgBrowser.Nodes['index']) {
pgAdmin.Browser.Nodes['index'] = pgAdmin.Browser.Node.extend({
parent_type: ['table', 'view', 'mview'],
collection_type: ['coll-table', 'coll-view'],
type: 'index',
label: '{{ _('Index') }}',
hasSQL: true,
hasDepends: true,
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
pgBrowser.add_menus([{
name: 'create_index_on_coll', node: 'coll-index', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Index...') }}',
icon: 'wcTabIcon icon-index', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_index', node: 'index', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Index...') }}',
icon: 'wcTabIcon icon-index', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_index_onTable', node: 'table', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _('Index...') }}',
icon: 'wcTabIcon icon-index', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_index_onMatView', node: 'mview', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 5, label: '{{ _('Index...') }}',
icon: 'wcTabIcon icon-index', data: {action: 'create', check: true},
enable: 'canCreate'
}
]);
},
canDrop: pgBrowser.Nodes['schema'].canChildDrop,
canDropCascade: pgBrowser.Nodes['schema'].canChildDrop,
model: pgAdmin.Browser.Node.Model.extend({
defaults: {
name: undefined,
nspname: undefined,
tabname: undefined,
spcname: 'pg_default',
amname: 'btree'
},
schema: [{
id: 'name', label: '{{ _('Name') }}', cell: 'string',
type: 'text', disabled: 'inSchema'
},{
id: 'oid', label:'{{ _('OID') }}', cell: 'string',
type: 'int', disabled: true, mode: ['edit', 'properties']
},{
id: 'spcname', label:'{{ _('Tablespace') }}', cell: 'string',
control: 'node-list-by-name', node: 'tablespace',
select2: {'allowClear': true},
type: 'text', mode: ['properties', 'create', 'edit'],
disabled: 'inSchema', filter: function(d) {
// If tablespace name is not "pg_global" then we need to exclude them
if(d && d.label.match(/pg_global/))
{
return false;
}
return true;
}
},{
id: 'amname', label:'{{ _('Access Method') }}', cell: 'string',
type: 'text', mode: ['properties', 'create', 'edit'],
disabled: 'inSchemaWithModelCheck', url: 'get_access_methods',
group: '{{ _('Definition') }}', select2: {'allowClear': true},
control: Backform.NodeAjaxOptionsControl.extend({
// When access method changes we need to clear columns collection
onChange: function() {
Backform.NodeAjaxOptionsControl.prototype.onChange.apply(this, arguments);
var self = this,
// current access method
current_am = self.model.get('amname'),
// previous access method
previous_am = self.model.previous('amname');
if (current_am != previous_am && self.model.get('columns').length !== 0) {
var msg = '{{ _('Changing access method will clear columns collection') }}';
alertify.confirm(msg, function (e) {
// User clicks Ok, lets clear collection
var column_collection = self.model.get('columns');
column_collection.reset();
}, function() {
// User clicks Cancel set previous value again in combo box
setTimeout(function(){
self.model.set('amname', previous_am);
}, 10);
});
}
}
})
},{
id: 'cols', label:'{{ _('Columns') }}', cell: 'string',
type: 'text', disabled: 'inSchema', mode: ['properties']
},{
id: 'fillfactor', label:'{{ _('Fill factor') }}', cell: 'string',
type: 'int', disabled: 'inSchema', mode: ['create', 'edit', 'properties'],
min: 10, max:100, group: '{{ _('Definition') }}'
},{
id: 'indisunique', label:'{{ _('Unique?') }}', cell: 'string',
type: 'switch', disabled: 'inSchemaWithModelCheck',
group: '{{ _('Definition') }}'
},{
id: 'indisclustered', label:'{{ _('Clustered?') }}', cell: 'string',
type: 'switch', disabled: 'inSchema',
group: '{{ _('Definition') }}'
},{
id: 'indisvalid', label:'{{ _('Valid?') }}', cell: 'string',
type: 'switch', disabled: true, mode: ['properties'],
},{
id: 'indisprimary', label:'{{ _('Primary?') }}', cell: 'string',
type: 'switch', disabled: true, mode: ['properties'],
},{
id: 'is_sys_idx', label:'{{ _('System index?') }}', cell: 'string',
type: 'switch', disabled: true, mode: ['properties'],
},{
id: 'isconcurrent', label:'{{ _('Concurrent build?') }}', cell: 'string',
type: 'switch', disabled: 'inSchemaWithModelCheck',
mode: ['create', 'edit'], group: '{{ _('Definition') }}'
},{
id: 'indconstraint', label:'{{ _('Constraint') }}', cell: 'string',
type: 'text', disabled: 'inSchemaWithModelCheck', mode: ['create', 'edit'],
control: 'sql-field', visible: true, group: '{{ _('Definition') }}'
},{
id: 'columns', label: '{{ _('Columns') }}', type: 'collection',
group: '{{ _('Definition') }}', model: ColumnModel, mode: ['edit', 'create'],
canAdd: function(m) {
// We will disable it if it's in 'edit' mode
if (m.isNew()) {
return true;
} else {
return false;
}
},
canEdit: function(m) {
// We will disable it if it's in 'edit' mode
if (m.isNew()) {
return true;
} else {
return false;
}
},
canDelete: function(m) {
// We will disable it if it's in 'edit' mode
if (m.isNew()) {
return true;
} else {
return false;
}
},
control: 'unique-col-collection', uniqueCol : ['colname']
},{
id: 'description', label:'{{ _('Comment') }}', cell: 'string',
type: 'multiline', mode: ['properties', 'create', 'edit'],
disabled: 'inSchema'
}
],
validate: function(keys) {
var err = {},
changedAttrs = this.changed,
msg = undefined;
// Nothing to validate
if (keys && keys.length == 0) {
this.errorModel.clear();
return null;
} else {
this.errorModel.clear();
}
if (_.isUndefined(this.get('name'))
|| String(this.get('name')).replace(/^\s+|\s+$/g, '') == '') {
msg = '{{ _('Name can not be empty.') }}';
this.errorModel.set('name', msg);
return msg;
}
if (_.isUndefined(this.get('spcname'))
|| String(this.get('spcname')).replace(/^\s+|\s+$/g, '') == '') {
msg = '{{ _('Tablespace can not be empty.') }}';
this.errorModel.set('spcname', msg);
return msg;
}
if (_.isUndefined(this.get('amname'))
|| String(this.get('amname')).replace(/^\s+|\s+$/g, '') == '') {
msg = '{{ _('Access method can not be empty.') }}';
this.errorModel.set('amname', msg);
return msg;
}
// Check if all columns have a name
var cols = this.get('columns');
if(cols && cols.length > 0) {
if(!_.every(cols.pluck('colname'))) {
msg = '{{ _('You must specify column name.') }}';
this.errorModel.set('columns', msg);
return msg;
}
} else if(cols){
msg = '{{ _('You must specify at least one column.') }}';
this.errorModel.set('columns', msg);
return msg;
}
return null;
},
// We will check if we are under a catalog node
inSchema: function() {
if(this.node_info && 'catalog' in this.node_info) {
return true;
}
return false;
},
// We will check if we are under schema node & in 'create' mode
inSchemaWithModelCheck: function(m) {
if(this.node_info && 'schema' in this.node_info) {
// We will disable control if it's in 'edit' mode
if (m.isNew()) {
return false;
} else {
return true;
}
}
return true;
},
// Check whether to enable/disable the control
inSchemaWithColumnCheck: function(m) {
if(this.node_info && 'schema' in this.node_info) {
// We will disable the control for system columns,
// i.e. when the column's position (attnum) is less than 1
if (m.isNew()) {
return false;
} else {
// if we are in edit mode
if (!_.isUndefined(m.get('attnum')) && m.get('attnum') >= 1 ) {
return false;
} else {
return true;
}
}
}
return true;
}
}),
// Below function will enable the right click menu for creating an index
canCreate: function(itemData, item, data) {
// If check is false then we will allow the create menu
if (data && data.check == false)
return true;
var t = pgBrowser.tree, i = item, d = itemData, parents = [];
// To iterate over tree to check parent node
while (i) {
// If it is a schema then allow the user to create an index
if (_.indexOf(['schema'], d._type) > -1)
return true;
parents.push(d._type);
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// If node is under catalog then do not allow 'create' menu
if (_.indexOf(parents, 'catalog') > -1) {
return false;
} else {
return true;
}
}
});
}
return pgBrowser.Nodes['index'];
});

View File

@ -0,0 +1,497 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2016, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Implements Rule Node"""
import json
from flask import render_template, make_response, request, jsonify
from flask.ext.babel import gettext
from pgadmin.utils.ajax import make_json_response, \
make_response as ajax_response, internal_server_error
from pgadmin.browser.utils import PGChildNodeView
import pgadmin.browser.server_groups.servers.databases.schemas as schemas
from pgadmin.browser.server_groups.servers.databases.schemas.utils import \
parse_rule_definition
from pgadmin.browser.collection import CollectionNodeModule
from pgadmin.utils.ajax import precondition_required
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from functools import wraps
class RuleModule(CollectionNodeModule):
"""
class RuleModule(CollectionNodeModule):
A rule collection Node which inherits CollectionNodeModule
class and define methods:
get_nodes - To generate collection node.
script_load - tells when to load js file.
csssnippets - add css to the page
"""
NODE_TYPE = 'rule'
COLLECTION_LABEL = gettext("Rules")
def __init__(self, *args, **kwargs):
self.min_ver = None
self.max_ver = None
super(RuleModule, self).__init__(*args, **kwargs)
def BackendSupported(self, manager, **kwargs):
"""
Load this module if the object is a view; we will not load it
under a materialized view.
"""
if super(RuleModule, self).BackendSupported(manager, **kwargs):
conn = manager.connection(did=kwargs['did'])
# If DB is not connected then return error to browser
if not conn.connected():
return precondition_required(
gettext(
"Connection to the server has been lost!"
)
)
if 'vid' not in kwargs:
return True
self.template_path = 'rules/sql'
SQL = render_template("/".join(
[self.template_path, 'backend_support.sql']
), vid=kwargs['vid'])
status, res = conn.execute_scalar(SQL)
# check if any errors
if not status:
return internal_server_error(errormsg=res)
# 'res' is True when the object is a view (not a materialized view),
# otherwise False
return res
def get_nodes(self, gid, sid, did, scid, **kwargs):
"""
Generate the collection node
"""
assert('tid' in kwargs or 'vid' in kwargs)
yield self.generate_browser_collection_node(
kwargs['tid'] if 'tid' in kwargs else kwargs['vid']
)
@property
def node_inode(self):
"""
If a node has children return True otherwise False
"""
return False
@property
def script_load(self):
"""
Load the module script for rule, when any of the database nodes are
initialized.
"""
return schemas.SchemaModule.NODE_TYPE
@property
def csssnippets(self):
"""
Returns a snippet of css to include in the page
"""
snippets = [
render_template(
"browser/css/collection.css",
node_type=self.node_type,
_=gettext
),
render_template(
"rules/css/rule.css",
node_type=self.node_type,
_=gettext
)
]
for submodule in self.submodules:
snippets.extend(submodule.csssnippets)
return snippets
# Create blueprint of RuleModule.
blueprint = RuleModule(__name__)
class RuleView(PGChildNodeView):
"""
This is a class for rule node which inherits the
properties and methods from PGChildNodeView class and define
various methods to list, create, update and delete rule.
Variables:
---------
* node_type - tells which type of node it is
* parent_ids - id with its type and name of parent nodes
* ids - id with type and name of extension module being used.
* operations - function routes mappings defined.
"""
node_type = blueprint.node_type
parent_ids = [
{'type': 'int', 'id': 'gid'},
{'type': 'int', 'id': 'sid'},
{'type': 'int', 'id': 'did'},
{'type': 'int', 'id': 'scid'},
{'type': 'int', 'id': 'tid'}
]
ids = [
{'type': 'int', 'id': 'rid'}
]
operations = dict({
'obj': [
{'get': 'properties', 'delete': 'delete', 'put': 'update'},
{'get': 'list', 'post': 'create'}
],
'children': [{
'get': 'children'
}],
'delete': [{'delete': 'delete'}],
'nodes': [{'get': 'node'}, {'get': 'nodes'}],
'sql': [{'get': 'sql'}],
'msql': [{'get': 'msql'}, {'get': 'msql'}],
'stats': [{'get': 'statistics'}],
'dependency': [{'get': 'dependencies'}],
'dependent': [{'get': 'dependents'}],
'module.js': [{}, {}, {'get': 'module_js'}],
'configs': [{'get': 'configs'}]
})
def module_js(self):
"""
This method returns the javascript module (rules.js) for this node.
"""
return make_response(
render_template(
"rules/js/rules.js",
_=gettext
),
200, {'Content-Type': 'application/x-javascript'}
)
def check_precondition(f):
"""
This function will behave as a decorator which will check the
database connection before running a view. It will also attach
manager, conn & template_path properties to self
"""
@wraps(f)
def wrap(*args, **kwargs):
# Here args[0] will hold self & kwargs will hold gid,sid,did
self = args[0]
self.manager = get_driver(
PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
self.conn = self.manager.connection(did=kwargs['did'])
# If DB not connected then return error to browser
if not self.conn.connected():
return precondition_required(
gettext(
"Connection to the server has been lost!"
)
)
self.datlastsysoid = self.manager.db_info[kwargs['did']]['datlastsysoid']
self.template_path = 'rules/sql'
return f(*args, **kwargs)
return wrap
@check_precondition
def list(self, gid, sid, did, scid, tid):
"""
Fetch all rule properties and render into properties tab
"""
# Fetch all rules under the given table/view
SQL = render_template("/".join(
[self.template_path, 'properties.sql']), tid=tid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return ajax_response(
response=res['rows'],
status=200
)
@check_precondition
def nodes(self, gid, sid, did, scid, tid):
"""
List all the rules under the Rules Collection node
"""
res = []
SQL = render_template("/".join(
[self.template_path, 'properties.sql']), tid=tid)
status, rset = self.conn.execute_2darray(SQL)
if not status:
return internal_server_error(errormsg=rset)
for row in rset['rows']:
res.append(
self.blueprint.generate_browser_node(
row['oid'],
tid,
row['name'],
icon="icon-rule"
))
return make_json_response(
data=res,
status=200
)
@check_precondition
def properties(self, gid, sid, did, scid, tid, rid):
"""
Fetch the properties of an individual rule and render in properties tab
"""
SQL = render_template("/".join(
[self.template_path, 'properties.sql']
), rid=rid, datlastsysoid=self.datlastsysoid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
return ajax_response(
response=parse_rule_definition(res),
status=200
)
@check_precondition
def create(self, gid, sid, did, scid, tid):
"""
This function will create a new rule object
"""
required_args = [
'name',
]
data = request.form if request.form else \
json.loads(request.data.decode())
for arg in required_args:
if arg not in data:
return make_json_response(
status=410,
success=0,
errormsg=gettext(
"Couldn't find the required parameter (%s)."
) % arg
)
try:
SQL = render_template("/".join(
[self.template_path, 'create.sql']), data=data)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
# Fetch the rule id against rule name to display node
# in tree browser
SQL = render_template("/".join(
[self.template_path, 'rule_id.sql']), rule_name=data['name'])
status, rule_id = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=rule_id)
return jsonify(
node=self.blueprint.generate_browser_node(
rule_id,
tid,
data['name'],
icon="icon-rule"
)
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def update(self, gid, sid, did, scid, tid, rid):
"""
This function will update a rule object
"""
data = request.form if request.form else \
json.loads(request.data.decode())
SQL = self.getSQL(gid, sid, data, tid, rid)
try:
if SQL and SQL.strip('\n') and SQL.strip(' '):
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Rule updated"),
data={
'id': tid,
'sid': sid,
'gid': gid,
'did': did
}
)
else:
return make_json_response(
success=1,
info="Nothing to update",
data={
'id': tid,
'scid': scid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def delete(self, gid, sid, did, scid, tid, rid):
"""
This function will drop a rule object
"""
# Below will decide if it's simple drop or drop with cascade call
cascade = True if self.cmd == 'delete' else False
try:
# Fetch the rule name, relation and schema for the given rule id
SQL = render_template("/".join(
[self.template_path, 'delete.sql']), rid=rid)
status, res_data = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res_data)
# drop rule
rset = res_data['rows'][0]
SQL = render_template("/".join(
[self.template_path, 'delete.sql']),
rulename=rset['rulename'],
relname=rset['relname'],
nspname=rset['nspname'],
cascade=cascade
)
status, res = self.conn.execute_scalar(SQL)
if not status:
return internal_server_error(errormsg=res)
return make_json_response(
success=1,
info=gettext("Rule dropped"),
data={
'id': tid,
'sid': sid,
'gid': gid,
'did': did
}
)
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def msql(self, gid, sid, did, scid, tid, rid=None):
"""
This function returns modified SQL
"""
data = request.args
SQL = self.getSQL(gid, sid, data, tid, rid)
return make_json_response(
data=SQL,
status=200
)
@check_precondition
def sql(self, gid, sid, did, scid, tid, rid):
"""
This function will generate sql to render into the sql panel
"""
SQL = render_template("/".join(
[self.template_path, 'properties.sql']), rid=rid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
res_data = parse_rule_definition(res)
SQL = render_template("/".join(
[self.template_path, 'create.sql']),
data=res_data, display_comments=True)
return ajax_response(response=SQL)
def getSQL(self, gid, sid, data, tid, rid):
"""
This function will generate sql from model data
"""
try:
if rid is not None:
SQL = render_template("/".join(
[self.template_path, 'properties.sql']), rid=rid)
status, res = self.conn.execute_dict(SQL)
if not status:
return internal_server_error(errormsg=res)
old_data = parse_rule_definition(res)
SQL = render_template(
"/".join([self.template_path, 'update.sql']),
data=data, o_data=old_data
)
else:
SQL = render_template("/".join(
[self.template_path, 'create.sql']), data=data)
return SQL
except Exception as e:
return internal_server_error(errormsg=str(e))
@check_precondition
def dependents(self, gid, sid, did, scid, tid, rid):
"""
This function gets the dependents and returns an ajax response
for the rule node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
tid: View ID
rid: Rule ID
"""
dependents_result = self.get_dependents(self.conn, rid)
return ajax_response(
response=dependents_result,
status=200
)
@check_precondition
def dependencies(self, gid, sid, did, scid, tid, rid):
"""
This function gets the dependencies and returns an ajax response
for the rule node.
Args:
gid: Server Group ID
sid: Server ID
did: Database ID
tid: View ID
rid: Rule ID
"""
dependencies_result = self.get_dependencies(self.conn, rid)
return ajax_response(
response=dependencies_result,
status=200
)
RuleView.register_node_view(blueprint)
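
The check_precondition decorator defined above is the gatekeeper for every view method: it refuses to run the view once the database connection has dropped. A minimal standalone sketch of the same pattern is shown below; FakeConnection and DemoView are illustrative stand-ins, not part of the pgAdmin code.

from functools import wraps

class FakeConnection(object):
    """Illustrative stand-in for the driver connection object."""
    def __init__(self, alive=True):
        self._alive = alive

    def connected(self):
        return self._alive

def check_precondition(f):
    """Run the wrapped view only while the connection is alive."""
    @wraps(f)
    def wrap(self, *args, **kwargs):
        if not self.conn.connected():
            return "precondition failed: connection to the server has been lost"
        return f(self, *args, **kwargs)
    return wrap

class DemoView(object):
    def __init__(self, conn):
        self.conn = conn

    @check_precondition
    def list(self):
        return "rule list rendered"

print(DemoView(FakeConnection(True)).list())   # rule list rendered
print(DemoView(FakeConnection(False)).list())  # precondition failed: ...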

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,16 @@
.icon-rule{
background-image: url('{{ url_for('NODE-rule.static', filename='img/rule.png') }}') !important;
border-radius: 10px;
background-repeat: no-repeat;
align-content: center;
vertical-align: middle;
height: 1.3em;
}
.sql_field_height_140 {
height: 140px;
}
.sql_field_height_280 {
height: 280px;
}

View File

@ -0,0 +1,266 @@
define(
['jquery', 'underscore', 'underscore.string', 'pgadmin',
'pgadmin.browser', 'codemirror'],
function($, _, S, pgAdmin, pgBrowser, CodeMirror) {
/**
Create and add a rule collection into nodes
@param {variable} label - Label for Node
@param {variable} type - Type of Node
@param {variable} columns - List of columns to
display under properties.
*/
if (!pgBrowser.Nodes['coll-rule']) {
var rules = pgAdmin.Browser.Nodes['coll-rule'] =
pgAdmin.Browser.Collection.extend({
node: 'rule',
label: '{{ _("Rules") }}',
type: 'coll-rule',
columns: ["name", "owner", "comment"]
});
}
/**
Create and add a Rule Node into nodes
@param {variable} parent_type - The list of nodes
under which this node to display
@param {variable} type - Type of Node
@param {variable} hasSQL - To show SQL tab
@param {variable} canDrop - Adds drop rule option
in the context menu
@param {variable} canDropCascade - Adds drop Cascade
rule option in the context menu
*/
if (!pgBrowser.Nodes['rule']) {
pgAdmin.Browser.Nodes['rule'] = pgAdmin.Browser.Node.extend({
parent_type: ['table','view'],
type: 'rule',
sqlAlterHelp: 'sql-alterrule.html',
sqlCreateHelp: 'sql-createrule.html',
label: '{{ _("rule") }}',
collection_type: 'coll-table',
hasSQL: true,
hasDepends: true,
canDrop: function(itemData, item, data){
pgBrowser.Nodes['schema'].canChildDrop.apply(this, [itemData, item, data]);
if(_.has(itemData, 'label') && itemData.label === '_RETURN')
return false;
else {
return true;
}
},
canDropCascade: function(itemData, item, data){
pgBrowser.Nodes['schema'].canChildDrop.apply(this, [itemData, item, data]);
if(_.has(itemData, 'label') && itemData.label === '_RETURN')
return false;
else {
return true;
}
},
Init: function() {
/* Avoid multiple registration of menus */
if (this.initialized)
return;
this.initialized = true;
/**
Add "create rule" menu option into context and object menu
for the following nodes:
coll-rule, rule and view and table.
@property {data} - Allow create rule option on schema node or
system rules node.
*/
pgBrowser.add_menus([{
name: 'create_rule_on_coll', node: 'coll-rule', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 1, label: '{{ _("Rule...") }}',
icon: 'wcTabIcon icon-rule', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_rule_onView', node: 'view', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 5, label: '{{ _("Rule...") }}',
icon: 'wcTabIcon icon-rule', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_rule', node: 'rule', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 1, label: '{{ _("Rule...") }}',
icon: 'wcTabIcon icon-rule', data: {action: 'create', check: true},
enable: 'canCreate'
},{
name: 'create_rule', node: 'table', module: this,
applies: ['object', 'context'], callback: 'show_obj_properties',
category: 'create', priority: 4, label: '{{ _("Rule...") }}',
icon: 'wcTabIcon icon-rule', data: {action: 'create', check: true},
enable: 'canCreate'
}
]);
},
/**
Define model for the rule node and specify the node
properties of the model in schema.
*/
model: pgAdmin.Browser.Node.Model.extend({
schema: [{
id: 'name', label: '{{ _("Name") }}',
type: 'text', disabled: function(m) {
// disable the name field if it is a system rule
if (m && m.get('name') == "_RETURN") {
return true;
}
if (m.isNew()) {
return false;
} else if (m.node_info.server.version >= 90400) {
return false;
}
return true;
}
},
{
id: 'oid', label:'{{ _("OID") }}',
type: 'text', disabled: true, mode: ['properties']
},
{
id: 'schema', label:'{{ _("") }}',
type: 'text', visible: false, disabled: function(m) {
// It is used while generating sql
m.set('schema', m.node_info.schema.label);
}
},
{
id: 'view', label:'{{ _("") }}',
type: 'text', visible: false, disabled: function(m){
// It is used while generating sql
m.set('view', this.node_data.label);
}
},
{
id: 'event', label:'{{ _("Event") }}', control: 'select2',
group: '{{ _("Definition") }}', type: 'text',
select2: {
width: '100%',
allowClear: false
},
options:[
{label: 'Select', value: 'Select'},
{label: 'Insert', value: 'Insert'},
{label: 'Update', value: 'Update'},
{label: 'Delete', value: 'Delete'}
]
},
{
id: 'do_instead', label:'{{ _("Do Instead") }}', group: '{{ _("Definition") }}',
type: 'switch'
},
{
id: 'condition', label:'{{ _("Condition") }}',
type: 'text', group: '{{ _("Definition") }}',
control: Backform.SqlFieldControl
},
{
id: 'statements', label:'{{ _("Commands") }}',
type: 'text', group: '{{ _("Definition") }}',
control: Backform.SqlFieldControl
},
{
id: 'system_rule', label:'{{ _("System rule?") }}',
type: 'switch', mode: ['properties']
},
{
id: 'enabled', label:'{{ _("Enabled?") }}',
type: 'switch', mode: ['properties']
},
{
id: 'comment', label:'{{ _("Comment") }}', cell: 'string', type: 'multiline'
}
],
validate: function() {
// Triggers specific error messages for fields
var err = {},
errmsg,
field_name = this.get('name');
if (_.isUndefined(field_name) || _.isNull(field_name) ||
String(field_name).replace(/^\s+|\s+$/g, '') === '')
{
err['name'] = '{{ _("Please specify name.") }}';
errmsg = errmsg || err['name'];
this.errorModel.set('name', errmsg);
return errmsg;
}
else
{
this.errorModel.unset('name');
}
return null;
}
}),
// Show or hide create rule menu option on parent node
canCreate: function(itemData, item, data) {
// If check is false then we will allow the create menu
if (data && data.check === false)
return true;
var t = pgBrowser.tree, i = item, d = itemData;
// To iterate over tree to check parent node
while (i) {
// If it is schema then allow user to create rule
if (_.indexOf(['schema'], d._type) > -1)
return true;
if ('coll-rule' == d._type) {
// Check that we are not a child of a rule node
prev_i = t.hasParent(i) ? t.parent(i) : null;
prev_d = prev_i ? t.itemData(prev_i) : null;
prev_j = t.hasParent(prev_i) ? t.parent(prev_i) : null;
prev_e = prev_j ? t.itemData(prev_j) : null;
prev_k = t.hasParent(prev_j) ? t.parent(prev_j) : null;
prev_f = prev_k ? t.itemData(prev_k) : null;
if( prev_f._type == 'catalog') {
return false;
} else {
return true;
}
}
/**
Check if it is view and its parent node is schema
then allow to create Rule
*/
else if('view' == d._type){
prev_i = t.hasParent(i) ? t.parent(i) : null;
prev_d = prev_i ? t.itemData(prev_i) : null;
prev_j = t.hasParent(prev_i) ? t.parent(prev_i) : null;
prev_e = prev_j ? t.itemData(prev_j) : null;
if(prev_e._type == 'schema') {
return true;
}else{
return false;
}
}
i = t.hasParent(i) ? t.parent(i) : null;
d = i ? t.itemData(i) : null;
}
// By default we want to allow the create menu
return true;
}
});
}
return pgBrowser.Nodes['coll-rule'];
});

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,4 @@
{% if data.comment %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(data.schema, data.table) }}
IS {{ data.comment|qtLiteral }};
{% endif %}

View File

@ -0,0 +1,4 @@
{% if data %}
ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}
ADD{% if data.name %} CONSTRAINT {{ conn|qtIdent(data.name) }}{% endif%} CHECK ({{ data.consrc }});
{% endif %}
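
Templates like the one above are rendered through Flask's render_template() together with driver-provided qtIdent/qtLiteral filters. A rough sketch of how the check-constraint template expands, using jinja2 directly with simplified stub filters (the stubs and the sample table are illustrative, not the real quoting logic):

from jinja2 import Environment

env = Environment()
# Simplified stand-ins for the driver's identifier/literal quoting filters.
env.filters['qtIdent'] = lambda conn, *names: '.'.join('"%s"' % n for n in names)
env.filters['qtLiteral'] = lambda value: "'%s'" % str(value).replace("'", "''")

TEMPLATE = (
    "ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}\n"
    "    ADD{% if data.name %} CONSTRAINT {{ conn|qtIdent(data.name) }}"
    "{% endif %} CHECK ({{ data.consrc }});"
)

data = {'schema': 'public', 'table': 'emp', 'name': 'chk_salary',
        'consrc': 'salary > 0'}
print(env.from_string(TEMPLATE).render(conn=None, data=data))
# ALTER TABLE "public"."emp"
#     ADD CONSTRAINT "chk_salary" CHECK (salary > 0);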

View File

@ -0,0 +1,3 @@
{% if data %}
ALTER TABLE {{ conn|qtIdent(data.nspname, data.relname) }} DROP CONSTRAINT {{ conn|qtIdent(data.name) }};
{% endif %}

View File

@ -0,0 +1,4 @@
SELECT conname as name
FROM pg_constraint ct
WHERE contype = 'c'
AND ct.oid = {{cid}}::oid

View File

@ -0,0 +1,7 @@
SELECT
oid, conname as name
FROM
pg_constraint
WHERE
conrelid = {{tid}}::oid
AND conname={{ name|qtLiteral }};

View File

@ -0,0 +1,5 @@
SELECT ct.oid,
ct.conname as name
FROM pg_constraint ct
WHERE contype='c' AND
conrelid = {{tid}}::oid LIMIT 1;

View File

@ -0,0 +1,7 @@
SELECT nsp.nspname AS schema,
rel.relname AS table
FROM
pg_class rel
JOIN pg_namespace nsp
ON rel.relnamespace = nsp.oid::int
WHERE rel.oid = {{tid}}::int

View File

@ -0,0 +1,6 @@
SELECT c.oid, conname as name
FROM pg_constraint c
WHERE contype = 'c'
{% if tid %}
AND conrelid = {{ tid }}::oid
{% endif %}

View File

@ -0,0 +1,13 @@
SELECT c.oid, conname as name, relname, nspname, description as comment ,
pg_get_expr(conbin, conrelid, true) as consrc
FROM pg_constraint c
JOIN pg_class cl ON cl.oid=conrelid
JOIN pg_namespace nl ON nl.oid=relnamespace
LEFT OUTER JOIN
pg_description des ON (des.objoid=c.oid AND
des.classoid='pg_constraint'::regclass)
WHERE contype = 'c'
AND conrelid = {{ tid }}::oid
{% if cid %}
AND c.oid = {{ cid }}::oid
{% endif %}

View File

@ -0,0 +1,4 @@
{% if data.comment is defined and data.comment != o_data.comment %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(o_data.name) }} ON {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
IS {{ data.comment|qtLiteral }};
{% endif %}

View File

@ -0,0 +1,4 @@
{% if data.comment %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(data.schema, data.table) }}
IS {{ data.comment|qtLiteral }};
{% endif %}

View File

@ -0,0 +1,6 @@
{% if data %}
ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}
ADD{% if data.name %} CONSTRAINT {{ conn|qtIdent(data.name) }}{% endif%} CHECK ({{ data.consrc }}){% if data.convalidated %}
NOT VALID{% endif %}{% if data.connoinherit %} NO INHERIT{% endif %};
{% endif %}

View File

@ -0,0 +1,3 @@
{% if data %}
ALTER TABLE {{ conn|qtIdent(data.nspname, data.relname) }} DROP CONSTRAINT {{ conn|qtIdent(data.name) }};
{% endif %}

View File

@ -0,0 +1,5 @@
SELECT conname as name,
NOT convalidated as convalidated
FROM pg_constraint ct
WHERE contype = 'c'
AND ct.oid = {{cid}}::oid

View File

@ -0,0 +1,8 @@
SELECT
oid, conname as name,
NOT convalidated as convalidated
FROM
pg_constraint
WHERE
conrelid = {{tid}}::oid
AND conname={{ name|qtLiteral }};

View File

@ -0,0 +1,6 @@
SELECT ct.oid,
ct.conname as name,
NOT convalidated as convalidated
FROM pg_constraint ct
WHERE contype='c' AND
conrelid = {{tid}}::oid LIMIT 1;

View File

@ -0,0 +1,7 @@
SELECT nsp.nspname AS schema,
rel.relname AS table
FROM
pg_class rel
JOIN pg_namespace nsp
ON rel.relnamespace = nsp.oid::int
WHERE rel.oid = {{tid}}::int

View File

@ -0,0 +1,7 @@
SELECT c.oid, conname as name,
NOT convalidated as convalidated
FROM pg_constraint c
WHERE contype = 'c'
{% if tid %}
AND conrelid = {{ tid }}::oid
{% endif %}

View File

@ -0,0 +1,14 @@
SELECT c.oid, conname as name, relname, nspname, description as comment,
pg_get_expr(conbin, conrelid, true) as consrc,
connoinherit, NOT convalidated as convalidated
FROM pg_constraint c
JOIN pg_class cl ON cl.oid=conrelid
JOIN pg_namespace nl ON nl.oid=relnamespace
LEFT OUTER JOIN
pg_description des ON (des.objoid=c.oid AND
des.classoid='pg_constraint'::regclass)
WHERE contype = 'c'
AND conrelid = {{ tid }}::oid
{% if cid %}
AND c.oid = {{ cid }}::oid
{% endif %}

View File

@ -0,0 +1,13 @@
{% if data %}
{% if data.name != o_data.name %}
ALTER TABLE {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
RENAME CONSTRAINT {{ conn|qtIdent(o_data.name) }} TO {{ conn|qtIdent(data.name) }};{% endif -%}
{% if 'convalidated' in data and o_data.convalidated != data.convalidated and not data.convalidated %}
ALTER TABLE {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
VALIDATE CONSTRAINT {{ conn|qtIdent(data.name) }};{% endif -%}
{% if data.comment is defined and data.comment != o_data.comment %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(o_data.nspname, o_data.relname) }}
IS {{ data.comment|qtLiteral }};{% endif %}
{% endif -%}

View File

@ -0,0 +1,2 @@
ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}
VALIDATE CONSTRAINT {{ conn|qtIdent(data.name) }};

View File

@ -0,0 +1,13 @@
{% macro APPLY(conn, schema_name, table_object, column_object, role, privs, with_grant_privs) -%}
{% if privs %}
GRANT {% for p in privs %}{% if loop.index != 1 %}, {% endif %}{{p}}({{conn|qtIdent(column_object)}}){% endfor %}
ON {{ conn|qtIdent(schema_name, table_object) }} TO {{ conn|qtIdent(role) }};
{% endif %}
{% if with_grant_privs %}
GRANT {% for p in with_grant_privs %}{% if loop.index != 1 %}, {% endif %}{{p}}({{conn|qtIdent(column_object)}}){% endfor %}
ON {{ conn|qtIdent(schema_name, table_object) }} TO {{ conn|qtIdent(role) }} WITH GRANT OPTION;
{% endif %}
{%- endmacro %}
{% macro RESETALL(conn, schema_name, table_object, column_object, role) -%}
REVOKE ALL({{ conn|qtIdent(column_object) }}) ON {{ conn|qtIdent(schema_name, table_object) }} FROM {{ conn|qtIdent(role) }};
{%- endmacro %}
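
As a rough illustration of how the APPLY macro above turns a privilege list into column-level GRANT statements (again with a simplified qtIdent stub; the role and table names are purely hypothetical):

from jinja2 import Environment

env = Environment()
# Simplified stand-in for the driver's qtIdent filter.
env.filters['qtIdent'] = lambda conn, *names: '.'.join('"%s"' % n for n in names)

MACRO = (
    "{% macro APPLY(conn, schema, table, column, role, privs) -%}\n"
    "GRANT {% for p in privs %}{% if loop.index != 1 %}, {% endif %}"
    "{{ p }}({{ conn|qtIdent(column) }}){% endfor %}\n"
    "    ON {{ conn|qtIdent(schema, table) }} TO {{ conn|qtIdent(role) }};\n"
    "{%- endmacro %}\n"
    "{{ APPLY(conn, 'public', 'emp', 'salary', 'payroll_role', ['SELECT', 'UPDATE']) }}"
)
print(env.from_string(MACRO).render(conn=None))
# GRANT SELECT("salary"), UPDATE("salary")
#     ON "public"."emp" TO "payroll_role";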

View File

@ -0,0 +1,6 @@
{% macro APPLY(conn, type, schema_name, parent_object, child_object, provider, label) -%}
SECURITY LABEL FOR {{ conn|qtIdent(provider) }} ON {{ type }} {{ conn|qtIdent(schema_name, parent_object, child_object) }} IS {{ label|qtLiteral }};
{%- endmacro %}
{% macro DROP(conn, type, schema_name, parent_object, child_object, provider) -%}
SECURITY LABEL FOR {{ conn|qtIdent(provider) }} ON {{ type }} {{ conn|qtIdent(schema_name, parent_object, child_object) }} IS NULL;
{%- endmacro %}

View File

@ -0,0 +1,34 @@
SELECT 'attacl' as deftype, COALESCE(gt.rolname, 'public') grantee, g.rolname grantor, array_agg(privilege_type) as privileges, array_agg(is_grantable) as grantable
FROM
(SELECT
d.grantee, d.grantor, d.is_grantable,
CASE d.privilege_type
WHEN 'CONNECT' THEN 'c'
WHEN 'CREATE' THEN 'C'
WHEN 'DELETE' THEN 'd'
WHEN 'EXECUTE' THEN 'X'
WHEN 'INSERT' THEN 'a'
WHEN 'REFERENCES' THEN 'x'
WHEN 'SELECT' THEN 'r'
WHEN 'TEMPORARY' THEN 'T'
WHEN 'TRIGGER' THEN 't'
WHEN 'TRUNCATE' THEN 'D'
WHEN 'UPDATE' THEN 'w'
WHEN 'USAGE' THEN 'U'
ELSE 'UNKNOWN'
END AS privilege_type
FROM
(SELECT attacl
FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attnum = {{clid}}::int
) acl,
(SELECT (d).grantee AS grantee, (d).grantor AS grantor, (d).is_grantable
AS is_grantable, (d).privilege_type AS privilege_type FROM (SELECT
aclexplode(attacl) as d FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attnum = {{clid}}::int) a) d
) d
LEFT JOIN pg_catalog.pg_roles g ON (d.grantor = g.oid)
LEFT JOIN pg_catalog.pg_roles gt ON (d.grantee = gt.oid)
GROUP BY g.rolname, gt.rolname

View File

@ -0,0 +1,38 @@
{% import 'column/macros/security.macros' as SECLABLE %}
{% import 'column/macros/privilege.macros' as PRIVILEGE %}
{% import 'macros/variable.macros' as VARIABLE %}
{### Add column ###}
{% if data.name and data.cltype %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ADD COLUMN {{conn|qtIdent(data.name)}} {{data.cltype}}{% if data.attlen %}
({{data.attlen}}{% if data.attprecision%}, {{data.attprecision}}{% endif %}){% endif %}{% if data.hasSqrBracket %}
[]{% endif %}{% if data.collspcname %}
COLLATE {{data.collspcname}}{% endif %}{% if data.attnotnull %}
NOT NULL{% endif %}{% if data.defval %}
DEFAULT {{data.defval}}{% endif %};
{% endif %}
{### Add comments ###}
{% if data and data.description %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};
{% endif %}
{### Add variables to column ###}
{% if data.attoptions %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, data.attoptions) }}
{% endif %}
{### ACL ###}
{% if data.attacl %}
{% for priv in data.attacl %}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{### Security Labels ###}
{% if data.seclabels %}
{% for r in data.seclabels %}
{{ SECLABLE.APPLY(conn, 'COLUMN',data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}

View File

@ -0,0 +1 @@
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}} DROP COLUMN {{conn|qtIdent(data.name)}};

View File

@ -0,0 +1,9 @@
SELECT
ref.relname AS refname, d2.refclassid, dep.deptype AS deptype
FROM pg_depend dep
LEFT JOIN pg_depend d2 ON dep.objid=d2.objid AND dep.refobjid != d2.refobjid
LEFT JOIN pg_class ref ON ref.oid=d2.refobjid
LEFT JOIN pg_attribute att ON d2.refclassid=att.attrelid AND d2.refobjsubid=att.attnum
{{ where }} AND
dep.classid=(SELECT oid FROM pg_class WHERE relname='pg_attrdef') AND
dep.refobjid NOT IN (SELECT d3.refobjid FROM pg_depend d3 WHERE d3.objid=d2.refobjid)

View File

@ -0,0 +1,5 @@
SELECT tt.oid, format_type(tt.oid,NULL) AS typname
FROM pg_cast
JOIN pg_type tt ON tt.oid=casttarget
WHERE castsource={{type_id}}
AND castcontext IN ('i', 'a')

View File

@ -0,0 +1,7 @@
SELECT --nspname, collname,
CASE WHEN length(nspname) > 0 AND length(collname) > 0 THEN
concat(quote_ident(nspname), '.', quote_ident(collname))
ELSE '' END AS collation
FROM pg_collation c, pg_namespace n
WHERE c.collnamespace=n.oid
ORDER BY nspname, collname;

View File

@ -0,0 +1,12 @@
SELECT array_to_string(array_agg(inhrelname), ', ') inhrelname, attrname
FROM
(SELECT
inhparent::regclass AS inhrelname,
a.attname AS attrname
FROM pg_inherits i
LEFT JOIN pg_attribute a ON
(attrelid = inhparent AND attnum > 0)
WHERE inhrelid = {{tid}}::oid
ORDER BY inhseqno
) a
GROUP BY attrname;

View File

@ -0,0 +1,5 @@
SELECT nsp.nspname AS schema ,rel.relname AS table
FROM pg_class rel
JOIN pg_namespace nsp
ON rel.relnamespace = nsp.oid::int
WHERE rel.oid = {{tid}}::int

View File

@ -0,0 +1,4 @@
SELECT att.attnum
FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attname = {{data.name|qtLiteral}}

View File

@ -0,0 +1,14 @@
SELECT * FROM
(SELECT format_type(t.oid,NULL) AS typname,
CASE WHEN typelem > 0 THEN typelem ELSE t.oid END AS elemoid
,typlen, typtype, t.oid, nspname,
(SELECT COUNT(1) FROM pg_type t2 WHERE t2.typname = t.typname) > 1 AS isdup
FROM pg_type t
JOIN pg_namespace nsp ON typnamespace=nsp.oid
WHERE (NOT (typname = 'unknown' AND nspname = 'pg_catalog'))
AND typisdefined AND typtype IN ('b', 'c', 'd', 'e', 'r')
AND NOT EXISTS (select 1 from pg_class where relnamespace=typnamespace and relname = typname and relkind != 'c')
AND (typname not like '_%' OR NOT EXISTS (select 1 from pg_class where relnamespace=typnamespace and relname = substring(typname from 2)::name and relkind != 'c'))
AND nsp.nspname != 'information_schema'
) AS dummy
ORDER BY nspname <> 'pg_catalog', nspname <> 'public', nspname, 1

View File

@ -0,0 +1,5 @@
SELECT COUNT(1)
FROM pg_depend dep
JOIN pg_class cl ON dep.classid=cl.oid AND relname='pg_rewrite'
WHERE refobjid= {{tid}}::oid
AND refobjsubid= {{clid|qtLiteral}};

View File

@ -0,0 +1,18 @@
SELECT att.attname as name, att.attnum as OID, format_type(ty.oid,NULL) AS datatype
FROM pg_attribute att
JOIN pg_type ty ON ty.oid=atttypid
JOIN pg_namespace tn ON tn.oid=ty.typnamespace
JOIN pg_class cl ON cl.oid=att.attrelid
JOIN pg_namespace na ON na.oid=cl.relnamespace
LEFT OUTER JOIN pg_type et ON et.oid=ty.typelem
LEFT OUTER JOIN pg_attrdef def ON adrelid=att.attrelid AND adnum=att.attnum
LEFT OUTER JOIN (pg_depend JOIN pg_class cs ON objid=cs.oid AND cs.relkind='S') ON refobjid=att.attrelid AND refobjsubid=att.attnum
LEFT OUTER JOIN pg_namespace ns ON ns.oid=cs.relnamespace
LEFT OUTER JOIN pg_index pi ON pi.indrelid=att.attrelid AND indisprimary
WHERE att.attrelid = {{tid}}::oid
{### To show system objects ###}
{% if not show_sys_objects %}
AND att.attnum > 0
{% endif %}
AND att.attisdropped IS FALSE
ORDER BY att.attnum

View File

@ -0,0 +1,45 @@
SELECT att.attname as name, att.*, def.*, pg_catalog.pg_get_expr(def.adbin, def.adrelid) AS defval,
CASE WHEN att.attndims > 0 THEN 1 ELSE 0 END AS isarray,
format_type(ty.oid,NULL) AS typname,
format_type(ty.oid,att.atttypmod) AS displaytypname,
tn.nspname as typnspname, et.typname as elemtypname,
ty.typstorage AS defaultstorage, cl.relname, na.nspname,
concat(quote_ident(na.nspname) ,'.', quote_ident(cl.relname)) AS parent_tbl,
att.attstattarget, description, cs.relname AS sername,
ns.nspname AS serschema,
(SELECT count(1) FROM pg_type t2 WHERE t2.typname=ty.typname) > 1 AS isdup,
indkey, coll.collname, nspc.nspname as collnspname , attoptions,
-- Start pgAdmin4, added to save time on client side parsing
CASE WHEN length(coll.collname) > 0 AND length(nspc.nspname) > 0 THEN
concat(quote_ident(coll.collname),'.',quote_ident(nspc.nspname))
ELSE '' END AS collspcname,
CASE WHEN strpos(format_type(ty.oid,att.atttypmod), '.') > 0 THEN
split_part(format_type(ty.oid,att.atttypmod), '.', 2)
ELSE format_type(ty.oid,att.atttypmod) END AS cltype,
-- End pgAdmin4
EXISTS(SELECT 1 FROM pg_constraint WHERE conrelid=att.attrelid AND contype='f' AND att.attnum=ANY(conkey)) As is_fk,
(SELECT array_agg(provider || '=' || label) FROM pg_seclabels sl1 WHERE sl1.objoid=att.atttypid AND sl1.objsubid=0) AS seclabels,
(CASE WHEN (att.attnum < 1) THEN true ElSE false END) AS is_sys_column
FROM pg_attribute att
JOIN pg_type ty ON ty.oid=atttypid
JOIN pg_namespace tn ON tn.oid=ty.typnamespace
JOIN pg_class cl ON cl.oid=att.attrelid
JOIN pg_namespace na ON na.oid=cl.relnamespace
LEFT OUTER JOIN pg_type et ON et.oid=ty.typelem
LEFT OUTER JOIN pg_attrdef def ON adrelid=att.attrelid AND adnum=att.attnum
LEFT OUTER JOIN pg_description des ON (des.objoid=att.attrelid AND des.objsubid=att.attnum AND des.classoid='pg_class'::regclass)
LEFT OUTER JOIN (pg_depend JOIN pg_class cs ON objid=cs.oid AND cs.relkind='S') ON refobjid=att.attrelid AND refobjsubid=att.attnum
LEFT OUTER JOIN pg_namespace ns ON ns.oid=cs.relnamespace
LEFT OUTER JOIN pg_index pi ON pi.indrelid=att.attrelid AND indisprimary
LEFT OUTER JOIN pg_collation coll ON att.attcollation=coll.oid
LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
WHERE att.attrelid = {{tid}}::oid
{% if clid %}
AND att.attnum = {{clid}}::int
{% endif %}
{### To show system objects ###}
{% if not show_sys_objects %}
AND att.attnum > 0
{% endif %}
AND att.attisdropped IS FALSE
ORDER BY att.attnum

View File

@ -0,0 +1,107 @@
{% import 'column/macros/security.macros' as SECLABLE %}
{% import 'column/macros/privilege.macros' as PRIVILEGE %}
{% import 'macros/variable.macros' as VARIABLE %}
{### Rename column name ###}
{% if data.name != o_data.name %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
RENAME {{conn|qtIdent(o_data.name)}} TO {{conn|qtIdent(data.name)}};
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen and data.attlen != o_data.attlen) or (data.attprecision and data.attprecision != o_data.attprecision) %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} TYPE {% if data.cltype %}{{data.cltype}} {% else %}{{o_data.cltype}} {% endif %}{% if data.attlen %}
({{data.attlen}}{% if data.attprecision%}, {{data.attprecision}}{% endif %}){% endif %}{% if data.hasSqrBracket %}
[]{% endif %}{% if data.collspcname and data.collspcname != o_data.collspcname %}
COLLATE {{data.collspcname}}{% endif %};
{% endif %}
{### Alter column default value ###}
{% if data.defval and data.defval != o_data.defval %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET DEFAULT {{data.defval}};
{% endif %}
{### Alter column not null value ###}
{% if 'attnotnull' in data and data.attnotnull != o_data.attnotnull %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} {% if data.attnotnull %}SET{% else %}DROP{% endif %} NOT NULL;
{% endif %}
{### Alter column statistics value ###}
{% if data.attstattarget and data.attstattarget != o_data.attstattarget %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET STATISTICS {{data.attstattarget}};
{% endif %}
{### Alter column storage value ###}
{% if data.attstorage and data.attstorage != o_data.attstorage %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET STORAGE {%if data.attstorage == 'p' %}
PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
{% endif %}
{% if data.description is defined %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};
{% endif %}
{### Update column variables ###}
{% if 'attoptions' in data and data.attoptions|length > 0 %}
{% set variables = data.attoptions %}
{% if 'deleted' in variables and variables.deleted|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.UNSET(conn, 'COLUMN', data.name, variables.deleted) }}
{% endif %}
{% if 'added' in variables and variables.added|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, variables.added) }}
{% endif %}
{% if 'changed' in variables and variables.changed|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, variables.changed) }}
{% endif %}
{% endif %}
{### Update column privileges ###}
{# Change the privileges #}
{% if data.attacl %}
{% if 'deleted' in data.attacl %}
{% for priv in data.attacl.deleted %}
{{ PRIVILEGE.RESETALL(conn, data.schema, data.table, data.name, priv.grantee) }}
{% endfor %}
{% endif %}
{% if 'changed' in data.attacl %}
{% for priv in data.attacl.changed %}
{{ PRIVILEGE.RESETALL(conn, data.schema, data.table, data.name, priv.grantee) }}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{% if 'added' in data.attacl %}
{% for priv in data.attacl.added %}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{% endif %}
{### Update column security labels ###}
{# The SQL generated below will change Security Label #}
{% if data.seclabels and data.seclabels|length > 0 %}
{% set seclabels = data.seclabels %}
{% if 'deleted' in seclabels and seclabels.deleted|length > 0 %}
{% for r in seclabels.deleted %}
{{ SECLABLE.DROP(conn, 'COLUMN', data.schema, data.table, data.name, r.provider) }}
{% endfor %}
{% endif %}
{% if 'added' in seclabels and seclabels.added|length > 0 %}
{% for r in seclabels.added %}
{{ SECLABLE.APPLY(conn, 'COLUMN',data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}
{% if 'changed' in seclabels and seclabels.changed|length > 0 %}
{% for r in seclabels.changed %}
{{ SECLABLE.APPLY(conn, 'COLUMN',data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}
{% endif %}
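
The update template emits SQL only for attributes that are present in data and differ from o_data, so an unchanged column yields an empty script. A trimmed two-clause sketch of that behaviour, with a stub qtIdent filter and hypothetical table/column names:

from jinja2 import Environment

env = Environment()
env.filters['qtIdent'] = lambda conn, *names: '.'.join('"%s"' % n for n in names)

TEMPLATE = (
    "{% if data.defval and data.defval != o_data.defval %}"
    "ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}\n"
    "    ALTER COLUMN {{ conn|qtIdent(data.name) }} SET DEFAULT {{ data.defval }};\n"
    "{% endif %}"
    "{% if 'attnotnull' in data and data.attnotnull != o_data.attnotnull %}"
    "ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}\n"
    "    ALTER COLUMN {{ conn|qtIdent(data.name) }} "
    "{% if data.attnotnull %}SET{% else %}DROP{% endif %} NOT NULL;\n"
    "{% endif %}"
)

o_data = {'defval': None, 'attnotnull': False}
data = {'schema': 'public', 'table': 'emp', 'name': 'age', 'attnotnull': True}
print(env.from_string(TEMPLATE).render(conn=None, data=data, o_data=o_data))
# Only the SET NOT NULL statement is emitted, because no new default was supplied.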

View File

@ -0,0 +1,34 @@
SELECT 'attacl' as deftype, COALESCE(gt.rolname, 'public') grantee, g.rolname grantor, array_agg(privilege_type) as privileges, array_agg(is_grantable) as grantable
FROM
(SELECT
d.grantee, d.grantor, d.is_grantable,
CASE d.privilege_type
WHEN 'CONNECT' THEN 'c'
WHEN 'CREATE' THEN 'C'
WHEN 'DELETE' THEN 'd'
WHEN 'EXECUTE' THEN 'X'
WHEN 'INSERT' THEN 'a'
WHEN 'REFERENCES' THEN 'x'
WHEN 'SELECT' THEN 'r'
WHEN 'TEMPORARY' THEN 'T'
WHEN 'TRIGGER' THEN 't'
WHEN 'TRUNCATE' THEN 'D'
WHEN 'UPDATE' THEN 'w'
WHEN 'USAGE' THEN 'U'
ELSE 'UNKNOWN'
END AS privilege_type
FROM
(SELECT attacl
FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attnum = {{clid}}::int
) acl,
(SELECT (d).grantee AS grantee, (d).grantor AS grantor, (d).is_grantable
AS is_grantable, (d).privilege_type AS privilege_type FROM (SELECT
aclexplode(attacl) as d FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attnum = {{clid}}::int) a) d
) d
LEFT JOIN pg_catalog.pg_roles g ON (d.grantor = g.oid)
LEFT JOIN pg_catalog.pg_roles gt ON (d.grantee = gt.oid)
GROUP BY g.rolname, gt.rolname

View File

@ -0,0 +1,38 @@
{% import 'column/macros/security.macros' as SECLABLE %}
{% import 'column/macros/privilege.macros' as PRIVILEGE %}
{% import 'macros/variable.macros' as VARIABLE %}
{### Add column ###}
{% if data.name and data.cltype %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ADD COLUMN {{conn|qtIdent(data.name)}} {{data.cltype}}{% if data.attlen %}
({{data.attlen}}{% if data.attprecision%}, {{data.attprecision}}{% endif %}){% endif %}{% if data.hasSqrBracket %}
[]{% endif %}{% if data.collspcname %}
COLLATE {{data.collspcname}}{% endif %}{% if data.attnotnull %}
NOT NULL{% endif %}{% if data.defval %}
DEFAULT {{data.defval}}{% endif %};
{% endif %}
{### Add comments ###}
{% if data and data.description %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};
{% endif %}
{### Add variables to column ###}
{% if data.attoptions %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, data.attoptions) }}
{% endif %}
{### ACL ###}
{% if data.attacl %}
{% for priv in data.attacl %}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{### Security Labels ###}
{% if data.seclabels %}
{% for r in data.seclabels %}
{{ SECLABLE.APPLY(conn, 'COLUMN',data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}

View File

@ -0,0 +1 @@
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}} DROP COLUMN {{conn|qtIdent(data.name)}};

View File

@ -0,0 +1,9 @@
SELECT
ref.relname AS refname, d2.refclassid, dep.deptype AS deptype
FROM pg_depend dep
LEFT JOIN pg_depend d2 ON dep.objid=d2.objid AND dep.refobjid != d2.refobjid
LEFT JOIN pg_class ref ON ref.oid=d2.refobjid
LEFT JOIN pg_attribute att ON d2.refclassid=att.attrelid AND d2.refobjsubid=att.attnum
{{ where }} AND
dep.classid=(SELECT oid FROM pg_class WHERE relname='pg_attrdef') AND
dep.refobjid NOT IN (SELECT d3.refobjid FROM pg_depend d3 WHERE d3.objid=d2.refobjid)

View File

@ -0,0 +1,5 @@
SELECT tt.oid, format_type(tt.oid,NULL) AS typname
FROM pg_cast
JOIN pg_type tt ON tt.oid=casttarget
WHERE castsource={{type_id}}
AND castcontext IN ('i', 'a')

View File

@ -0,0 +1,7 @@
SELECT --nspname, collname,
CASE WHEN length(nspname) > 0 AND length(collname) > 0 THEN
concat(quote_ident(nspname), '.', quote_ident(collname))
ELSE '' END AS collation
FROM pg_collation c, pg_namespace n
WHERE c.collnamespace=n.oid
ORDER BY nspname, collname;

View File

@ -0,0 +1,12 @@
SELECT array_to_string(array_agg(inhrelname), ', ') inhrelname, attrname
FROM
(SELECT
inhparent::regclass AS inhrelname,
a.attname AS attrname
FROM pg_inherits i
LEFT JOIN pg_attribute a ON
(attrelid = inhparent AND attnum > 0)
WHERE inhrelid = {{tid}}::oid
ORDER BY inhseqno
) a
GROUP BY attrname;

View File

@ -0,0 +1,5 @@
SELECT nsp.nspname AS schema ,rel.relname AS table
FROM pg_class rel
JOIN pg_namespace nsp
ON rel.relnamespace = nsp.oid::int
WHERE rel.oid = {{tid}}::int

View File

@ -0,0 +1,4 @@
SELECT att.attnum
FROM pg_attribute att
WHERE att.attrelid = {{tid}}::oid
AND att.attname = {{data.name|qtLiteral}}

View File

@ -0,0 +1,14 @@
SELECT * FROM
(SELECT format_type(t.oid,NULL) AS typname,
CASE WHEN typelem > 0 THEN typelem ELSE t.oid END AS elemoid
,typlen, typtype, t.oid, nspname,
(SELECT COUNT(1) FROM pg_type t2 WHERE t2.typname = t.typname) > 1 AS isdup
FROM pg_type t
JOIN pg_namespace nsp ON typnamespace=nsp.oid
WHERE (NOT (typname = 'unknown' AND nspname = 'pg_catalog'))
AND typisdefined AND typtype IN ('b', 'c', 'd', 'e', 'r')
AND NOT EXISTS (select 1 from pg_class where relnamespace=typnamespace and relname = typname and relkind != 'c')
AND (typname not like '_%' OR NOT EXISTS (select 1 from pg_class where relnamespace=typnamespace and relname = substring(typname from 2)::name and relkind != 'c'))
AND nsp.nspname != 'information_schema'
) AS dummy
ORDER BY nspname <> 'pg_catalog', nspname <> 'public', nspname, 1

View File

@ -0,0 +1,5 @@
SELECT COUNT(1)
FROM pg_depend dep
JOIN pg_class cl ON dep.classid=cl.oid AND relname='pg_rewrite'
WHERE refobjid= {{tid}}::oid
AND refobjsubid= {{clid|qtLiteral}};

View File

@ -0,0 +1,18 @@
SELECT att.attname as name, att.attnum as OID, format_type(ty.oid,NULL) AS datatype
FROM pg_attribute att
JOIN pg_type ty ON ty.oid=atttypid
JOIN pg_namespace tn ON tn.oid=ty.typnamespace
JOIN pg_class cl ON cl.oid=att.attrelid
JOIN pg_namespace na ON na.oid=cl.relnamespace
LEFT OUTER JOIN pg_type et ON et.oid=ty.typelem
LEFT OUTER JOIN pg_attrdef def ON adrelid=att.attrelid AND adnum=att.attnum
LEFT OUTER JOIN (pg_depend JOIN pg_class cs ON objid=cs.oid AND cs.relkind='S') ON refobjid=att.attrelid AND refobjsubid=att.attnum
LEFT OUTER JOIN pg_namespace ns ON ns.oid=cs.relnamespace
LEFT OUTER JOIN pg_index pi ON pi.indrelid=att.attrelid AND indisprimary
WHERE att.attrelid = {{tid}}::oid
{### To show system objects ###}
{% if not show_sys_objects %}
AND att.attnum > 0
{% endif %}
AND att.attisdropped IS FALSE
ORDER BY att.attnum

View File

@ -0,0 +1,45 @@
SELECT att.attname as name, att.*, def.*, pg_catalog.pg_get_expr(def.adbin, def.adrelid) AS defval,
CASE WHEN att.attndims > 0 THEN 1 ELSE 0 END AS isarray,
format_type(ty.oid,NULL) AS typname,
format_type(ty.oid,att.atttypmod) AS displaytypname,
tn.nspname as typnspname, et.typname as elemtypname,
ty.typstorage AS defaultstorage, cl.relname, na.nspname,
concat(quote_ident(na.nspname) ,'.', quote_ident(cl.relname)) AS parent_tbl,
att.attstattarget, description, cs.relname AS sername,
ns.nspname AS serschema,
(SELECT count(1) FROM pg_type t2 WHERE t2.typname=ty.typname) > 1 AS isdup,
indkey, coll.collname, nspc.nspname as collnspname , attoptions,
-- Start pgAdmin4, added to save time on client side parsing
CASE WHEN length(coll.collname) > 0 AND length(nspc.nspname) > 0 THEN
concat(quote_ident(coll.collname),'.',quote_ident(nspc.nspname))
ELSE '' END AS collspcname,
CASE WHEN strpos(format_type(ty.oid,att.atttypmod), '.') > 0 THEN
split_part(format_type(ty.oid,att.atttypmod), '.', 2)
ELSE format_type(ty.oid,att.atttypmod) END AS cltype,
-- End pgAdmin4
EXISTS(SELECT 1 FROM pg_constraint WHERE conrelid=att.attrelid AND contype='f' AND att.attnum=ANY(conkey)) As is_fk,
(SELECT array_agg(provider || '=' || label) FROM pg_seclabels sl1 WHERE sl1.objoid=att.atttypid AND sl1.objsubid=0) AS seclabels,
(CASE WHEN (att.attnum < 1) THEN true ElSE false END) AS is_sys_column
FROM pg_attribute att
JOIN pg_type ty ON ty.oid=atttypid
JOIN pg_namespace tn ON tn.oid=ty.typnamespace
JOIN pg_class cl ON cl.oid=att.attrelid
JOIN pg_namespace na ON na.oid=cl.relnamespace
LEFT OUTER JOIN pg_type et ON et.oid=ty.typelem
LEFT OUTER JOIN pg_attrdef def ON adrelid=att.attrelid AND adnum=att.attnum
LEFT OUTER JOIN pg_description des ON (des.objoid=att.attrelid AND des.objsubid=att.attnum AND des.classoid='pg_class'::regclass)
LEFT OUTER JOIN (pg_depend JOIN pg_class cs ON objid=cs.oid AND cs.relkind='S') ON refobjid=att.attrelid AND refobjsubid=att.attnum
LEFT OUTER JOIN pg_namespace ns ON ns.oid=cs.relnamespace
LEFT OUTER JOIN pg_index pi ON pi.indrelid=att.attrelid AND indisprimary
LEFT OUTER JOIN pg_collation coll ON att.attcollation=coll.oid
LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
WHERE att.attrelid = {{tid}}::oid
{% if clid %}
AND att.attnum = {{clid}}::int
{% endif %}
{### To show system objects ###}
{% if not show_sys_objects %}
AND att.attnum > 0
{% endif %}
AND att.attisdropped IS FALSE
ORDER BY att.attnum

View File

@ -0,0 +1,105 @@
{% import 'column/macros/security.macros' as SECLABLE %}
{% import 'column/macros/privilege.macros' as PRIVILEGE %}
{% import 'macros/variable.macros' as VARIABLE %}
{### Rename column name ###}
{% if data.name != o_data.name %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
RENAME {{conn|qtIdent(o_data.name)}} TO {{conn|qtIdent(data.name)}};
{% endif %}
{### Alter column type and collation ###}
{% if (data.cltype and data.cltype != o_data.cltype) or (data.attlen and data.attlen != o_data.attlen) or (data.attprecision and data.attprecision != o_data.attprecision) %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} TYPE {% if data.cltype %}{{data.cltype}} {% else %}{{o_data.cltype}} {% endif %}{% if data.attlen %}
({{data.attlen}}{% if data.attprecision%}, {{data.attprecision}}{% endif %}){% endif %}{% if data.hasSqrBracket %}
[]{% endif %}{% if data.collspcname and data.collspcname != o_data.collspcname %}
COLLATE {{data.collspcname}}{% endif %};
{% endif %}
{### Alter column default value ###}
{% if data.defval and data.defval != o_data.defval %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET DEFAULT {{data.defval}};
{% endif %}
{### Alter column not null value ###}
{% if 'attnotnull' in data and data.attnotnull != o_data.attnotnull %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} {% if data.attnotnull %}SET{% else %}DROP{% endif %} NOT NULL;
{% endif %}
{### Alter column statistics value ###}
{% if data.attstattarget and data.attstattarget != o_data.attstattarget %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET STATISTICS {{data.attstattarget}};
{% endif %}
{### Alter column storage value ###}
{% if data.attstorage and data.attstorage != o_data.attstorage %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ALTER COLUMN {{conn|qtIdent(data.name)}} SET STORAGE {%if data.attstorage == 'p' %}
PLAIN{% elif data.attstorage == 'm'%}MAIN{% elif data.attstorage == 'e'%}
EXTERNAL{% elif data.attstorage == 'x'%}EXTENDED{% endif %};
{% endif %}
{% if data.description is defined %}
COMMENT ON COLUMN {{conn|qtIdent(data.schema, data.table, data.name)}}
IS {{data.description|qtLiteral}};
{% endif %}
{### Update column variables ###}
{% if 'attoptions' in data and data.attoptions|length > 0 %}
{% set variables = data.attoptions %}
{% if 'deleted' in variables and variables.deleted|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.UNSET(conn, 'COLUMN', data.name, variables.deleted) }}
{% endif %}
{% if 'added' in variables and variables.added|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, variables.added) }}
{% endif %}
{% if 'changed' in variables and variables.changed|length > 0 %}
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
{{ VARIABLE.SET(conn, 'COLUMN', data.name, variables.changed) }}
{% endif %}
{% endif %}
{### Update column privileges ###}
{# Change the privileges #}
{% if data.attacl %}
{% if 'deleted' in data.attacl %}
{% for priv in data.attacl.deleted %}
{{ PRIVILEGE.RESETALL(conn, data.schema, data.table, data.name, priv.grantee) }}
{% endfor %}
{% endif %}
{% if 'changed' in data.attacl %}
{% for priv in data.attacl.changed %}
{{ PRIVILEGE.RESETALL(conn, data.schema, data.table, data.name, priv.grantee) }}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{% if 'added' in data.attacl %}
{% for priv in data.attacl.added %}
{{ PRIVILEGE.APPLY(conn, data.schema, data.table, data.name, priv.grantee, priv.without_grant, priv.with_grant) }}
{% endfor %}
{% endif %}
{% endif %}
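{# The PRIVILEGE.RESETALL/APPLY macros are expected to render column-level
   REVOKE/GRANT statements. Illustrative output only (hypothetical grantee):
   REVOKE ALL ("emp_name") ON "public"."emp" FROM "alice";
   GRANT SELECT ("emp_name"), UPDATE ("emp_name") ON "public"."emp" TO "alice"; #}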
{### Update column security labels ###}
{# The SQL generated below will change the column's security labels #}
{% if data.seclabels and data.seclabels|length > 0 %}
{% set seclabels = data.seclabels %}
{% if 'deleted' in seclabels and seclabels.deleted|length > 0 %}
{% for r in seclabels.deleted %}
{{ SECLABLE.DROP(conn, 'COLUMN', data.schema, data.table, data.name, r.provider) }}
{% endfor %}
{% endif %}
{% if 'added' in seclabels and seclabels.added|length > 0 %}
{% for r in seclabels.added %}
{{ SECLABLE.APPLY(conn, 'COLUMN', data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}
{% if 'changed' in seclabels and seclabels.changed|length > 0 %}
{% for r in seclabels.changed %}
{{ SECLABLE.APPLY(conn, 'COLUMN', data.schema, data.table, data.name, r.provider, r.label) }}
{% endfor %}
{% endif %}
{% endif %}
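{# The SECLABLE macros are expected to render SECURITY LABEL statements.
   Illustrative output only (hypothetical provider and label):
   SECURITY LABEL FOR selinux ON COLUMN public.emp.emp_name
       IS 'system_u:object_r:sepgsql_table_t:s0'; #}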

View File

@ -0,0 +1,4 @@
{% if data.comment %}
COMMENT ON CONSTRAINT {{ conn|qtIdent(data.name) }} ON {{ conn|qtIdent(data.schema, data.table) }}
IS {{ data.comment|qtLiteral }};
{% endif %}
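{# Illustrative rendering only (hypothetical names):
   COMMENT ON CONSTRAINT "no_overlap" ON "public"."room_booking"
       IS 'Prevents double booking of a room.'; #}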

View File

@ -0,0 +1,12 @@
ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }}
ADD{% if data.name %} CONSTRAINT {{ conn|qtIdent(data.name) }}{% endif %} EXCLUDE {% if data.amname and data.amname != '' %}USING {{data.amname}}{% endif %} (
{% for col in data.columns %}{% if loop.index != 1 %},
{% endif %}{{ conn|qtIdent(col.column) }} {% if col.oper_class and col.oper_class != '' %}{{col.oper_class}} {% endif %}{% if col.order %}ASC{% else %}DESC{% endif %} NULLS {% if col.nulls_order %}FIRST{% else %}LAST{% endif %} WITH {{col.operator}}{% endfor %}){% if data.fillfactor %}
WITH (FILLFACTOR={{data.fillfactor}}){% endif %}{% if data.spcname and data.spcname != "pg_default" %}
USING INDEX TABLESPACE {{ conn|qtIdent(data.spcname) }}{% endif %}
{% if data.condeferrable %}
DEFERRABLE{% if data.condeferred %}
INITIALLY DEFERRED{% endif %}
{% endif %}{% if data.constraint %} WHERE ({{data.constraint}}){% endif %};
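{# Illustrative rendering only (hypothetical table, columns and operators):
   ALTER TABLE "public"."room_booking"
       ADD CONSTRAINT "no_overlap" EXCLUDE USING gist (
       "room" ASC NULLS LAST WITH =,
       "during" ASC NULLS LAST WITH &&)
       WITH (FILLFACTOR=90)
       DEFERRABLE INITIALLY DEFERRED; #}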

View File

@ -0,0 +1,3 @@
{% if data %}
ALTER TABLE {{ conn|qtIdent(data.schema, data.table) }} DROP CONSTRAINT {{ conn|qtIdent(data.name) }}{% if cascade %} CASCADE{% endif %};
{% endif %}
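{# Illustrative rendering only (hypothetical names):
   ALTER TABLE "public"."room_booking" DROP CONSTRAINT "no_overlap" CASCADE; #}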

View File

@ -0,0 +1,6 @@
SELECT amname
FROM pg_am
WHERE EXISTS (SELECT 1
FROM pg_proc
WHERE oid=amgettuple)
ORDER BY amname;
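-- Returns only the access methods whose amgettuple support procedure is set,
-- i.e. methods that support index scans and can therefore back an exclusion
-- constraint (e.g. btree, gist). This assumes the pre-9.6 pg_am catalog layout,
-- where amgettuple is a column of pg_am.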

View File

@ -0,0 +1,22 @@
{% for n in range(colcnt|int) %}
{% if loop.index != 1 %}
UNION
{% endif %}
SELECT
i.indoption[{{loop.index -1}}] AS options,
pg_get_indexdef(i.indexrelid, {{loop.index}}, true) AS coldef,
op.oprname,
CASE WHEN (o.opcdefault = FALSE) THEN o.opcname ELSE NULL END AS opcname,
coll.collname,
nspc.nspname as collnspname,
format_type(ty.oid,NULL) AS col_type
FROM pg_index i
JOIN pg_attribute a ON (a.attrelid = i.indexrelid AND attnum = {{loop.index}})
JOIN pg_type ty ON ty.oid=a.atttypid
LEFT OUTER JOIN pg_opclass o ON (o.oid = i.indclass[{{loop.index -1}}])
LEFT OUTER JOIN pg_constraint c ON (c.conindid = i.indexrelid)
LEFT OUTER JOIN pg_operator op ON (op.oid = c.conexclop[{{loop.index}}])
LEFT OUTER JOIN pg_collation coll ON a.attcollation=coll.oid
LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
WHERE i.indexrelid = {{cid}}::oid
{% endfor %}
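{# This template is rendered with colcnt set to the number of indexed columns: each
   loop iteration emits one SELECT for column position loop.index, and the iterations
   are stitched together with UNION, so the result has one row per column of the
   exclusion-constraint index (its definition, operator, operator class, collation
   and type). #}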

View File

@ -0,0 +1,3 @@
SELECT conname as name
FROM pg_constraint ct
WHERE ct.conindid = {{cid}}::oid
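-- Looks up the constraint that owns the given index: {{cid}} is the index OID,
-- matched against pg_constraint.conindid.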

Some files were not shown because too many files have changed in this diff