Fix support for the CLOB datatype in EPAS. Fixes #3596

pull/14/head
Aditya Toshniwal 2018-09-13 13:45:06 +01:00 committed by Dave Page
parent d6c64e8f11
commit 38ddea038a
8 changed files with 74 additions and 48 deletions

View File

@ -18,5 +18,6 @@ Bug fixes
*********
| `Bug #3576 <https://redmine.postgresql.org/issues/3576>`_ - Ensure queries are no longer executed when dashboards are closed.
| `Bug #3596 <https://redmine.postgresql.org/issues/3596>`_ - Fix support for the CLOB datatype in EPAS.
| `Bug #3630 <https://redmine.postgresql.org/issues/3630>`_ - Ensure auto-complete works for objects in schemas other than public and pg_catalog.

View File

@ -26,6 +26,7 @@ from pgadmin.utils.ajax import make_json_response, internal_server_error, \
from pgadmin.utils.driver import get_driver
from config import PG_DEFAULT_DRIVER
from pgadmin.utils import IS_PY2
from pgadmin.utils.ajax import ColParamsJSONDecoder
# If we are in Python3
if not IS_PY2:
unicode = str
@ -427,7 +428,6 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
# we are receiving request when in edit mode
# we will send filtered types related to current type
present_type = data['cltype']
type_id = data['atttypid']
SQL = render_template("/".join([self.template_path,
@ -438,13 +438,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
edit_types_list = list()
# We will need present type in edit mode
if data['typnspname'] == "pg_catalog" or \
data['typnspname'] == "public":
edit_types_list.append(present_type)
else:
t = self.qtTypeIdent(self.conn, data['typnspname'], present_type)
edit_types_list.append(t)
data['cltype'] = t
edit_types_list.append(data['cltype'])
if int(is_reference) == 0:
SQL = render_template(
@ -455,8 +449,6 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
for row in rset['rows']:
edit_types_list.append(row['typname'])
else:
edit_types_list.append(present_type)
data['edit_types'] = edit_types_list
@ -517,14 +509,6 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
sql template
"""
# We need to add this exceptional case for manually adding " in type
# in json.loads('"char"') is valid json hence it
# converts '"char"' -> 'char' as string but if we
# send the same in collection json.loads() handles it properly in
Table & Type nodes, This handling is Column node specific
if type == 'char':
type = '"char"'
if '[]' in type:
type = type.replace('[]', '')
self.hasSqrBracket = True
@ -568,10 +552,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
)
for k, v in data.items():
try:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
data[k] = json.loads(v, encoding='utf-8', cls=ColParamsJSONDecoder)
required_args = {
'name': 'Name',
@ -747,10 +728,7 @@ class ColumnsView(PGChildNodeView, DataTypeReader):
"""
data = dict()
for k, v in request.args.items():
try:
data[k] = json.loads(v, encoding='utf-8')
except (ValueError, TypeError, KeyError):
data[k] = v
data[k] = json.loads(v, encoding='utf-8', cls=ColParamsJSONDecoder)
# Adding parent into data dict, will be using it while creating sql
data['schema'] = self.schema

View File

@ -1,5 +0,0 @@
SELECT tt.oid, format_type(tt.oid,NULL) AS typname
FROM pg_cast
JOIN pg_type tt ON tt.oid=casttarget
WHERE castsource={{type_id}}
AND castcontext IN ('i', 'a')

View File

@ -0,0 +1,44 @@
{### Column properties query: returns one row per column of table {{tid}}, ###}
{### or a single row when a specific column oid {{clid}} is supplied.      ###}
-- NOTE(review): joins pg_attribute to its type (pg_type/pg_namespace),
-- owning table, collation, default expression, comment, owning serial
-- sequence and primary-key index; dropped columns are excluded.
SELECT att.attname as name, att.*, def.*, pg_catalog.pg_get_expr(def.adbin, def.adrelid) AS defval,
CASE WHEN att.attndims > 0 THEN 1 ELSE 0 END AS isarray,
format_type(ty.oid,NULL) AS typname,
format_type(ty.oid,att.atttypmod) AS displaytypname,
CASE WHEN ty.typelem > 0 THEN ty.typelem ELSE ty.oid END as elemoid,
tn.nspname as typnspname, et.typname as elemtypname,
ty.typstorage AS defaultstorage, cl.relname, na.nspname,
concat(quote_ident(na.nspname) ,'.', quote_ident(cl.relname)) AS parent_tbl,
att.attstattarget, description, cs.relname AS sername,
ns.nspname AS serschema,
-- isdup: true when the type name is ambiguous across schemas
(SELECT count(1) FROM pg_type t2 WHERE t2.typname=ty.typname) > 1 AS isdup,
indkey, coll.collname, nspc.nspname as collnspname , attoptions,
-- Start pgAdmin4, added to save time on client side parsing
CASE WHEN length(coll.collname) > 0 AND length(nspc.nspname) > 0 THEN
  concat(quote_ident(nspc.nspname),'.',quote_ident(coll.collname))
ELSE '' END AS collspcname,
format_type(ty.oid,att.atttypmod) AS cltype,
-- End pgAdmin4
EXISTS(SELECT 1 FROM pg_constraint WHERE conrelid=att.attrelid AND contype='f' AND att.attnum=ANY(conkey)) As is_fk,
(SELECT array_agg(provider || '=' || label) FROM pg_seclabels sl1 WHERE sl1.objoid=att.attrelid AND sl1.objsubid=att.attnum) AS seclabels,
-- system columns (ctid, xmin, ...) carry attnum < 1
(CASE WHEN (att.attnum < 1) THEN true ElSE false END) AS is_sys_column
FROM pg_attribute att
  JOIN pg_type ty ON ty.oid=atttypid
  JOIN pg_namespace tn ON tn.oid=ty.typnamespace
  JOIN pg_class cl ON cl.oid=att.attrelid
  JOIN pg_namespace na ON na.oid=cl.relnamespace
  LEFT OUTER JOIN pg_type et ON et.oid=ty.typelem
  LEFT OUTER JOIN pg_attrdef def ON adrelid=att.attrelid AND adnum=att.attnum
  LEFT OUTER JOIN pg_description des ON (des.objoid=att.attrelid AND des.objsubid=att.attnum AND des.classoid='pg_class'::regclass)
  -- serial detection: a sequence (relkind='S') that depends on this column
  LEFT OUTER JOIN (pg_depend JOIN pg_class cs ON classid='pg_class'::regclass AND objid=cs.oid AND cs.relkind='S') ON refobjid=att.attrelid AND refobjsubid=att.attnum
  LEFT OUTER JOIN pg_namespace ns ON ns.oid=cs.relnamespace
  LEFT OUTER JOIN pg_index pi ON pi.indrelid=att.attrelid AND indisprimary
  LEFT OUTER JOIN pg_collation coll ON att.attcollation=coll.oid
  LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
WHERE att.attrelid = {{tid}}::oid
{% if clid %}
  AND att.attnum = {{clid}}::int
{% endif %}
{### To show system objects ###}
{% if not show_sys_objects %}
  AND att.attnum > 0
{% endif %}
  AND att.attisdropped IS FALSE
ORDER BY att.attnum

View File

@ -7,7 +7,7 @@
ALTER TABLE {{conn|qtIdent(data.schema, data.table)}}
ADD COLUMN {{conn|qtIdent(data.name)}} {% if is_sql %}{{data.displaytypname}}{% else %}{{ GET_TYPE.CREATE_TYPE_SQL(conn, data.cltype, data.attlen, data.attprecision, data.hasSqrBracket) }}{% endif %}{% if data.collspcname %}
COLLATE {{data.collspcname}}{% endif %}{% if data.attnotnull %}
NOT NULL{% endif %}{% if data.defval is defined and data.defval is not none %}
NOT NULL{% endif %}{% if data.defval and data.defval is not none %}
DEFAULT {{data.defval}}{% endif %};
{% endif %}

View File

@ -1,5 +1,11 @@
SELECT tt.oid, format_type(tt.oid,NULL) AS typname
FROM pg_cast
JOIN pg_type tt ON tt.oid=casttarget
WHERE castsource={{type_id}}
FROM pg_type tt
WHERE tt.oid in (
SELECT casttarget from pg_cast
WHERE castsource = {{type_id}}
AND castcontext IN ('i', 'a')
UNION
SELECT typbasetype from pg_type where oid = {{type_id}}
UNION
SELECT oid FROM pg_type WHERE typbasetype = {{type_id}}
)

View File

@ -250,7 +250,6 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
# we are receiving request when in edit mode
# we will send filtered types related to current type
present_type = column['cltype']
type_id = column['atttypid']
@ -290,15 +289,7 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
edit_types_list = list()
# We will need present type in edit mode
if column['typnspname'] == "pg_catalog" \
or column['typnspname'] == "public":
edit_types_list.append(present_type)
else:
t = self.qtTypeIdent(self.conn, column['typnspname'],
present_type)
edit_types_list.append(t)
column['cltype'] = t
edit_types_list.append(column['cltype'])
if int(is_reference) == 0:
SQL = render_template("/".join([self.column_template_path,
@ -308,8 +299,6 @@ class BaseTableView(PGChildNodeView, BasePartitionTable):
for row in rset['rows']:
edit_types_list.append(row['typname'])
else:
edit_types_list.append(present_type)
column['edit_types'] = edit_types_list
column['cltype'] = DataTypeReader.parse_type_name(

View File

@ -30,6 +30,19 @@ class DataTypeJSONEncoder(json.JSONEncoder):
return json.JSONEncoder.default(self, obj)
class ColParamsJSONDecoder(json.JSONDecoder):
    """JSON decoder for column parameter values.

    Column type names may be valid JSON scalars themselves (e.g. the
    quoted type '"char"'), which a plain ``json.loads`` would strip down
    to ``'char'``.  This decoder therefore returns the ORIGINAL raw text
    whenever decoding yields a plain string, decodes non-string JSON
    (numbers, booleans, arrays, objects) normally, and passes anything
    that is not valid JSON through unchanged.
    """

    def decode(self, obj):
        """Decode *obj*; fall back to the raw input for strings and
        malformed JSON."""
        retval = obj
        try:
            retval = json.JSONDecoder.decode(self, obj)
            if type(retval) == str:
                # BUG FIX: the original wrote ``retVal = obj`` — a dead
                # assignment due to a capitalization typo — so quoted
                # names like '"char"' were wrongly collapsed to 'char'.
                retval = obj
        except (ValueError, TypeError, KeyError):
            # Not valid JSON (e.g. a bare identifier): keep the raw value.
            retval = obj
        return retval
def get_no_cache_header():
"""
Prevent browser from caching data every time an