Support EXPLAIN on Greenplum. Fixes #3097

- Extract SQLEditor.execute and SQLEditor._poll into their own files and add test around them
 - Extract SQLEditor backend functions that start executing query to their own files and add tests around it
- Move the EXPLAIN SQL generation out of the front-end; the Explain plan parameters are now passed as a JSON object in the start-query call.
 - Extract the compile_template_name into a function that can be used by the different places that try to select the version of the template and the server type
pull/8/head
Joao Pedro De Almeida Pereira 2018-02-09 11:54:42 +00:00 committed by Dave Page
parent e60a84c44f
commit e16a952753
30 changed files with 3673 additions and 582 deletions

View File

@ -0,0 +1,287 @@
//////////////////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2018, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////////////////
import gettext from '../gettext';
import $ from 'jquery';
import url_for from '../url_for';
import axios from 'axios';
import * as transaction from './is_new_transaction_required';
// Thin adapter that drives the SQL editor's loading overlay by firing
// the editor's `pgadmin-sqleditor:loading-icon:*` events.
class LoadingScreen {
  constructor(sqlEditor) {
    this.sqlEditor = sqlEditor;
  }

  // Update the text shown on the (already visible) loading overlay.
  // The message is run through gettext for translation.
  setMessage(message) {
    const translated = gettext(message);
    this.sqlEditor.trigger('pgadmin-sqleditor:loading-icon:message', translated);
  }

  // Display the loading overlay, optionally with an initial message.
  show(withMessage) {
    this.sqlEditor.trigger('pgadmin-sqleditor:loading-icon:show', withMessage);
  }

  // Dismiss the loading overlay.
  hide() {
    this.sqlEditor.trigger('pgadmin-sqleditor:loading-icon:hide');
  }
}
// Starts execution of a SQL statement (optionally wrapped in an EXPLAIN
// plan) on the backend via `sqleditor.query_tool_start`, then polls
// `sqleditor.poll` until the query finishes, is cancelled, or fails,
// relaying progress and errors to the SQL editor UI.
class ExecuteQuery {
  // sqlEditor: the SQLEditor view object (provides transId, trigger,
  //   update_msg_history, render hooks, etc.).
  // userManagement: pgAdmin user-management module, used to detect and
  //   handle "login required" error responses.
  constructor(sqlEditor, userManagement) {
    this.sqlServerObject = sqlEditor;
    this.loadingScreen = new LoadingScreen(sqlEditor);
    this.userManagement = userManagement;
  }

  // Schedule the next poll after the editor's fallback poll interval.
  delayedPoll() {
    const self = this;
    setTimeout(
      () => {
        self.poll();
      }, self.sqlServerObject.POLL_FALLBACK_TIME());
  }

  // Kick off execution of `sqlStatement`. `explainPlan` is either null or
  // an object of EXPLAIN options ({format, analyze, verbose, ...}) that the
  // backend turns into the server-appropriate EXPLAIN prefix.
  execute(sqlStatement, explainPlan) {
    // If it is an empty query, do nothing.
    if (sqlStatement.length <= 0) return;

    const self = this;
    let service = axios.create({});
    self.explainPlan = explainPlan;

    const sqlStatementWithAnalyze = ExecuteQuery.prepareAnalyzeSql(sqlStatement, explainPlan);

    self.initializeExecutionOnSqlEditor(sqlStatementWithAnalyze);
    service.post(
      url_for('sqleditor.query_tool_start', {
        'trans_id': self.sqlServerObject.transId,
      }),
      JSON.stringify(sqlStatementWithAnalyze),
      {headers: {'Content-Type': 'application/json'}})
      .then(function (result) {
        let httpMessageData = result.data;
        self.removeGridViewMarker();

        if (ExecuteQuery.isSqlCorrect(httpMessageData)) {
          self.loadingScreen.setMessage('Waiting for the query execution to complete...');

          self.updateSqlEditorStateWithInformationFromServer(httpMessageData.data);

          // If status is True then poll the result.
          self.delayedPoll();
        } else {
          self.loadingScreen.hide();
          self.enableSQLEditorButtons();
          self.sqlServerObject.update_msg_history(false, httpMessageData.data.result);

          // Highlight the error in the sql panel
          self.sqlServerObject._highlight_error(httpMessageData.data.result);
        }
      }).catch(function (error) {
        self.onExecuteHTTPError(error.response.data);
      }
      );
  }

  // Ask the backend for the current status of the running query and react
  // to the four possible states: Success, Busy, NotConnected, Cancel.
  poll() {
    const self = this;
    let service = axios.create({});
    service.get(
      url_for('sqleditor.poll', {
        'trans_id': self.sqlServerObject.transId,
      })
    ).then(
      (httpMessage) => {
        if (ExecuteQuery.isQueryFinished(httpMessage)) {
          self.loadingScreen.setMessage('Loading data from the database server and rendering...');

          self.sqlServerObject.call_render_after_poll(httpMessage.data.data);
        } else if (ExecuteQuery.isQueryStillRunning(httpMessage)) {
          // If status is Busy then poll the result by recursive call to
          // the poll function.
          self.delayedPoll();
          self.sqlServerObject.setIsQueryRunning(true);
          if (httpMessage.data.data.result) {
            self.sqlServerObject.update_msg_history(httpMessage.data.data.status, httpMessage.data.data.result, false);
          }
        } else if (ExecuteQuery.isConnectionToServerLostWhilePolling(httpMessage)) {
          self.loadingScreen.hide();
          // Enable/Disable query tool button only if is_query_tool is true.
          if (self.sqlServerObject.is_query_tool) {
            self.enableSQLEditorButtons();
          }
          self.sqlServerObject.update_msg_history(false, httpMessage.data.data.result, true);
        } else if (ExecuteQuery.isQueryCancelled(httpMessage)) {
          self.loadingScreen.hide();
          self.sqlServerObject.update_msg_history(false, 'Execution Cancelled!', true);
        }
      }
    ).catch(
      error => {
        const errorData = error.response.data;
        // Enable/Disable query tool button only if is_query_tool is true.
        self.sqlServerObject.resetQueryHistoryObject(self.sqlServerObject);
        self.loadingScreen.hide();
        if (self.sqlServerObject.is_query_tool) {
          self.enableSQLEditorButtons();
        }

        if (ExecuteQuery.wasConnectionLostToServer(errorData)) {
          self.handleConnectionToServerLost();
          return;
        }
        if (self.userManagement.is_pga_login_required(errorData)) {
          return self.userManagement.pga_login();
        }

        let msg = ExecuteQuery.extractErrorMessage(errorData);

        self.sqlServerObject.update_msg_history(false, msg);
        // Highlight the error in the sql panel
        self.sqlServerObject._highlight_error(msg);
      });
  }

  // Put the editor into "query running" state: show the loading screen,
  // disable the run button, record the query text and start time, and
  // reset polling flags.
  initializeExecutionOnSqlEditor(sqlStatement) {
    this.loadingScreen.show('Initializing query execution...');
    $('#btn-flash').prop('disabled', true);

    this.sqlServerObject.query_start_time = new Date();
    if (typeof sqlStatement === 'object') {
      this.sqlServerObject.query = sqlStatement['sql'];
    } else {
      this.sqlServerObject.query = sqlStatement;
    }

    this.sqlServerObject.rows_affected = 0;
    this.sqlServerObject._init_polling_flags();
    this.disableSQLEditorButtons();
  }

  // Combine the raw SQL and the EXPLAIN options object into the JSON
  // payload expected by the backend start-query endpoint.
  static prepareAnalyzeSql(sqlStatement, analyzeSql) {
    let sqlStatementWithAnalyze = {
      sql: sqlStatement,
      explain_plan: analyzeSql,
    };
    return sqlStatementWithAnalyze;
  }

  // Handle an HTTP-level failure of the start-query request: connection
  // loss, required login, expired transaction, or a plain error message.
  onExecuteHTTPError(httpMessage) {
    const self = this;
    this.loadingScreen.hide();
    this.enableSQLEditorButtons();

    if (ExecuteQuery.wasConnectionLostToServer(httpMessage)) {
      this.handleConnectionToServerLost();
      return;
    }
    if (this.userManagement.is_pga_login_required(httpMessage)) {
      this.sqlServerObject.save_state('execute', [this.explainPlan]);
      this.userManagement.pga_login();
    }
    if (transaction.is_new_transaction_required(httpMessage)) {
      this.sqlServerObject.save_state('execute', [this.explainPlan]);
      this.sqlServerObject.init_transaction();
    }

    let msg = httpMessage.errormsg;
    if (httpMessage.responseJSON !== undefined) {
      if (httpMessage.responseJSON.errormsg !== undefined) {
        msg = httpMessage.responseJSON.errormsg;
      }
      if (httpMessage.status === 503 && httpMessage.responseJSON.info !== undefined &&
        httpMessage.responseJSON.info === 'CONNECTION_LOST') {
        // BUGFIX: the original used `setTimeout(function () { this... })`,
        // which loses the `this` binding (undefined in module code), so
        // the saved state / connection-lost handler never ran. Use the
        // captured `self` instead.
        setTimeout(function () {
          self.sqlServerObject.save_state('execute', [self.explainPlan]);
          self.sqlServerObject.handle_connection_lost(false, httpMessage);
        });
      }
    }
    this.sqlServerObject.update_msg_history(false, msg);
  }

  // Clear any error marker/highlight left in the grid view by a previous
  // failed execution.
  removeGridViewMarker() {
    if (this.sqlServerObject.gridView.marker) {
      this.sqlServerObject.gridView.marker.clear();
      delete this.sqlServerObject.gridView.marker;
      this.sqlServerObject.gridView.marker = null;

      // Remove already existing marker
      this.sqlServerObject.gridView.query_tool_obj.removeLineClass(this.sqlServerObject.marked_line_no, 'wrap', 'CodeMirror-activeline-background');
    }
  }

  // Re-enable the tool buttons and disable "cancel query".
  enableSQLEditorButtons() {
    this.sqlServerObject.disable_tool_buttons(false);
    $('#btn-cancel-query').prop('disabled', true);
  }

  // Disable the tool buttons and enable "cancel query" while running.
  disableSQLEditorButtons() {
    this.sqlServerObject.disable_tool_buttons(true);
    $('#btn-cancel-query').prop('disabled', false);
  }

  // readyState === 0 indicates the request never reached the server.
  static wasConnectionLostToServer(errorMessage) {
    return errorMessage.readyState === 0;
  }

  // Report a lost server connection in the messages panel.
  handleConnectionToServerLost() {
    this.sqlServerObject.update_msg_history(false,
      gettext('Not connected to the server or the connection to the server has been closed.')
    );
  }

  // Copy per-transaction capabilities returned by the start-query call
  // onto the editor object.
  updateSqlEditorStateWithInformationFromServer(messageData) {
    this.sqlServerObject.can_edit = messageData.can_edit;
    this.sqlServerObject.can_filter = messageData.can_filter;
    this.sqlServerObject.info_notifier_timeout = messageData.info_notifier_timeout;
  }

  static isSqlCorrect(httpMessageData) {
    return httpMessageData.data.status;
  }

  // Prefer the errormsg embedded in responseJSON when present.
  static extractErrorMessage(httpMessage) {
    let msg = httpMessage.errormsg;
    if (httpMessage.responseJSON !== undefined &&
      httpMessage.responseJSON.errormsg !== undefined)
      msg = httpMessage.responseJSON.errormsg;
    return msg;
  }

  static isQueryFinished(httpMessage) {
    return httpMessage.data.data.status === 'Success';
  }

  static isQueryStillRunning(httpMessage) {
    return httpMessage.data.data.status === 'Busy';
  }

  static isQueryCancelled(httpMessage) {
    return httpMessage.data.data.status === 'Cancel';
  }

  static isConnectionToServerLostWhilePolling(httpMessage) {
    return httpMessage.data.data.status === 'NotConnected';
  }
}
module.exports = {
ExecuteQuery: ExecuteQuery,
};

View File

@ -0,0 +1,14 @@
//////////////////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2018, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////////////////
// Returns truthy when the backend rejected the request with 404 and the
// DATAGRID_TRANSACTION_REQUIRED info flag, i.e. the datagrid transaction
// has expired and must be re-initialized before retrying.
export function is_new_transaction_required(xhr) {
  if (xhr.status !== 404) {
    return false;
  }
  return xhr.responseJSON &&
    xhr.responseJSON.info &&
    xhr.responseJSON.info === 'DATAGRID_TRANSACTION_REQUIRED';
}

View File

@ -2,19 +2,19 @@ import $ from 'jquery';
let queryToolActions = { let queryToolActions = {
_verbose: function () { _verbose: function () {
return $('.explain-verbose').hasClass('visibility-hidden') ? 'OFF' : 'ON'; return !$('.explain-verbose').hasClass('visibility-hidden');
}, },
_costsEnabled: function () { _costsEnabled: function () {
return $('.explain-costs').hasClass('visibility-hidden') ? 'OFF' : 'ON'; return !$('.explain-costs').hasClass('visibility-hidden');
}, },
_buffers: function () { _buffers: function () {
return $('.explain-buffers').hasClass('visibility-hidden') ? 'OFF' : 'ON'; return !$('.explain-buffers').hasClass('visibility-hidden');
}, },
_timing: function () { _timing: function () {
return $('.explain-timing').hasClass('visibility-hidden') ? 'OFF' : 'ON'; return !$('.explain-timing').hasClass('visibility-hidden');
}, },
_clearMessageTab: function () { _clearMessageTab: function () {
@ -35,18 +35,35 @@ let queryToolActions = {
let verbose = this._verbose(); let verbose = this._verbose();
let buffers = this._buffers(); let buffers = this._buffers();
let timing = this._timing(); let timing = this._timing();
let explainAnalyzeQuery = `EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE ${verbose}, COSTS ${costEnabled}, BUFFERS ${buffers}, TIMING ${timing}) `; const explainObject = {
format: 'json',
analyze: true,
verbose: verbose,
costs: costEnabled,
buffers: buffers,
timing: timing,
summary: false,
};
this._clearMessageTab(); this._clearMessageTab();
sqlEditorController.execute(explainAnalyzeQuery); sqlEditorController.execute(explainObject);
}, },
explain: function (sqlEditorController) { explain: function (sqlEditorController) {
let costEnabled = this._costsEnabled(); let costEnabled = this._costsEnabled();
let verbose = this._verbose(); let verbose = this._verbose();
let explainQuery = `EXPLAIN (FORMAT JSON, ANALYZE OFF, VERBOSE ${verbose}, COSTS ${costEnabled}, BUFFERS OFF, TIMING OFF) `; // let explainQuery = `EXPLAIN (FORMAT JSON, ANALYZE OFF, VERBOSE ${verbose}, COSTS ${costEnabled}, BUFFERS OFF, TIMING OFF) `;
const explainObject = {
format: 'json',
analyze: false,
verbose: verbose,
costs: costEnabled,
buffers: false,
timing: false,
summary: false,
};
this._clearMessageTab(); this._clearMessageTab();
sqlEditorController.execute(explainQuery); sqlEditorController.execute(explainObject);
}, },
download: function (sqlEditorController) { download: function (sqlEditorController) {

View File

@ -8,28 +8,32 @@
########################################################################## ##########################################################################
"""A blueprint module implementing the sqleditor frame.""" """A blueprint module implementing the sqleditor frame."""
import simplejson as json import codecs
import os import os
import pickle import pickle
import random import random
import codecs
from flask import Response, url_for, render_template, session, request,\ import simplejson as json
from flask import Response, url_for, render_template, session, request, \
current_app current_app
from flask_babel import gettext from flask_babel import gettext
from flask_security import login_required from flask_security import login_required
from config import PG_DEFAULT_DRIVER, ON_DEMAND_RECORD_COUNT
from pgadmin.misc.file_manager import Filemanager
from pgadmin.tools.sqleditor.command import QueryToolCommand from pgadmin.tools.sqleditor.command import QueryToolCommand
from pgadmin.tools.sqleditor.utils.constant_definition import ASYNC_OK, ASYNC_EXECUTION_ABORTED, \
CONNECTION_STATUS_MESSAGE_MAPPING, TX_STATUS_INERROR
from pgadmin.tools.sqleditor.utils.start_running_query import StartRunningQuery
from pgadmin.tools.sqleditor.utils.update_session_grid_transaction import update_session_grid_transaction
from pgadmin.utils import PgAdminModule from pgadmin.utils import PgAdminModule
from pgadmin.utils import get_storage_directory from pgadmin.utils import get_storage_directory
from pgadmin.utils.ajax import make_json_response, bad_request, \ from pgadmin.utils.ajax import make_json_response, bad_request, \
success_return, internal_server_error, unauthorized success_return, internal_server_error, unauthorized
from pgadmin.utils.driver import get_driver from pgadmin.utils.driver import get_driver
from pgadmin.utils.sqlautocomplete.autocomplete import SQLAutoComplete
from pgadmin.misc.file_manager import Filemanager
from pgadmin.utils.menu import MenuItem from pgadmin.utils.menu import MenuItem
from pgadmin.utils.exception import ConnectionLost from pgadmin.utils.exception import ConnectionLost
from pgadmin.utils.sqlautocomplete.autocomplete import SQLAutoComplete
from config import PG_DEFAULT_DRIVER, ON_DEMAND_RECORD_COUNT
MODULE_NAME = 'sqleditor' MODULE_NAME = 'sqleditor'
@ -39,28 +43,6 @@ try:
except ImportError: except ImportError:
from urllib.parse import unquote from urllib.parse import unquote
# Async Constants
ASYNC_OK = 1
ASYNC_READ_TIMEOUT = 2
ASYNC_WRITE_TIMEOUT = 3
ASYNC_NOT_CONNECTED = 4
ASYNC_EXECUTION_ABORTED = 5
# Transaction status constants
TX_STATUS_IDLE = 0
TX_STATUS__ACTIVE = 1
TX_STATUS_INTRANS = 2
TX_STATUS_INERROR = 3
# Connection status codes mapping
CONNECTION_STATUS_MESSAGE_MAPPING = dict([
(0, 'The session is idle and there is no current transaction.'),
(1, 'A command is currently in progress.'),
(2, 'The session is idle in a valid transaction block.'),
(3, 'The session is idle in a failed transaction block.'),
(4, 'The connection with the server is bad.')
])
class SqlEditorModule(PgAdminModule): class SqlEditorModule(PgAdminModule):
""" """
@ -376,13 +358,6 @@ def index():
) )
def update_session_grid_transaction(trans_id, data):
if 'gridData' in session:
grid_data = session['gridData']
grid_data[str(trans_id)] = data
session['gridData'] = grid_data
def check_transaction_status(trans_id): def check_transaction_status(trans_id):
""" """
This function is used to check the transaction id This function is used to check the transaction id
@ -458,7 +433,7 @@ def start_view_data(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
@ -486,7 +461,7 @@ def start_view_data(trans_id):
) )
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
# set fetched row count to 0 as we are executing query again. # set fetched row count to 0 as we are executing query again.
trans_obj.update_fetched_row_cnt(0) trans_obj.update_fetched_row_cnt(0)
session_obj['command_obj'] = pickle.dumps(trans_obj, -1) session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
@ -554,107 +529,19 @@ def start_query_tool(trans_id):
trans_id: unique transaction id trans_id: unique transaction id
""" """
if request.data: sql = extract_sql_from_network_parameters(request.data, request.args, request.form)
sql = json.loads(request.data, encoding='utf-8')
return StartRunningQuery(blueprint, current_app).execute(sql, trans_id, session)
def extract_sql_from_network_parameters(request_data, request_arguments, request_form_data):
if request_data:
sql_parameters = json.loads(request_data, encoding='utf-8')
if type(sql_parameters) is str:
return dict(sql=sql_parameters, explain_plan=None)
return sql_parameters
else: else:
sql = request.args or request.form return request_arguments or request_form_data
connect = True if 'connect' in request.args and \
request.args['connect'] == '1' else False
if 'gridData' not in session:
return make_json_response(
success=0,
errormsg=gettext('Transaction ID not found in the session.'),
info='DATAGRID_TRANSACTION_REQUIRED', status=404)
grid_data = session['gridData']
# Return from the function if transaction id not found
if str(trans_id) not in grid_data:
return make_json_response(
success=0,
errormsg=gettext('Transaction ID not found in the session.'),
info='DATAGRID_TRANSACTION_REQUIRED',
status=404)
# Fetch the object for the specified transaction id.
# Use pickle.loads function to get the command object
session_obj = grid_data[str(trans_id)]
trans_obj = pickle.loads(session_obj['command_obj'])
# set fetched row count to 0 as we are executing query again.
trans_obj.update_fetched_row_cnt(0)
can_edit = False
can_filter = False
if trans_obj is not None and session_obj is not None:
conn_id = trans_obj.conn_id
try:
manager = get_driver(
PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
conn = manager.connection(did=trans_obj.did, conn_id=conn_id,
auto_reconnect=False,
use_binary_placeholder=True,
array_to_string=True)
except ConnectionLost as e:
raise
except Exception as e:
current_app.logger.error(e)
return internal_server_error(errormsg=str(e))
# Connect to the Server if not connected.
if connect and not conn.connected():
status, msg = conn.connect()
if not status:
current_app.logger.error(msg)
return internal_server_error(errormsg=str(msg))
# on successful connection set the connection id to the
# transaction object
trans_obj.set_connection_id(conn_id)
# As we changed the transaction object we need to
# restore it and update the session variable.
session_obj['command_obj'] = pickle.dumps(trans_obj, -1)
update_session_grid_transaction(trans_id, session_obj)
# If auto commit is False and transaction status is Idle
# then call is_begin_not_required() function to check BEGIN
# is required or not.
if not trans_obj.auto_commit \
and conn.transaction_status() == TX_STATUS_IDLE \
and is_begin_required(sql):
conn.execute_void("BEGIN;")
# Execute sql asynchronously with params is None
# and formatted_error is True.
try:
status, result = conn.execute_async(sql)
except ConnectionLost as e:
raise
# If the transaction aborted for some reason and
# Auto RollBack is True then issue a rollback to cleanup.
trans_status = conn.transaction_status()
if trans_status == TX_STATUS_INERROR and trans_obj.auto_rollback:
conn.execute_void("ROLLBACK;")
can_edit = trans_obj.can_edit()
can_filter = trans_obj.can_filter()
else:
status = False
result = gettext(
'Either transaction object or session object not found.')
return make_json_response(
data={
'status': status, 'result': result,
'can_edit': can_edit, 'can_filter': can_filter,
'info_notifier_timeout': blueprint.info_notifier_timeout.get()
}
)
@blueprint.route( @blueprint.route(
@ -675,13 +562,13 @@ def preferences(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
# Call the set_auto_commit and set_auto_rollback method of # Call the set_auto_commit and set_auto_rollback method of
# transaction object # transaction object
trans_obj.set_auto_commit(blueprint.auto_commit.get()) trans_obj.set_auto_commit(blueprint.auto_commit.get())
@ -751,7 +638,7 @@ def poll(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
@ -779,7 +666,7 @@ def poll(trans_id):
if isinstance(trans_obj, QueryToolCommand): if isinstance(trans_obj, QueryToolCommand):
trans_status = conn.transaction_status() trans_status = conn.transaction_status()
if (trans_status == TX_STATUS_INERROR and if (trans_status == TX_STATUS_INERROR and
trans_obj.auto_rollback): trans_obj.auto_rollback):
conn.execute_void("ROLLBACK;") conn.execute_void("ROLLBACK;")
st, result = conn.async_fetchmany_2darray(ON_DEMAND_RECORD_COUNT) st, result = conn.async_fetchmany_2darray(ON_DEMAND_RECORD_COUNT)
@ -854,15 +741,15 @@ def poll(trans_id):
typname == 'character varying' typname == 'character varying'
): ):
typname = typname + '(' + \ typname = typname + '(' + \
str(col_info['internal_size']) + \ str(col_info['internal_size']) + \
')' ')'
elif ( elif (
typname == 'character[]' or typname == 'character[]' or
typname == 'character varying[]' typname == 'character varying[]'
): ):
typname = typname[:-2] + '(' + \ typname = typname[:-2] + '(' + \
str(col_info['internal_size']) + \ str(col_info['internal_size']) + \
')[]' ')[]'
col_info['type_name'] = typname col_info['type_name'] = typname
@ -913,7 +800,7 @@ def poll(trans_id):
if status == 'Success' and result is None: if status == 'Success' and result is None:
result = conn.status_message() result = conn.status_message()
if (result != 'SELECT 1' or result != 'SELECT 0') \ if (result != 'SELECT 1' or result != 'SELECT 0') \
and result is not None and additional_messages: and result is not None and additional_messages:
result = additional_messages + result result = additional_messages + result
return make_json_response( return make_json_response(
@ -954,7 +841,7 @@ def fetch(trans_id, fetch_all=None):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
@ -1018,7 +905,7 @@ def fetch_pg_types(columns_info, trans_obj):
if oids: if oids:
status, res = default_conn.execute_dict( status, res = default_conn.execute_dict(
u"SELECT oid, format_type(oid,null) as typname FROM pg_type " u"SELECT oid, format_type(oid, NULL) AS typname FROM pg_type "
u"WHERE oid IN %s ORDER BY oid;", [tuple(oids)] u"WHERE oid IN %s ORDER BY oid;", [tuple(oids)]
) )
@ -1077,17 +964,17 @@ def save(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
# If there is no primary key found then return from the function. # If there is no primary key found then return from the function.
if (len(session_obj['primary_keys']) <= 0 or len(changed_data) <= 0) \ if (len(session_obj['primary_keys']) <= 0 or len(changed_data) <= 0) \
and 'has_oids' not in session_obj: and 'has_oids' not in session_obj:
return make_json_response( return make_json_response(
data={ data={
'status': False, 'status': False,
@ -1146,12 +1033,12 @@ def get_filter(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = trans_obj.get_filter() res = trans_obj.get_filter()
else: else:
@ -1183,13 +1070,13 @@ def apply_filter(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
status, res = trans_obj.set_filter(filter_sql) status, res = trans_obj.set_filter(filter_sql)
@ -1226,13 +1113,13 @@ def append_filter_inclusive(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
filter_sql = '' filter_sql = ''
@ -1282,12 +1169,12 @@ def append_filter_exclusive(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
filter_sql = '' filter_sql = ''
@ -1335,13 +1222,13 @@ def remove_filter(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
@ -1380,13 +1267,13 @@ def set_limit(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
@ -1501,13 +1388,13 @@ def get_object_name(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = trans_obj.object_name res = trans_obj.object_name
else: else:
status = False status = False
@ -1538,13 +1425,13 @@ def set_auto_commit(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
@ -1587,13 +1474,13 @@ def set_auto_rollback(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
res = None res = None
@ -1643,13 +1530,13 @@ def auto_complete(trans_id):
check_transaction_status(trans_id) check_transaction_status(trans_id)
if error_msg == gettext( if error_msg == gettext(
'Transaction ID not found in the session.'): 'Transaction ID not found in the session.'):
return make_json_response(success=0, errormsg=error_msg, return make_json_response(success=0, errormsg=error_msg,
info='DATAGRID_TRANSACTION_REQUIRED', info='DATAGRID_TRANSACTION_REQUIRED',
status=404) status=404)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
# Create object of SQLAutoComplete class and pass connection object # Create object of SQLAutoComplete class and pass connection object
auto_complete_obj = SQLAutoComplete( auto_complete_obj = SQLAutoComplete(
@ -1680,165 +1567,6 @@ def script():
) )
def _next_word(query):
    """Return ``(word, remainder)`` for *query*.

    Strips leading/trailing whitespace, then takes the longest leading run
    of alphabetic characters as the word (so "beginx" is not "begin").
    The word may be empty when the query is empty or starts with a
    non-alphabetic character.
    """
    query = query.strip()
    word_len = 0
    while word_len < len(query) and query[word_len].isalpha():
        word_len += 1
    return query[:word_len], query[word_len:]


def is_begin_required(query):
    """Decide whether *query* must be wrapped in an explicit BEGIN.

    Returns False for statements that either control transactions
    themselves (BEGIN/COMMIT/ROLLBACK/...) or are not allowed inside a
    transaction block (VACUUM, CREATE DATABASE, DISCARD ALL, ...);
    returns True for everything else.  Mirrors psql's command_no_begin()
    logic.
    """
    keyword, query = _next_word(query)
    kw = keyword.lower()

    # Transaction control commands. These should include every keyword that
    # gives rise to a TransactionStmt in the backend grammar, except for the
    # savepoint-related commands.
    #
    # (We assume that START must be START TRANSACTION, since there is
    # presently no other "START foo" command.)
    # SELECT is also listed: it is protected from dirty reads and hence
    # doesn't require a transaction.
    if kw in ("abort", "begin", "start", "select", "commit", "end",
              "rollback"):
        return False

    if kw == "prepare":
        # PREPARE TRANSACTION is a TC command, PREPARE foo is not.
        second, _ = _next_word(query)
        return second.lower() != "transaction"

    # Commands not allowed within transactions. The statements checked for
    # here should be exactly those that call PreventTransactionChain() in
    # the backend.
    if kw == "vacuum":
        return False

    if kw == "cluster":
        # CLUSTER with any arguments is allowed in transactions; bare
        # CLUSTER is not.  (Guarding via _next_word also fixes an
        # IndexError the old code raised on a bare "cluster" statement.)
        second, _ = _next_word(query)
        return second != ""

    if kw == "create":
        second, query = _next_word(query)
        second = second.lower()
        if second in ("database", "tablespace"):
            return False
        # CREATE [UNIQUE] INDEX CONCURRENTLY isn't allowed in xacts.
        # (The old code tested for the impossible "CREATE CLUSTER" here,
        # so the UNIQUE variant was never skipped.)
        if second == "unique":
            second, query = _next_word(query)
            second = second.lower()
        if second == "index":
            third, _ = _next_word(query)
            if third.lower() == "concurrently":
                return False
        return True

    if kw == "alter":
        # ALTER SYSTEM isn't allowed in xacts.
        second, _ = _next_word(query)
        return second.lower() != "system"

    # Note: these tests will match DROP SYSTEM and REINDEX TABLESPACE,
    # which aren't really valid commands so we don't care much. The other
    # four possible matches are correct.
    if kw in ("drop", "reindex"):
        second, _ = _next_word(query)
        return second.lower() not in ("database", "system", "tablespace")

    # DISCARD ALL isn't allowed in xacts, but other variants are allowed.
    if kw == "discard":
        second, _ = _next_word(query)
        return second.lower() != "all"

    return True
@blueprint.route('/load_file/', methods=["PUT", "POST"], endpoint='load_file') @blueprint.route('/load_file/', methods=["PUT", "POST"], endpoint='load_file')
@login_required @login_required
def load_file(): def load_file():
@ -1865,9 +1593,9 @@ def load_file():
) )
status, err_msg, is_binary, \ status, err_msg, is_binary, \
is_startswith_bom, enc = Filemanager.check_file_for_bom_and_binary( is_startswith_bom, enc = Filemanager.check_file_for_bom_and_binary(
file_path file_path
) )
if not status: if not status:
return internal_server_error( return internal_server_error(
@ -1960,10 +1688,10 @@ def save_file():
def start_query_download_tool(trans_id): def start_query_download_tool(trans_id):
sync_conn = None sync_conn = None
status, error_msg, conn, trans_obj, \ status, error_msg, conn, trans_obj, \
session_obj = check_transaction_status(trans_id) session_obj = check_transaction_status(trans_id)
if status and conn is not None \ if status and conn is not None \
and trans_obj is not None and session_obj is not None: and trans_obj is not None and session_obj is not None:
data = request.args if request.args else None data = request.args if request.args else None
try: try:
@ -2063,7 +1791,7 @@ def query_tool_status(trans_id):
TRANSACTION_STATUS_UNKNOWN = 4 TRANSACTION_STATUS_UNKNOWN = 4
""" """
status, error_msg, conn, trans_obj, \ status, error_msg, conn, trans_obj, \
session_obj = check_transaction_status(trans_id) session_obj = check_transaction_status(trans_id)
if not status and error_msg and type(error_msg) == str: if not status and error_msg and type(error_msg) == str:
return internal_server_error( return internal_server_error(

View File

@ -12,6 +12,8 @@ define('tools.querytool', [
'sources/selection/xcell_selection_model', 'sources/selection/xcell_selection_model',
'sources/selection/set_staged_rows', 'sources/selection/set_staged_rows',
'sources/sqleditor_utils', 'sources/sqleditor_utils',
'sources/sqleditor/execute_query',
'sources/sqleditor/is_new_transaction_required',
'sources/history/index.js', 'sources/history/index.js',
'sources/../jsx/history/query_history', 'sources/../jsx/history/query_history',
'react', 'react-dom', 'react', 'react-dom',
@ -28,7 +30,8 @@ define('tools.querytool', [
], function( ], function(
babelPollyfill, gettext, url_for, $, _, S, alertify, pgAdmin, Backbone, codemirror, babelPollyfill, gettext, url_for, $, _, S, alertify, pgAdmin, Backbone, codemirror,
pgExplain, GridSelector, ActiveCellCapture, clipboard, copyData, RangeSelectionHelper, handleQueryOutputKeyboardEvent, pgExplain, GridSelector, ActiveCellCapture, clipboard, copyData, RangeSelectionHelper, handleQueryOutputKeyboardEvent,
XCellSelectionModel, setStagedRows, SqlEditorUtils, HistoryBundle, queryHistory, React, ReactDOM, XCellSelectionModel, setStagedRows, SqlEditorUtils, ExecuteQuery, transaction,
HistoryBundle, queryHistory, React, ReactDOM,
keyboardShortcuts, queryToolActions, Datagrid) { keyboardShortcuts, queryToolActions, Datagrid) {
/* Return back, this has been called more than once */ /* Return back, this has been called more than once */
if (pgAdmin.SqlEditor) if (pgAdmin.SqlEditor)
@ -43,12 +46,6 @@ define('tools.querytool', [
var is_query_running = false; var is_query_running = false;
var is_new_transaction_required = function(xhr) {
return xhr.status == 404 && xhr.responseJSON &&
xhr.responseJSON.info &&
xhr.responseJSON.info == 'DATAGRID_TRANSACTION_REQUIRED';
};
// Defining Backbone view for the sql grid. // Defining Backbone view for the sql grid.
var SQLEditorView = Backbone.View.extend({ var SQLEditorView = Backbone.View.extend({
initialize: function(opts) { initialize: function(opts) {
@ -485,7 +482,7 @@ define('tools.querytool', [
if (pgAdmin.Browser.UserManagement.is_pga_login_required(e)) { if (pgAdmin.Browser.UserManagement.is_pga_login_required(e)) {
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
return self.init_transaction(); return self.init_transaction();
} }
}, },
@ -2188,7 +2185,7 @@ define('tools.querytool', [
pgAdmin.Browser.UserManagement.pga_login(); pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_run_query', []); self.save_state('_run_query', []);
self.init_transaction(); self.init_transaction();
} }
@ -2256,74 +2253,8 @@ define('tools.querytool', [
* 'Success' then call the render method to render the data. * 'Success' then call the render method to render the data.
*/ */
_poll: function() { _poll: function() {
var self = this; const executeQuery = new ExecuteQuery.ExecuteQuery(this, pgAdmin.Browser.UserManagement);
executeQuery.delayedPoll(this);
setTimeout(
function() {
$.ajax({
url: url_for('sqleditor.poll', {
'trans_id': self.transId,
}),
method: 'GET',
success: function(res) {
if (res.data.status === 'Success') {
self.trigger(
'pgadmin-sqleditor:loading-icon:message',
gettext('Loading data from the database server and rendering...')
);
self.call_render_after_poll(res.data);
} else if (res.data.status === 'Busy') {
// If status is Busy then poll the result by recursive call to the poll function
self._poll();
is_query_running = true;
if (res.data.result) {
self.update_msg_history(res.data.status, res.data.result, false);
}
} else if (res.data.status === 'NotConnected') {
self.trigger('pgadmin-sqleditor:loading-icon:hide');
// Enable/Disable query tool button only if is_query_tool is true.
if (self.is_query_tool) {
self.disable_tool_buttons(false);
$('#btn-cancel-query').prop('disabled', true);
}
self.update_msg_history(false, res.data.result, true);
} else if (res.data.status === 'Cancel') {
self.trigger('pgadmin-sqleditor:loading-icon:hide');
self.update_msg_history(false, 'Execution Cancelled!', true);
}
},
error: function(e) {
// Enable/Disable query tool button only if is_query_tool is true.
self.resetQueryHistoryObject(self);
self.trigger('pgadmin-sqleditor:loading-icon:hide');
if (self.is_query_tool) {
self.disable_tool_buttons(false);
$('#btn-cancel-query').prop('disabled', true);
}
if (e.readyState == 0) {
self.update_msg_history(false,
gettext('Not connected to the server or the connection to the server has been closed.')
);
return;
}
if (pgAdmin.Browser.UserManagement.is_pga_login_required(e)) {
return pgAdmin.Browser.UserManagement.pga_login();
}
var msg = e.responseText;
if (e.responseJSON != undefined &&
e.responseJSON.errormsg != undefined)
msg = e.responseJSON.errormsg;
self.update_msg_history(false, msg);
// Highlight the error in the sql panel
self._highlight_error(msg);
},
});
}, self.POLL_FALLBACK_TIME());
}, },
/* This function is used to create the backgrid columns, /* This function is used to create the backgrid columns,
@ -2941,7 +2872,7 @@ define('tools.querytool', [
pgAdmin.Browser.UserManagement.pga_login(); pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_save', [view, controller, save_as]); self.save_state('_save', [view, controller, save_as]);
self.init_transaction(); self.init_transaction();
} }
@ -3267,7 +3198,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_show_filter', []); self.save_state('_show_filter', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3356,7 +3287,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_include_filter', []); self.save_state('_include_filter', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3447,7 +3378,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_exclude_filter', []); self.save_state('_exclude_filter', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3517,7 +3448,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_remove_filter', []); self.save_state('_remove_filter', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3592,7 +3523,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_apply_filter', []); self.save_state('_apply_filter', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3747,7 +3678,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_set_limit', []); self.save_state('_set_limit', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -3795,10 +3726,7 @@ define('tools.querytool', [
// and execute the query. // and execute the query.
execute: function(explain_prefix) { execute: function(explain_prefix) {
var self = this, var self = this,
sql = '', sql = '';
url = url_for('sqleditor.query_tool_start', {
'trans_id': self.transId,
});
self.has_more_rows = false; self.has_more_rows = false;
self.fetching_rows = false; self.fetching_rows = false;
@ -3812,109 +3740,8 @@ define('tools.querytool', [
else else
sql = self.gridView.query_tool_obj.getValue(); sql = self.gridView.query_tool_obj.getValue();
// If it is an empty query, do nothing. const executeQuery = new ExecuteQuery.ExecuteQuery(this, pgAdmin.Browser.UserManagement);
if (sql.length <= 0) return; executeQuery.execute(sql, explain_prefix);
self.trigger(
'pgadmin-sqleditor:loading-icon:show',
gettext('Initializing query execution...')
);
$('#btn-flash').prop('disabled', true);
if (explain_prefix != undefined &&
!S.startsWith(sql.trim().toUpperCase(), 'EXPLAIN')) {
sql = explain_prefix + ' ' + sql;
}
self.query_start_time = new Date();
self.query = sql;
self.rows_affected = 0;
self._init_polling_flags();
self.disable_tool_buttons(true);
$('#btn-cancel-query').prop('disabled', false);
if (arguments.length > 0 &&
arguments[arguments.length - 1] == 'connect') {
url += '?connect=1';
}
$.ajax({
url: url,
method: 'POST',
contentType: 'application/json',
data: JSON.stringify(sql),
success: function(res) {
// Remove marker
if (self.gridView.marker) {
self.gridView.marker.clear();
delete self.gridView.marker;
self.gridView.marker = null;
// Remove already existing marker
self.gridView.query_tool_obj.removeLineClass(self.marked_line_no, 'wrap', 'CodeMirror-activeline-background');
}
if (res.data.status) {
self.trigger(
'pgadmin-sqleditor:loading-icon:message',
gettext('Waiting for the query execution to complete...')
);
self.can_edit = res.data.can_edit;
self.can_filter = res.data.can_filter;
self.info_notifier_timeout = res.data.info_notifier_timeout;
// If status is True then poll the result.
self._poll();
} else {
self.trigger('pgadmin-sqleditor:loading-icon:hide');
self.disable_tool_buttons(false);
$('#btn-cancel-query').prop('disabled', true);
self.update_msg_history(false, res.data.result);
// Highlight the error in the sql panel
self._highlight_error(res.data.result);
}
},
error: function(e) {
self.trigger('pgadmin-sqleditor:loading-icon:hide');
self.disable_tool_buttons(false);
$('#btn-cancel-query').prop('disabled', true);
if (e.readyState == 0) {
self.update_msg_history(false,
gettext('Not connected to the server or the connection to the server has been closed.')
);
return;
}
if (pgAdmin.Browser.UserManagement.is_pga_login_required(e)) {
self.save_state('execute', [explain_prefix]);
pgAdmin.Browser.UserManagement.pga_login();
}
if(is_new_transaction_required(e)) {
self.save_state('execute', [explain_prefix]);
self.init_transaction();
}
var msg = e.responseText;
if (e.responseJSON != undefined) {
if(e.responseJSON.errormsg != undefined) {
msg = e.responseJSON.errormsg;
}
if(e.status == 503 && e.responseJSON.info != undefined &&
e.responseJSON.info == 'CONNECTION_LOST') {
setTimeout(function() {
self.save_state('execute', [explain_prefix]);
self.handle_connection_lost(false, e);
});
}
}
self.update_msg_history(false, msg);
},
});
}, },
/* This function is used to highlight the error line and /* This function is used to highlight the error line and
@ -4078,7 +3905,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_auto_rollback', []); self.save_state('_auto_rollback', []);
self.init_transaction(); self.init_transaction();
} }
@ -4139,7 +3966,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_auto_commit', []); self.save_state('_auto_commit', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -4201,7 +4028,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_explain_verbose', []); self.save_state('_explain_verbose', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -4250,7 +4077,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_explain_costs', []); self.save_state('_explain_costs', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -4298,7 +4125,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_explain_buffers', []); self.save_state('_explain_buffers', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -4345,7 +4172,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('_explain_timing', []); self.save_state('_explain_timing', []);
return self.init_transaction(); return self.init_transaction();
} }
@ -4379,6 +4206,10 @@ define('tools.querytool', [
return is_query_running; return is_query_running;
}, },
setIsQueryRunning: function(value) {
is_query_running = value;
},
/* /*
* This function get explain options and auto rollback/auto commit * This function get explain options and auto rollback/auto commit
* values from preferences * values from preferences
@ -4455,7 +4286,7 @@ define('tools.querytool', [
return pgAdmin.Browser.UserManagement.pga_login(); return pgAdmin.Browser.UserManagement.pga_login();
} }
if(is_new_transaction_required(e)) { if(transaction.is_new_transaction_required(e)) {
self.save_state('get_preferences', []); self.save_state('get_preferences', []);
return self.init_transaction(); return self.init_transaction();
} }

View File

@ -0,0 +1,23 @@
{#
  EXPLAIN wrapper for PostgreSQL 10+ servers.  Renders only the options
  supplied by the caller; SUMMARY is new in PostgreSQL 10, TIMING in 9.2.
  NOTE(review): if BUFFERS is omitted while an earlier option is present,
  the trailing comma of that option is kept in the output -- callers
  currently pass the full option set, so this is not hit in practice;
  confirm before relying on partial option dicts.
#}
EXPLAIN (
{% if format %}
FORMAT {{ format.upper() }},
{% endif %}
{% if analyze is defined %}
ANALYZE {{ analyze }},
{% endif %}
{% if verbose is defined %}
VERBOSE {{ verbose }},
{% endif %}
{% if costs is defined %}
COSTS {{ costs }},
{% endif %}
{% if timing is defined %}
TIMING {{ timing }},
{% endif %}
{% if summary is defined %}
SUMMARY {{ summary }},
{% endif %}
{% if buffers is defined %}
BUFFERS {{ buffers }}
{% endif %}
) {{ sql }}

View File

@ -0,0 +1,20 @@
{#
  EXPLAIN wrapper for PostgreSQL 9.2+ servers.  Same as the default
  template plus the TIMING option (added in 9.2); SUMMARY is only
  available in the 10_plus template.
  NOTE(review): if BUFFERS is omitted while an earlier option is present,
  the trailing comma of that option is kept in the output -- callers
  currently pass the full option set; confirm before relying on partial
  option dicts.
#}
EXPLAIN (
{% if format %}
FORMAT {{ format.upper() }},
{% endif %}
{% if analyze is defined %}
ANALYZE {{ analyze }},
{% endif %}
{% if verbose is defined %}
VERBOSE {{ verbose }},
{% endif %}
{% if costs is defined %}
COSTS {{ costs }},
{% endif %}
{% if timing is defined %}
TIMING {{ timing }},
{% endif %}
{% if buffers is defined %}
BUFFERS {{ buffers }}
{% endif %}
) {{ sql }}

View File

@ -0,0 +1,17 @@
{#
  EXPLAIN wrapper for the 'default' (pre-9.2 PostgreSQL) template
  directory: supports FORMAT/ANALYZE/VERBOSE/COSTS/BUFFERS.  TIMING and
  SUMMARY are handled by the 9.2_plus / 10_plus templates.
  NOTE(review): if BUFFERS is omitted while an earlier option is present,
  the trailing comma of that option is kept in the output -- callers
  currently pass the full option set; confirm before relying on partial
  option dicts.
#}
EXPLAIN (
{% if format %}
FORMAT {{ format.upper() }},
{% endif %}
{% if analyze is defined %}
ANALYZE {{ analyze }},
{% endif %}
{% if verbose is defined %}
VERBOSE {{ verbose }},
{% endif %}
{% if costs is defined %}
COSTS {{ costs }},
{% endif %}
{% if buffers is defined %}
BUFFERS {{ buffers }}
{% endif %}
) {{ sql }}

View File

@ -0,0 +1,5 @@
{#
  EXPLAIN wrapper for Greenplum (gpdb) servers.  Greenplum does not
  accept the parenthesised EXPLAIN option list, so only plain
  EXPLAIN [ANALYZE] is emitted and all other options are ignored.
#}
EXPLAIN
{% if analyze %}
ANALYZE
{% endif %}
{{ sql }}

View File

@ -0,0 +1,8 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

View File

@ -0,0 +1,152 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import os
from flask import Flask, render_template
from jinja2 import FileSystemLoader
from pgadmin import VersionedTemplateLoader
from pgadmin.utils.route import BaseTestGenerator
class TestExplainPlanTemplates(BaseTestGenerator):
    """Render each versioned explain_plan.sql template and check the
    generated EXPLAIN statement, including the Greenplum variant that
    only supports plain EXPLAIN [ANALYZE].
    """

    # Each scenario provides: the template path relative to the sqleditor
    # templates directory, the keyword arguments handed to
    # render_template(), and the expected output after stripping newlines.
    scenarios = [
        (
            'When rendering Postgres 9.0 template, '
            'when passing all parameters,'
            'it returns the explain plan with all parameters',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', 'default', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    format='xml',
                    analyze=True,
                    verbose=True,
                    costs=False,
                    buffers=True
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN '
                                      '( FORMAT XML, ANALYZE True, '
                                      'VERBOSE True, '
                                      'COSTS False, BUFFERS True) '
                                      'select * from places'
            )
        ),
        (
            'When rendering Postgres 9.0 template, '
            'when not all parameters are present,'
            'it returns the explain plan with the present parameters',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', 'default', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    format='json',
                    buffers=True
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN '
                                      '( FORMAT JSON, BUFFERS True) '
                                      'select * from places'
            )
        ),
        (
            'When rendering Postgres 9.2 template, '
            'when timing is present,'
            'it returns the explain plan with timing',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', '9.2_plus', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    format='json',
                    buffers=True,
                    timing=False
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN '
                                      '( FORMAT JSON, TIMING False, '
                                      'BUFFERS True) select * from places'
            )
        ),
        (
            'When rendering Postgres 10 template, '
            'when summary is present,'
            'it returns the explain plan with summary',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', '10_plus', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    format='yaml',
                    buffers=True,
                    timing=False,
                    summary=True
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN '
                                      '( FORMAT YAML, TIMING False, '
                                      'SUMMARY True, BUFFERS True) '
                                      'select * from places'
            )
        ),
        # Greenplum ignores every option except ANALYZE.
        (
            'When rendering GreenPlum 5.3 template, '
            'when all parameters are present,'
            'it returns the explain without parameters',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', 'gpdb_5.0_plus', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    format='json',
                    buffers=True
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN select * from places'
            )
        ),
        (
            'When rendering GreenPlum 5.3 template, '
            'when analyze is true,'
            'it returns the explain analyze',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', 'gpdb_5.0_plus', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    analyze=True
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN ANALYZE select * from places'
            )
        ),
        (
            'When rendering GreenPlum 5.3 template, '
            'when analyze is false,'
            'it returns the only explain',
            dict(
                template_path=os.path.join(
                    'sqleditor', 'sql', 'gpdb_5.0_plus', 'explain_plan.sql'),
                input_parameters=dict(
                    sql='select * from places',
                    analyze=False
                ),
                sql_statement='select * from places',
                expected_return_value='EXPLAIN select * from places'
            )
        ),
    ]

    def setUp(self):
        # Use the versioned loader so the server-type/version specific
        # template directories resolve the same way as in pgAdmin proper.
        self.loader = VersionedTemplateLoader(FakeApp())

    def runTest(self):
        # Render inside an app context because render_template() needs a
        # current Flask application; newlines are stripped so the template's
        # layout does not affect the comparison.
        with FakeApp().app_context():
            result = render_template(
                self.template_path, **self.input_parameters)
            self.assertEqual(
                str(result).replace("\n", ""), self.expected_return_value)
class FakeApp(Flask):
    """Minimal Flask application whose Jinja loader points directly at the
    sqleditor templates directory, so templates can be rendered in tests
    without bootstrapping the full pgAdmin app."""

    def __init__(self):
        super(FakeApp, self).__init__("")
        # Resolve ../templates relative to this test file.
        self.jinja_loader = FileSystemLoader(
            os.path.dirname(os.path.realpath(__file__)) + "/../templates"
        )

View File

@ -0,0 +1,59 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from werkzeug.datastructures import ImmutableMultiDict
from pgadmin.tools.sqleditor import extract_sql_from_network_parameters
from pgadmin.utils.route import BaseTestGenerator
class ExtractSQLFromNetworkParametersTest(BaseTestGenerator):
    """Verify that extract_sql_from_network_parameters() decodes the
    request payload into a dict carrying the raw SQL plus the optional
    explain-plan options.

    (The previous docstring described change-password scenarios; that was
    a copy/paste leftover from another test module.)
    """
    scenarios = [
        # A plain JSON string payload: SQL only, no explain plan.
        ('Single string in the payload', dict(
            request_strigified_data='"some sql"',
            request_arguments=ImmutableMultiDict(),
            request_form_data=ImmutableMultiDict(),
            expected_result=dict(sql='some sql', explain_plan=None)
        )),
        # A JSON object payload: SQL plus a full explain-plan option set.
        ('Payload that requests explain plan using json', dict(
            request_strigified_data='{"sql": "some sql", "explain_plan": '
                                    '{"format": "json", "analyze": false, '
                                    '"verbose": false, "costs": false, '
                                    '"buffers": false, "timing": false}}',
            request_arguments=ImmutableMultiDict(),
            request_form_data=ImmutableMultiDict(),
            expected_result=dict(
                sql='some sql',
                explain_plan=dict(
                    format='json',
                    analyze=False,
                    verbose=False,
                    buffers=False,
                    costs=False,
                    timing=False
                )
            )
        ))
    ]

    def runTest(self):
        """Decode the payload and compare against the expected dict."""
        result = extract_sql_from_network_parameters(
            self.request_strigified_data,
            self.request_arguments,
            self.request_form_data
        )

        self.assertEquals(result, self.expected_result)

View File

@ -0,0 +1,38 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import sys
from pgadmin.utils.route import BaseTestGenerator
from pgadmin.tools.sqleditor import StartRunningQuery
if sys.version_info < (3, 3):
from mock import patch, ANY
else:
from unittest.mock import patch, ANY
class StartQueryTool(BaseTestGenerator):
    """
    Ensures that the call to the backend to start running a query
    calls the needed functions: the POST payload is first normalised by
    extract_sql_from_network_parameters() and then handed, together with
    the transaction id, to StartRunningQuery.execute().
    """

    @patch('pgadmin.tools.sqleditor.extract_sql_from_network_parameters')
    def runTest(self, extract_sql_from_network_parameters_mock):
        """Check correct function is called to handle to run query."""
        extract_sql_from_network_parameters_mock.return_value = \
            'transformed sql'

        # Stub out the heavy execute() path; only the wiring is under test.
        with patch.object(StartRunningQuery,
                          'execute',
                          return_value='some result'
                          ) as StartRunningQuery_execute_mock:
            response = self.tester.post(
                '/sqleditor/query_tool/start/1234',
                data='"some sql statement"')

            self.assertEquals(response.status, '200 OK')
            # The view returns whatever StartRunningQuery.execute() returns.
            self.assertEquals(response.data, 'some result')
            StartRunningQuery_execute_mock.assert_called_with(
                'transformed sql', 1234, ANY)
            extract_sql_from_network_parameters_mock.assert_called_with(
                '"some sql statement"', ANY, ANY)

View File

@ -0,0 +1,14 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from .constant_definition import *
from .is_begin_required import is_begin_required
from .update_session_grid_transaction import update_session_grid_transaction
from .start_running_query import *
from .apply_explain_plan_wrapper import *

View File

@ -0,0 +1,24 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Apply Explain plan wrapper to sql object."""
from flask import render_template
from pgadmin.utils.compile_template_name import compile_template_name
def apply_explain_plan_wrapper_if_needed(manager, sql):
    """Wrap the incoming SQL in an EXPLAIN statement when explain-plan
    options are present.

    ``sql`` is a dict with a 'sql' key and an optional 'explain_plan'
    dict of options.  The template is selected through
    compile_template_name() so the server type (e.g. Greenplum) and
    version pick the right EXPLAIN variant.  Returns the (possibly
    wrapped) SQL string.
    """
    explain_plan = sql.get('explain_plan')
    if not explain_plan:
        # No options requested: hand back the raw statement untouched.
        return sql['sql']

    template_path = compile_template_name(
        'sqleditor/sql',
        'explain_plan.sql',
        manager.server_type,
        manager.version,
    )
    return render_template(template_path, sql=sql['sql'], **explain_plan)

View File

@ -0,0 +1,32 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Definition of constants for SQLEditor."""
# Async Constants
ASYNC_OK = 1
ASYNC_READ_TIMEOUT = 2
ASYNC_WRITE_TIMEOUT = 3
ASYNC_NOT_CONNECTED = 4
ASYNC_EXECUTION_ABORTED = 5
# Transaction status constants
TX_STATUS_IDLE = 0
TX_STATUS__ACTIVE = 1
TX_STATUS_INTRANS = 2
TX_STATUS_INERROR = 3
# Connection status codes mapping
CONNECTION_STATUS_MESSAGE_MAPPING = dict({
0: 'The session is idle and there is no current transaction.',
1: 'A command is currently in progress.',
2: 'The session is idle in a valid transaction block.',
3: 'The session is idle in a failed transaction block.',
4: 'The connection with the server is bad.'
})

View File

@ -0,0 +1,169 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Check if requires BEGIN in the current query."""
def _split_leading_word(query):
    # Helper: split off the leading alphabetic keyword of the statement.
    # Returns (keyword lower-cased, remaining text). An empty keyword means
    # the text was empty or started with a non-alphabetic character.
    query = query.strip()
    word_len = 0
    while word_len < len(query) and query[word_len].isalpha():
        word_len += 1
    return query[:word_len].lower(), query[word_len:]


def is_begin_required(query):
    """Decide whether *query* must be preceded by an explicit BEGIN.

    Mirrors psql's command_no_begin(): returns False for transaction-control
    commands and for statements PostgreSQL forbids inside a transaction
    block (VACUUM, CREATE DATABASE, DISCARD ALL, ...); True otherwise.
    """
    keyword, rest = _split_leading_word(query)

    # Transaction control commands. These should include every keyword that
    # gives rise to a TransactionStmt in the backend grammar, except for the
    # savepoint-related commands.
    #
    # (We assume that START must be START TRANSACTION, since there is
    # presently no other "START foo" command.)
    # SELECT is included too: it is protected from dirty reads and hence
    # doesn't require a transaction.
    if keyword in ('abort', 'begin', 'start', 'select', 'commit', 'end',
                   'rollback'):
        return False
    if keyword == 'prepare':
        # PREPARE TRANSACTION is a TC command, PREPARE foo is not.
        second, _ = _split_leading_word(rest)
        return second != 'transaction'

    # Commands not allowed within transactions. The statements checked for
    # here should be exactly those that call PreventTransactionChain() in
    # the backend.
    if keyword == 'vacuum':
        return False
    if keyword == 'cluster':
        # CLUSTER with any arguments is allowed in transactions; a bare
        # CLUSTER is not. (Bug fix: the bare form previously raised
        # IndexError by indexing into an empty string.)
        second, _ = _split_leading_word(rest)
        return second != ''
    if keyword == 'create':
        second, rest2 = _split_leading_word(rest)
        if second in ('database', 'tablespace'):
            return False
        # CREATE [UNIQUE] INDEX CONCURRENTLY isn't allowed in xacts.
        # (Bug fix: the optional UNIQUE is skipped here; the previous code
        # tested for a "cluster" keyword instead, so CREATE INDEX
        # CONCURRENTLY was never detected.)
        if second == 'unique':
            second, rest2 = _split_leading_word(rest2)
        if second == 'index':
            third, _ = _split_leading_word(rest2)
            if third == 'concurrently':
                return False
        return True
    if keyword == 'alter':
        # ALTER SYSTEM isn't allowed in xacts.
        second, _ = _split_leading_word(rest)
        return second != 'system'

    # Note: these tests will match DROP SYSTEM and REINDEX TABLESPACE, which
    # aren't really valid commands so we don't care much. The other four
    # possible matches are correct.
    if keyword in ('drop', 'reindex'):
        second, _ = _split_leading_word(rest)
        return second not in ('database', 'system', 'tablespace')

    # DISCARD ALL isn't allowed in xacts, but other variants are allowed.
    if keyword == 'discard':
        second, _ = _split_leading_word(rest)
        return second != 'all'

    # Any other statement gets wrapped in a transaction.
    return True

View File

@ -0,0 +1,172 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Start executing the query in async mode."""
import pickle
import random
from flask import Response
from flask_babel import gettext
from config import PG_DEFAULT_DRIVER
from pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper import apply_explain_plan_wrapper_if_needed
from pgadmin.tools.sqleditor.utils.update_session_grid_transaction import update_session_grid_transaction
from pgadmin.tools.sqleditor.utils.is_begin_required import is_begin_required
from pgadmin.tools.sqleditor.utils.constant_definition import TX_STATUS_IDLE, TX_STATUS_INERROR
from pgadmin.utils.ajax import make_json_response, internal_server_error
from pgadmin.utils.driver import get_driver
from pgadmin.utils.exception import ConnectionLost
class StartRunningQuery:
    """Start asynchronous execution of a SQL statement for a query-tool
    transaction.

    execute() restores the transaction's command object from the HTTP
    session, obtains (and if needed opens) a database connection, optionally
    wraps the statement in an EXPLAIN template, and kicks off
    conn.execute_async().
    """
    def __init__(self, blueprint_object, logger):
        # Only initialised here; execute() receives the session explicitly.
        self.http_session = None
        self.blueprint_object = blueprint_object
        # Random id for a brand-new connection; replaced by the
        # transaction's existing conn_id in __retrieve_connection_id().
        self.connection_id = str(random.randint(1, 9999999))
        self.logger = logger
    def execute(self, sql, trans_id, http_session):
        """Run *sql* for transaction *trans_id* and return a JSON response.

        sql is a dict carrying the statement under 'sql' and optional
        EXPLAIN options under 'explain_plan'. On session-lookup failure the
        error Response from retrieve_session_information is returned as-is.
        """
        session_obj = StartRunningQuery.retrieve_session_information(http_session, trans_id)
        # retrieve_session_information returns a Response on lookup failure.
        if type(session_obj) is Response:
            return session_obj
        transaction_object = pickle.loads(session_obj['command_obj'])
        can_edit = False
        can_filter = False
        if transaction_object is not None and session_obj is not None:
            # set fetched row count to 0 as we are executing query again.
            transaction_object.update_fetched_row_cnt(0)
            self.__retrieve_connection_id(transaction_object)
            try:
                manager = get_driver(
                    PG_DEFAULT_DRIVER).connection_manager(transaction_object.sid)
                conn = manager.connection(did=transaction_object.did, conn_id=self.connection_id,
                                          auto_reconnect=False,
                                          use_binary_placeholder=True,
                                          array_to_string=True)
            except ConnectionLost:
                # Let the caller's ConnectionLost handling take over.
                raise
            except Exception as e:
                self.logger.error(e)
                return internal_server_error(errormsg=str(e))
            # Connect to the Server if not connected.
            if not conn.connected():
                status, msg = conn.connect()
                if not status:
                    self.logger.error(msg)
                    return internal_server_error(errormsg=str(msg))
            effective_sql_statement = apply_explain_plan_wrapper_if_needed(manager, sql)
            result, status = self.__execute_query(
                conn,
                session_obj,
                effective_sql_statement,
                trans_id,
                transaction_object
            )
            can_edit = transaction_object.can_edit()
            can_filter = transaction_object.can_filter()
        else:
            status = False
            result = gettext(
                'Either transaction object or session object not found.')
        return make_json_response(
            data={
                'status': status, 'result': result,
                'can_edit': can_edit, 'can_filter': can_filter,
                'info_notifier_timeout': self.blueprint_object.info_notifier_timeout.get()
            }
        )
    def __retrieve_connection_id(self, trans_obj):
        # Reuse the transaction's existing connection id when it has one.
        conn_id = trans_obj.conn_id
        # if conn_id is None then we will have to create a new connection
        if conn_id is not None:
            self.connection_id = conn_id
    def __execute_query(self, conn, session_obj, sql, trans_id, trans_obj):
        # Run *sql* asynchronously on *conn*; returns (result, status).
        if conn.connected():
            # on successful connection set the connection id to the
            # transaction object
            trans_obj.set_connection_id(self.connection_id)
            StartRunningQuery.save_transaction_in_session(session_obj, trans_id, trans_obj)
            # If auto commit is False and transaction status is Idle
            # then call is_begin_not_required() function to check BEGIN
            # is required or not.
            if StartRunningQuery.is_begin_required_for_sql_query(trans_obj, conn, sql):
                conn.execute_void("BEGIN;")
            # Execute sql asynchronously with params is None
            # and formatted_error is True.
            try:
                status, result = conn.execute_async(sql)
            except ConnectionLost:
                raise
            # If the transaction aborted for some reason and
            # Auto RollBack is True then issue a rollback to cleanup.
            if StartRunningQuery.is_rollback_statement_required(trans_obj, conn):
                conn.execute_void("ROLLBACK;")
        else:
            status = False
            result = gettext(
                'Not connected to server or connection with the server has '
                'been closed.')
        return result, status
    @staticmethod
    def is_begin_required_for_sql_query(trans_obj, conn, sql):
        # BEGIN is needed only when autocommit is off, the connection is
        # idle, and the statement itself is transaction-wrappable.
        return not trans_obj.auto_commit \
            and conn.transaction_status() == TX_STATUS_IDLE \
            and is_begin_required(sql)
    @staticmethod
    def is_rollback_statement_required(trans_obj, conn):
        # Roll back automatically only when the transaction is in the error
        # state and the user enabled auto-rollback.
        return conn.transaction_status() == TX_STATUS_INERROR and trans_obj.auto_rollback
    @staticmethod
    def save_transaction_in_session(session, transaction_id, transaction):
        # As we changed the transaction object we need to
        # restore it and update the session variable.
        session['command_obj'] = pickle.dumps(transaction, -1)
        update_session_grid_transaction(transaction_id, session)
    @staticmethod
    def retrieve_session_information(http_session, transaction_id):
        # Returns the per-transaction session dict, or an error Response
        # when the grid data / transaction id is missing.
        if 'gridData' not in http_session:
            return make_json_response(
                success=0,
                errormsg=gettext('Transaction ID not found in the session.'),
                info='DATAGRID_TRANSACTION_REQUIRED', status=404
            )
        grid_data = http_session['gridData']
        # Return from the function if transaction id not found
        if str(transaction_id) not in grid_data:
            return make_json_response(
                success=0,
                errormsg=gettext('Transaction ID not found in the session.'),
                info='DATAGRID_TRANSACTION_REQUIRED',
                status=404
            )
        # Fetch the object for the specified transaction id.
        # Use pickle.loads function to get the command object
        return grid_data[str(transaction_id)]

View File

@ -0,0 +1,8 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

View File

@ -0,0 +1,121 @@
#######################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Apply Explain plan wrapper to sql object."""
import sys
from pgadmin.tools.sqleditor.utils import apply_explain_plan_wrapper_if_needed
from pgadmin.utils.route import BaseTestGenerator
if sys.version_info < (3, 3):
from mock import patch, MagicMock
else:
from unittest.mock import patch, MagicMock
class StartRunningQueryTest(BaseTestGenerator):
    """
    Check that the apply_explain_plan_wrapper_if_needed function works as
    intended.

    NOTE(review): the class name appears copy-pasted from the query-start
    tests; renaming it (e.g. ApplyExplainPlanWrapperTest) would be clearer,
    but is left unchanged to avoid disturbing test discovery.
    """
    # Each scenario dict's keys become attributes on the generated test.
    scenarios = [
        ('When explain_plan is none, it should return unaltered SQL', dict(
            function_input_parameters={
                'manager': MagicMock(),
                'sql': {
                    'sql': 'some sql',
                    'explain_plan': None
                }
            },
            expect_render_template_mock_parameters=None,
            expected_return_value='some sql'
        )),
        ('When explain_plan is not present, it should return unaltered SQL', dict(
            function_input_parameters={
                'manager': MagicMock(),
                'sql': {
                    'sql': 'some sql'
                }
            },
            expect_render_template_mock_parameters=None,
            expected_return_value='some sql'
        )),
        ('When explain_plan is present for a Postgres server version 10, it should return SQL with explain plan', dict(
            function_input_parameters={
                'manager': MagicMock(version=10, server_type='pg'),
                'sql': {
                    'sql': 'some sql',
                    'explain_plan': {
                        'format': 'json',
                        'analyze': False,
                        'verbose': True,
                        'buffers': False,
                        'timing': True
                    }
                }
            },
            expect_render_template_mock_parameters=dict(
                template_name_or_list='sqleditor/sql/#10#/explain_plan.sql',
                named_parameters=dict(
                    format='json',
                    analyze=False,
                    verbose=True,
                    buffers=False,
                    timing=True
                )),
            expected_return_value='EXPLAIN (FORMAT JSON, ANALYZE FALSE, VERBOSE TRUE, COSTS FALSE, BUFFERS FALSE, '
                                  'TIMING TRUE) some sql'
        )),
        ('When explain_plan is present for a GreenPlum server version 5, it should return SQL with explain plan', dict(
            function_input_parameters={
                'manager': MagicMock(version=80323, server_type='gpdb'),
                'sql': {
                    'sql': 'some sql',
                    'explain_plan': {
                        'format': 'json',
                        'analyze': False,
                        'verbose': True,
                        'buffers': False,
                        'timing': True
                    }
                }
            },
            expect_render_template_mock_parameters=dict(
                template_name_or_list='sqleditor/sql/#gpdb#80323#/explain_plan.sql',
                named_parameters=dict(
                    format='json',
                    analyze=False,
                    verbose=True,
                    buffers=False,
                    timing=True
                )),
            expected_return_value='EXPLAIN some sql'
        ))
    ]
    def runTest(self):
        # Stub render_template so the test doesn't need a Flask app context.
        with patch('pgadmin.tools.sqleditor.utils.apply_explain_plan_wrapper.render_template') as render_template_mock:
            render_template_mock.return_value = self.expected_return_value
            result = apply_explain_plan_wrapper_if_needed(**self.function_input_parameters)
            # assertEqual: assertEquals is a deprecated alias.
            self.assertEqual(result, self.expected_return_value)
            if self.expect_render_template_mock_parameters:
                render_template_mock.assert_called_with(
                    self.expect_render_template_mock_parameters['template_name_or_list'],
                    sql=self.function_input_parameters['sql']['sql'],
                    **self.expect_render_template_mock_parameters['named_parameters']
                )
            else:
                render_template_mock.assert_not_called()

View File

@ -0,0 +1,445 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import sys
from flask import Response
import simplejson as json
from pgadmin.tools.sqleditor.utils.start_running_query import StartRunningQuery
from pgadmin.utils.exception import ConnectionLost
from pgadmin.utils.route import BaseTestGenerator
if sys.version_info < (3, 3):
from mock import patch, MagicMock
else:
from unittest.mock import patch, MagicMock
get_driver_exception = Exception('get_driver exception')
class StartRunningQueryTest(BaseTestGenerator):
    """
    Check that the start_running_query method works as intended
    """
    # Table-driven scenarios: every key of the dict becomes an attribute on
    # the generated test instance and parameterizes the mocks in runTest /
    # __create_manager / __mock_assertions.
    scenarios = [
        ('When gridData is not present in the session, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict()
            ),
            pickle_load_return=None,
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=False,
            connection_connect_return=None,
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                success=0,
                errormsg='Transaction ID not found in the session.',
                info='DATAGRID_TRANSACTION_REQUIRED',
                status=404,
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When transactionId is not present in the gridData, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData=dict())
            ),
            pickle_load_return=None,
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=False,
            connection_connect_return=None,
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                success=0,
                errormsg='Transaction ID not found in the session.',
                info='DATAGRID_TRANSACTION_REQUIRED',
                status=404,
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When the command information for the transaction cannot be retrieved, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=None,
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=False,
            connection_connect_return=None,
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                data=dict(
                    status=False,
                    result='Either transaction object or session object not found.',
                    can_edit=False,
                    can_filter=False,
                    info_notifier_timeout=5
                )
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When exception happens while retrieving the database driver, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(conn_id=1, update_fetched_row_cnt=MagicMock()),
            get_driver_exception=True,
            manager_connection_exception=None,
            is_connected_to_server=False,
            connection_connect_return=None,
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=None,
            expect_internal_server_error_to_have_been_called_with=dict(
                errormsg='get_driver exception'
            ),
            expected_logger_error=get_driver_exception,
            expect_execute_void_called_with='some sql',
        )),
        ('When ConnectionLost happens while retrieving the database connection, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(conn_id=1, update_fetched_row_cnt=MagicMock()),
            get_driver_exception=False,
            manager_connection_exception=ConnectionLost('1', '2', '3'),
            is_connected_to_server=False,
            connection_connect_return=None,
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=None,
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When is not connected to the server and fails to connect, it returns an error', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(conn_id=1, update_fetched_row_cnt=MagicMock()),
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=False,
            connection_connect_return=[False, 'Unable to connect to server'],
            execute_async_return_value=None,
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=None,
            expect_internal_server_error_to_have_been_called_with=dict(
                errormsg='Unable to connect to server'
            ),
            expected_logger_error='Unable to connect to server',
            expect_execute_void_called_with='some sql',
        )),
        ('When server is connected and start query async request, it returns an success message', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(
                conn_id=1,
                update_fetched_row_cnt=MagicMock(),
                set_connection_id=MagicMock(),
                auto_commit=True,
                auto_rollback=False,
                can_edit=lambda: True,
                can_filter=lambda: True
            ),
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=True,
            connection_connect_return=None,
            execute_async_return_value=[True, 'async function result output'],
            is_begin_required=False,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                data=dict(
                    status=True,
                    result='async function result output',
                    can_edit=True,
                    can_filter=True,
                    info_notifier_timeout=5
                )
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When server is connected and start query async request and begin is required, '
         'it returns an success message', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(
                conn_id=1,
                update_fetched_row_cnt=MagicMock(),
                set_connection_id=MagicMock(),
                auto_commit=True,
                auto_rollback=False,
                can_edit=lambda: True,
                can_filter=lambda: True
            ),
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=True,
            connection_connect_return=None,
            execute_async_return_value=[True, 'async function result output'],
            is_begin_required=True,
            is_rollback_required=False,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                data=dict(
                    status=True,
                    result='async function result output',
                    can_edit=True,
                    can_filter=True,
                    info_notifier_timeout=5
                )
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When server is connected and start query async request and rollback is required, '
         'it returns an success message', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(
                conn_id=1,
                update_fetched_row_cnt=MagicMock(),
                set_connection_id=MagicMock(),
                auto_commit=True,
                auto_rollback=False,
                can_edit=lambda: True,
                can_filter=lambda: True
            ),
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=True,
            connection_connect_return=None,
            execute_async_return_value=[True, 'async function result output'],
            is_begin_required=False,
            is_rollback_required=True,
            apply_explain_plan_wrapper_if_needed_return_value='some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                data=dict(
                    status=True,
                    result='async function result output',
                    can_edit=True,
                    can_filter=True,
                    info_notifier_timeout=5
                )
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='some sql',
        )),
        ('When server is connected and start query async request with explain plan wrapper, '
         'it returns an success message', dict(
            function_parameters=dict(
                sql=dict(sql='some sql', explain_plan=None),
                trans_id=123,
                http_session=dict(gridData={'123': dict(command_obj='')})
            ),
            pickle_load_return=MagicMock(
                conn_id=1,
                update_fetched_row_cnt=MagicMock(),
                set_connection_id=MagicMock(),
                auto_commit=True,
                auto_rollback=False,
                can_edit=lambda: True,
                can_filter=lambda: True
            ),
            get_driver_exception=False,
            manager_connection_exception=None,
            is_connected_to_server=True,
            connection_connect_return=None,
            execute_async_return_value=[True, 'async function result output'],
            is_begin_required=False,
            is_rollback_required=True,
            apply_explain_plan_wrapper_if_needed_return_value='EXPLAIN PLAN some sql',
            expect_make_json_response_to_have_been_called_with=dict(
                data=dict(
                    status=True,
                    result='async function result output',
                    can_edit=True,
                    can_filter=True,
                    info_notifier_timeout=5
                )
            ),
            expect_internal_server_error_to_have_been_called_with=None,
            expected_logger_error=None,
            expect_execute_void_called_with='EXPLAIN PLAN some sql',
        )),
    ]
    # mock.patch decorators apply bottom-up, so the first runTest argument
    # corresponds to the last decorator in this stack.
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.apply_explain_plan_wrapper_if_needed')
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.make_json_response')
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.pickle')
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.get_driver')
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.internal_server_error')
    @patch('pgadmin.tools.sqleditor.utils.start_running_query.update_session_grid_transaction')
    def runTest(self, update_session_grid_transaction_mock,
                internal_server_error_mock, get_driver_mock, pickle_mock,
                make_json_response_mock, apply_explain_plan_wrapper_if_needed_mock):
        """Check correct function is called to handle to run query."""
        self.connection = None
        self.loggerMock = MagicMock(error=MagicMock())
        expected_response = Response(response=json.dumps({'errormsg': 'some value'}))
        make_json_response_mock.return_value = expected_response
        if self.expect_internal_server_error_to_have_been_called_with is not None:
            internal_server_error_mock.return_value = expected_response
        pickle_mock.loads.return_value = self.pickle_load_return
        blueprint_mock = MagicMock(info_notifier_timeout=MagicMock(get=lambda: 5))
        # NOTE(review): these assignments replace the static methods on the
        # StartRunningQuery class itself, so they persist across scenarios;
        # harmless here because every scenario re-assigns both of them.
        if self.is_begin_required:
            StartRunningQuery.is_begin_required_for_sql_query = MagicMock(return_value=True)
        else:
            StartRunningQuery.is_begin_required_for_sql_query = MagicMock(return_value=False)
        if self.is_rollback_required:
            StartRunningQuery.is_rollback_statement_required = MagicMock(return_value=True)
        else:
            StartRunningQuery.is_rollback_statement_required = MagicMock(return_value=False)
        apply_explain_plan_wrapper_if_needed_mock.return_value = self.apply_explain_plan_wrapper_if_needed_return_value
        manager = self.__create_manager()
        if self.get_driver_exception:
            get_driver_mock.side_effect = get_driver_exception
        else:
            get_driver_mock.return_value = MagicMock(connection_manager=lambda session_id: manager)
        try:
            result = StartRunningQuery(
                blueprint_mock,
                self.loggerMock
            ).execute(
                **self.function_parameters
            )
            if self.manager_connection_exception is not None:
                self.fail('Exception: "' + str(self.manager_connection_exception) + '" excepted but not raised')
            self.assertEquals(result, expected_response)
        except AssertionError:
            raise
        except Exception as exception:
            # The scenario expected this exception to escape execute().
            self.assertEquals(self.manager_connection_exception, exception)
        self.__mock_assertions(internal_server_error_mock, make_json_response_mock)
        if self.is_connected_to_server:
            apply_explain_plan_wrapper_if_needed_mock.assert_called_with(manager, self.function_parameters['sql'])
    def __create_manager(self):
        # Build a fake driver manager whose connection() yields the stubbed
        # connection, or raises the scenario's configured exception.
        self.connection = MagicMock(
            connected=lambda: self.is_connected_to_server,
            connect=MagicMock(),
            execute_async=MagicMock(),
            execute_void=MagicMock(),
        )
        self.connection.connect.return_value = self.connection_connect_return
        self.connection.execute_async.return_value = self.execute_async_return_value
        if self.manager_connection_exception is None:
            manager = MagicMock(
                connection=lambda did, conn_id, use_binary_placeholder, array_to_string, auto_reconnect: self.connection
            )
        else:
            manager = MagicMock()
            manager.connection.side_effect = self.manager_connection_exception
        return manager
    def __mock_assertions(self, internal_server_error_mock, make_json_response_mock):
        # Verify that each mock was (or was not) called according to the
        # scenario's expectations.
        if self.expect_make_json_response_to_have_been_called_with is not None:
            make_json_response_mock.assert_called_with(**self.expect_make_json_response_to_have_been_called_with)
        else:
            make_json_response_mock.assert_not_called()
        if self.expect_internal_server_error_to_have_been_called_with is not None:
            internal_server_error_mock.assert_called_with(**self.expect_internal_server_error_to_have_been_called_with)
        else:
            internal_server_error_mock.assert_not_called()
        if self.execute_async_return_value is not None:
            self.connection.execute_async.assert_called_with(self.expect_execute_void_called_with)
        else:
            self.connection.execute_async.assert_not_called()
        if self.expected_logger_error is not None:
            self.loggerMock.error.assert_called_with(self.expected_logger_error)
        else:
            self.loggerMock.error.assert_not_called()
        if self.is_begin_required:
            self.connection.execute_void.assert_called_with('BEGIN;')
        elif not self.is_rollback_required:
            self.connection.execute_void.assert_not_called()
        if self.is_rollback_required:
            self.connection.execute_void.assert_called_with('ROLLBACK;')
        elif not self.is_begin_required:
            self.connection.execute_void.assert_not_called()

View File

@ -0,0 +1,18 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
"""Update session with gridData."""
from flask import session
def update_session_grid_transaction(trans_id, data):
    """Store *data* under *trans_id* inside the session's gridData mapping.

    Does nothing when the session has no gridData yet; otherwise the whole
    mapping is re-assigned so Flask notices the session was modified.
    """
    if 'gridData' not in session:
        return
    grid_data = session['gridData']
    grid_data[str(trans_id)] = data
    session['gridData'] = grid_data

View File

@ -0,0 +1,17 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import os
def compile_template_name(template_prefix, template_file_name, server_type, version):
    """Build the path of a versioned SQL template.

    GreenPlum ('gpdb') templates live under '#gpdb#<version>#'; every other
    server type uses the plain '#<version>#' directory.
    """
    if server_type == 'gpdb':
        version_directory = '#{}#{}#'.format(server_type, version)
    else:
        version_directory = '#{}#'.format(version)
    return os.path.join(template_prefix, version_directory, template_file_name)

View File

@ -0,0 +1,34 @@
#######################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2018, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
from pgadmin.utils.compile_template_name import compile_template_name
from pgadmin.utils.route import BaseTestGenerator
class StartRunningQueryTest(BaseTestGenerator):
    """
    Check that the compile_template_name function works as intended.

    NOTE(review): the class name and the previous docstring were copy-pasted
    from another test module; the name is kept to avoid disturbing test
    discovery.
    """
    scenarios = [
        ('When server is Postgres and version is 10, it returns the path to the postgres template', dict(
            server_type='pg',
            version=100000,
            expected_return_value='some/prefix/#100000#/some_file.sql'
        )),
        ('When server is GreenPlum and version is 5, it returns the path to the GreenPlum template', dict(
            server_type='gpdb',
            version=80323,
            expected_return_value='some/prefix/#gpdb#80323#/some_file.sql'
        )),
    ]
    def runTest(self):
        result = compile_template_name('some/prefix', 'some_file.sql', self.server_type, self.version)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(result, self.expected_return_value)

View File

@ -6,7 +6,6 @@
# This software is released under the PostgreSQL Licence # This software is released under the PostgreSQL Licence
# #
########################################################################## ##########################################################################
from flask.templating import DispatchingJinjaLoader from flask.templating import DispatchingJinjaLoader
from jinja2 import TemplateNotFound from jinja2 import TemplateNotFound
@ -54,6 +53,7 @@ class VersionedTemplateLoader(DispatchingJinjaLoader):
template_path = path_start + '/' + \ template_path = path_start + '/' + \
server_version['name'] + '/' + file_name server_version['name'] + '/' + file_name
try: try:
return super(VersionedTemplateLoader, self).get_source( return super(VersionedTemplateLoader, self).get_source(
environment, template_path environment, template_path

View File

@ -8,5 +8,9 @@
////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////
define(function () { define(function () {
return {'static': '/base/pgadmin/static/<path:filename>'}; return {
'static': '/base/pgadmin/static/<path:filename>',
'sqleditor.poll': '/sqleditor/query_tool/poll/<path:trans_id>',
'sqleditor.query_tool_start': '/sqleditor/query_tool/start/<path:trans_id>'
};
}); });

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,65 @@
//////////////////////////////////////////////////////////////////////////
//
// pgAdmin 4 - PostgreSQL Tools
//
// Copyright (C) 2013 - 2018, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////////////////
import {is_new_transaction_required} from '../../../pgadmin/static/js/sqleditor/is_new_transaction_required';
// Spec for is_new_transaction_required: it should report true only when the
// response is a 404 whose responseJSON carries
// info === 'DATAGRID_TRANSACTION_REQUIRED'.
describe('#is_new_transaction_required', () => {
  describe('when status is not 404', () => {
    it('should return false', () => {
      expect(is_new_transaction_required({
        status: 300,
      })).toBe(false);
    });
  });

  describe('when status is 404', () => {
    describe('when responseJSON is not present', () => {
      // toBeFalsy (not toBe(false)): the function may short-circuit to
      // undefined when responseJSON is absent.
      it('should return false', () => {
        expect(is_new_transaction_required({
          status: 404,
        })).toBeFalsy();
      });
    });

    describe('when responseJSON is present', () => {
      describe('when info is not present inside responseJSON', () => {
        it('should return false', () => {
          expect(is_new_transaction_required({
            status: 404,
            responseJSON: {},
          })).toBeFalsy();
        });
      });

      describe('when info is present inside responseJSON', () => {
        describe('when info value is not "DATAGRID_TRANSACTION_REQUIRED"', () => {
          it('should return false', () => {
            expect(is_new_transaction_required({
              status: 404,
              responseJSON: {
                info: 'some information',
              },
            })).toBe(false);
          });
        });

        describe('when info value is "DATAGRID_TRANSACTION_REQUIRED"', () => {
          // Fixed description: the assertion expects true, but the spec name
          // previously claimed "should return false".
          it('should return true', () => {
            expect(is_new_transaction_required({
              status: 404,
              responseJSON: {
                info: 'DATAGRID_TRANSACTION_REQUIRED',
              },
            })).toBe(true);
          });
        });
      });
    });
  });
});

View File

@ -43,7 +43,6 @@ describe('queryToolActions', () => {
expect(sqlEditorController.execute_data_query).toHaveBeenCalled(); expect(sqlEditorController.execute_data_query).toHaveBeenCalled();
}); });
}); });
}); });
@ -51,60 +50,100 @@ describe('queryToolActions', () => {
describe('when verbose and costs are not selected and buffers and timing are not selected', () => { describe('when verbose and costs are not selected and buffers and timing are not selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('OFF'); spyOn(queryToolActions, '_verbose').and.returnValue(false);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('OFF'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(false);
spyOn(queryToolActions, '_buffers').and.returnValue('OFF'); spyOn(queryToolActions, '_buffers').and.returnValue(false);
spyOn(queryToolActions, '_timing').and.returnValue('OFF'); spyOn(queryToolActions, '_timing').and.returnValue(false);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explainAnalyze(sqlEditorController); queryToolActions.explainAnalyze(sqlEditorController);
let explainAnalyzeQuery = 'EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE OFF, COSTS OFF, BUFFERS OFF, TIMING OFF) ';
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainAnalyzeQuery); // let explainAnalyzeQuery = 'EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE OFF, COSTS OFF, BUFFERS OFF, TIMING OFF) ';
const explainObject = {
format: 'json',
analyze: true,
verbose: false,
costs: false,
buffers: false,
timing: false,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
describe('when verbose and costs and buffers and timing are all selected', () => { describe('when verbose and costs and buffers and timing are all selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('ON'); spyOn(queryToolActions, '_verbose').and.returnValue(true);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('ON'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(true);
spyOn(queryToolActions, '_buffers').and.returnValue('ON'); spyOn(queryToolActions, '_buffers').and.returnValue(true);
spyOn(queryToolActions, '_timing').and.returnValue('ON'); spyOn(queryToolActions, '_timing').and.returnValue(true);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explainAnalyze(sqlEditorController); queryToolActions.explainAnalyze(sqlEditorController);
let explainAnalyzeQuery = 'EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE ON, COSTS ON, BUFFERS ON, TIMING ON) '; const explainObject = {
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainAnalyzeQuery); format: 'json',
analyze: true,
verbose: true,
costs: true,
buffers: true,
timing: true,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
describe('when verbose is selected and costs is not selected and buffer is selected and timing is not selected', () => { describe('when verbose is selected and costs is not selected and buffer is selected and timing is not selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('ON'); spyOn(queryToolActions, '_verbose').and.returnValue(true);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('OFF'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(false);
spyOn(queryToolActions, '_buffers').and.returnValue('ON'); spyOn(queryToolActions, '_buffers').and.returnValue(true);
spyOn(queryToolActions, '_timing').and.returnValue('OFF'); spyOn(queryToolActions, '_timing').and.returnValue(false);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explainAnalyze(sqlEditorController); queryToolActions.explainAnalyze(sqlEditorController);
let explainAnalyzeQuery = 'EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE ON, COSTS OFF, BUFFERS ON, TIMING OFF) ';
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainAnalyzeQuery); const explainObject = {
format: 'json',
analyze: true,
verbose: true,
costs: false,
buffers: true,
timing: false,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
describe('when verbose is not selected and costs is selected and buffer is not selected and timing is selected', () => { describe('when verbose is not selected and costs is selected and buffer is not selected and timing is selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('OFF'); spyOn(queryToolActions, '_verbose').and.returnValue(false);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('ON'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(true);
spyOn(queryToolActions, '_buffers').and.returnValue('OFF'); spyOn(queryToolActions, '_buffers').and.returnValue(false);
spyOn(queryToolActions, '_timing').and.returnValue('ON'); spyOn(queryToolActions, '_timing').and.returnValue(true);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explainAnalyze(sqlEditorController); queryToolActions.explainAnalyze(sqlEditorController);
let explainAnalyzeQuery = 'EXPLAIN (FORMAT JSON, ANALYZE ON, VERBOSE OFF, COSTS ON, BUFFERS OFF, TIMING ON) ';
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainAnalyzeQuery); const explainObject = {
format: 'json',
analyze: true,
verbose: false,
costs: true,
buffers: false,
timing: true,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
}); });
@ -113,39 +152,67 @@ describe('queryToolActions', () => {
describe('when verbose and costs are selected', () => { describe('when verbose and costs are selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('ON'); spyOn(queryToolActions, '_verbose').and.returnValue(true);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('ON'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(true);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explain(sqlEditorController); queryToolActions.explain(sqlEditorController);
let explainQuery = 'EXPLAIN (FORMAT JSON, ANALYZE OFF, VERBOSE ON, COSTS ON, BUFFERS OFF, TIMING OFF) '; const explainObject = {
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainQuery); format: 'json',
analyze: false,
verbose: true,
costs: true,
buffers: false,
timing: false,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
describe('when verbose and costs are not selected', () => { describe('when verbose and costs are not selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('OFF'); spyOn(queryToolActions, '_verbose').and.returnValue(false);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('OFF'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(false);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explain(sqlEditorController); queryToolActions.explain(sqlEditorController);
let explainQuery = 'EXPLAIN (FORMAT JSON, ANALYZE OFF, VERBOSE OFF, COSTS OFF, BUFFERS OFF, TIMING OFF) '; const explainObject = {
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainQuery); format: 'json',
analyze: false,
verbose: false,
costs: false,
buffers: false,
timing: false,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
describe('when verbose is selected and costs is not selected', () => { describe('when verbose is selected and costs is not selected', () => {
beforeEach(() => { beforeEach(() => {
setUpSpies('', ''); setUpSpies('', '');
spyOn(queryToolActions, '_verbose').and.returnValue('ON'); spyOn(queryToolActions, '_verbose').and.returnValue(true);
spyOn(queryToolActions, '_costsEnabled').and.returnValue('OFF'); spyOn(queryToolActions, '_costsEnabled').and.returnValue(false);
}); });
it('calls the execute function', () => { it('calls the execute function', () => {
queryToolActions.explain(sqlEditorController); queryToolActions.explain(sqlEditorController);
let explainQuery = 'EXPLAIN (FORMAT JSON, ANALYZE OFF, VERBOSE ON, COSTS OFF, BUFFERS OFF, TIMING OFF) '; const explainObject = {
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainQuery); format: 'json',
analyze: false,
verbose: true,
costs: false,
buffers: false,
timing: false,
summary: false,
};
expect(sqlEditorController.execute).toHaveBeenCalledWith(explainObject);
}); });
}); });
}); });