Some clumsy coding related fixes reported by SonarQube.
parent a23fad0ba8
commit 641f7bbe9d
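Two refactorings recur throughout the hunks below: an if/else that only returns boolean literals is replaced by returning the condition itself, and a nested if whose outer level does nothing else is merged into a single condition joined with `and`. A minimal Python sketch of both patterns, using made-up function and argument names rather than the pgAdmin code itself:

    import os

    # Pattern 1: if/else returning literals -> return the condition directly.
    # 'is_disabled' and its arguments are illustrative stand-ins.
    def is_disabled(name, template_list):
        # Before:
        #     if template_list.indexOf(name) == -1: return false
        #     else: return true
        return name in template_list

    # Pattern 2: collapsible nested if -> one condition; short-circuiting
    # 'and' preserves the outer guard.
    def remove_if_exists(path):
        # Before:
        #     if path is not None:
        #         if os.path.isfile(path): os.remove(path)
        if path is not None and os.path.isfile(path):
            os.remove(path)

    assert is_disabled('plpgsql', ['plpgsql', 'plperl'])
    remove_if_exists(None)  # safe: os.path.isfile is never called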
@@ -172,12 +172,7 @@ define('pgadmin.node.language', [
       return res;
     }, disabled: function(m) {
       if (m.isNew()) {
-        if (m.get('template_list').indexOf(m.get('name')) == -1) {
-          return false;
-        }
-        else {
-          return true;
-        }
+        return m.get('template_list').indexOf(m.get('name')) != -1;
       }
       return false;
     },

@@ -202,12 +197,7 @@ define('pgadmin.node.language', [
       return res;
     }, disabled: function(m) {
       if (m.isNew()) {
-        if (m.get('template_list').indexOf(m.get('name')) == -1) {
-          return false;
-        }
-        else {
-          return true;
-        }
+        return m.get('template_list').indexOf(m.get('name')) != -1;
       }
       return false;
     },

@@ -232,12 +222,7 @@ define('pgadmin.node.language', [
       return res;
     }, disabled: function(m) {
       if (m.isNew()) {
-        if (m.get('template_list').indexOf(m.get('name')) == -1) {
-          return false;
-        }
-        else {
-          return true;
-        }
+        return m.get('template_list').indexOf(m.get('name')) != -1;
      }
       return false;
     },

@@ -79,9 +79,9 @@ def is_version_in_range(sversion, min_ver, max_ver):
    if min_ver is None and max_ver is None:
        return True

-    if min_ver is None or min_ver <= sversion:
-        if max_ver is None or max_ver >= sversion:
-            return True
+    if (min_ver is None or min_ver <= sversion) and \
+            (max_ver is None or max_ver >= sversion):
+        return True
    return False

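The merged test is equivalent to the nested one it replaces: a version is in range only when it passes both the lower and the upper bound, and the `or` short-circuits still keep a None bound from ever being compared. A quick standalone check, with sample PostgreSQL-style version numbers chosen purely for illustration:

    def is_version_in_range(sversion, min_ver, max_ver):
        # Refactored form from the hunk above.
        if min_ver is None and max_ver is None:
            return True
        if (min_ver is None or min_ver <= sversion) and \
                (max_ver is None or max_ver >= sversion):
            return True
        return False

    assert is_version_in_range(90600, None, None)        # no bounds
    assert is_version_in_range(90600, 90100, 110000)     # inside the range
    assert is_version_in_range(90600, None, 90600)       # inclusive upper bound
    assert not is_version_in_range(90600, 100000, None)  # below the lower bound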
@@ -212,9 +212,8 @@ class PGUtilitiesBackupFeatureTest(BaseFeatureTest):

        test_gui_helper.close_process_watcher(self)

-        if backup_file is not None:
-            if os.path.isfile(backup_file):
-                os.remove(backup_file)
+        if backup_file is not None and os.path.isfile(backup_file):
+            os.remove(backup_file)

    def after(self):
        test_gui_helper.close_process_watcher(self)

@@ -648,11 +648,11 @@ class Filemanager(object):
                user_path = u"{0}/".format(user_path)
            else:
                # filter files based on file_type
-                if file_type is not None and file_type != "*":
-                    if folders_only or len(supported_types) > 0 and \
-                            file_extension not in supported_types or \
-                            file_type != file_extension:
-                        continue
+                if file_type is not None and file_type != "*" and \
+                    (folders_only or len(supported_types) > 0 and
+                        file_extension not in supported_types or
+                        file_type != file_extension):
+                    continue

            # create a list of files and folders
            files[f] = {

@@ -570,21 +570,21 @@ def direct_new(trans_id):
    user_agent = UserAgent(request.headers.get('User-Agent'))

    function_arguments = '('
-    if de_inst.function_data is not None:
-        if 'args_name' in de_inst.function_data and \
-                de_inst.function_data['args_name'] is not None and \
-                de_inst.function_data['args_name'] != '':
-            args_name_list = de_inst.function_data['args_name'].split(",")
-            args_type_list = de_inst.function_data['args_type'].split(",")
-            index = 0
-            for args_name in args_name_list:
-                function_arguments = '{}{} {}, '.format(function_arguments,
-                                                        args_name,
-                                                        args_type_list[index])
-                index += 1
-            # Remove extra comma and space from the arguments list
-            if len(args_name_list) > 0:
-                function_arguments = function_arguments[:-2]
+    if de_inst.function_data is not None and \
+            'args_name' in de_inst.function_data and \
+            de_inst.function_data['args_name'] is not None and \
+            de_inst.function_data['args_name'] != '':
+        args_name_list = de_inst.function_data['args_name'].split(",")
+        args_type_list = de_inst.function_data['args_type'].split(",")
+        index = 0
+        for args_name in args_name_list:
+            function_arguments = '{}{} {}, '.format(function_arguments,
+                                                    args_name,
+                                                    args_type_list[index])
+            index += 1
+        # Remove extra comma and space from the arguments list
+        if len(args_name_list) > 0:
+            function_arguments = function_arguments[:-2]

    function_arguments += ')'

@@ -58,11 +58,11 @@ class DebuggerInstance(object):
        return []

    def load_from_session(self):
-        if '__debugger_sessions' in session:
-            if str(self.trans_id) in session['__debugger_sessions']:
-                trans_data = session['__debugger_sessions'][str(self.trans_id)]
-                self.function_data = trans_data.get('function_data', None)
-                self.debugger_data = trans_data.get('debugger_data', None)
+        if '__debugger_sessions' in session and \
+                str(self.trans_id) in session['__debugger_sessions']:
+            trans_data = session['__debugger_sessions'][str(self.trans_id)]
+            self.function_data = trans_data.get('function_data', None)
+            self.debugger_data = trans_data.get('debugger_data', None)

    def update_session(self):
        with debugger_sessions_lock:

@@ -76,6 +76,6 @@ class DebuggerInstance(object):

    def clear(self):
        with debugger_sessions_lock:
-            if '__debugger_sessions' in session:
-                if str(self.trans_id) in session['__debugger_sessions']:
-                    session['__debugger_sessions'].pop(str(self.trans_id))
+            if '__debugger_sessions' in session and \
+                    str(self.trans_id) in session['__debugger_sessions']:
+                session['__debugger_sessions'].pop(str(self.trans_id))

@@ -259,32 +259,32 @@ def create_restore_job(sid):
            args.append(default_value)

    def set_multiple(key, param, with_schema=True):
-        if key in data:
-            if len(data[key]) > 0:
-                if with_schema:
-                    # TODO:// This is temporary
-                    # Once object tree is implemented then we will use
-                    # list of tuples 'else' part
-                    if isinstance(data[key], list):
-                        s, t = data[key]
+        if key in data and \
+                len(data[key]) > 0:
+            if with_schema:
+                # TODO:// This is temporary
+                # Once object tree is implemented then we will use
+                # list of tuples 'else' part
+                if isinstance(data[key], list):
+                    s, t = data[key]
+                    args.extend([
+                        param,
+                        driver.qtIdent(
+                            conn, s
+                        ) + '.' + driver.qtIdent(conn, t)
+                    ])
+                else:
+                    for s, o in data[key]:
                        args.extend([
                            param,
                            driver.qtIdent(
                                conn, s
-                            ) + '.' + driver.qtIdent(conn, t)
+                            ) + '.' + driver.qtIdent(conn, o)
                        ])
-                    else:
-                        for s, o in data[key]:
-                            args.extend([
-                                param,
-                                driver.qtIdent(
-                                    conn, s
-                                ) + '.' + driver.qtIdent(conn, o)
-                            ])
-                else:
-                    for o in data[key]:
-                        args.extend([param, o])
-                return True
+            else:
+                for o in data[key]:
+                    args.extend([param, o])
+            return True
        return False

    args.extend([

@@ -401,17 +401,17 @@ def poll(trans_id):

        # If trans_obj is a QueryToolCommand then check for updatable
        # resultsets and primary keys
-        if isinstance(trans_obj, QueryToolCommand):
-            if trans_obj.check_updatable_results_pkeys_oids():
-                pk_names, primary_keys = trans_obj.get_primary_keys()
-                session_obj['has_oids'] = trans_obj.has_oids()
-                # Update command_obj in session obj
-                session_obj['command_obj'] = pickle.dumps(
-                    trans_obj, -1)
-                # If primary_keys exist, add them to the session_obj to
-                # allow for saving any changes to the data
-                if primary_keys is not None:
-                    session_obj['primary_keys'] = primary_keys
+        if isinstance(trans_obj, QueryToolCommand) and \
+                trans_obj.check_updatable_results_pkeys_oids():
+            pk_names, primary_keys = trans_obj.get_primary_keys()
+            session_obj['has_oids'] = trans_obj.has_oids()
+            # Update command_obj in session obj
+            session_obj['command_obj'] = pickle.dumps(
+                trans_obj, -1)
+            # If primary_keys exist, add them to the session_obj to
+            # allow for saving any changes to the data
+            if primary_keys is not None:
+                session_obj['primary_keys'] = primary_keys

        if 'has_oids' in session_obj:
            has_oids = session_obj['has_oids']

@@ -252,9 +252,9 @@ class SQLFilter(object):
        if self._row_filter is None or self._row_filter == '':
            is_filter_applied = False

-        if not is_filter_applied:
-            if self._data_sorting and len(self._data_sorting) > 0:
-                is_filter_applied = True
+        if not is_filter_applied and \
+                self._data_sorting and len(self._data_sorting) > 0:
+            is_filter_applied = True

        return is_filter_applied

@@ -323,10 +323,9 @@ class Driver(BaseDriver):
            return False

        # If already quoted?, If yes then do not quote again
-        if forTypes and valNoArray:
-            if valNoArray.startswith('"') \
-                    or valNoArray.endswith('"'):
-                return False
+        if forTypes and valNoArray and \
+                (valNoArray.startswith('"') or valNoArray.endswith('"')):
+            return False

        if u'0' <= valNoArray[0] <= u'9':
            return True

@@ -539,9 +539,9 @@ WHERE
                self.conn_id.encode('utf-8')
            ), None)

-        if self.connected() and cur and not cur.closed:
-            if not server_cursor or (server_cursor and cur.name):
-                return True, cur
+        if self.connected() and cur and not cur.closed and \
+                (not server_cursor or (server_cursor and cur.name)):
+            return True, cur

        if not self.connected():
            errmsg = ""

@@ -618,21 +618,21 @@ WHERE
        # We need to esacpe the data so that it does not fail when
        # it is encoded with python ascii
        # unicode_escape helps in escaping and unescaping
-        if self.conn:
-            if self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
-                                      'MULE_INTERNAL', 'MULEINTERNAL')\
-                    and params is not None and type(params) == dict:
-                for key, val in params.items():
-                    modified_val = val
-                    # "unicode_escape" will convert single backslash to double
-                    # backslash, so we will have to replace/revert them again
-                    # to store the correct value into the database.
-                    if isinstance(val, six.string_types):
-                        modified_val = val.encode('unicode_escape')\
-                            .decode('raw_unicode_escape')\
-                            .replace("\\\\", "\\")
+        if self.conn and \
+            self.conn.encoding in ('SQL_ASCII', 'SQLASCII',
+                                   'MULE_INTERNAL', 'MULEINTERNAL')\
+                and params is not None and type(params) == dict:
+            for key, val in params.items():
+                modified_val = val
+                # "unicode_escape" will convert single backslash to double
+                # backslash, so we will have to replace/revert them again
+                # to store the correct value into the database.
+                if isinstance(val, six.string_types):
+                    modified_val = val.encode('unicode_escape')\
+                        .decode('raw_unicode_escape')\
+                        .replace("\\\\", "\\")

-                    params[key] = modified_val
+                params[key] = modified_val

        return params

@@ -1084,13 +1084,12 @@ WHERE
            self.__internal_blocking_execute(cur, query, params)
        except psycopg2.Error as pe:
            cur.close()
-            if not self.connected():
-                if self.auto_reconnect and \
-                        not self.reconnecting:
-                    return self.__attempt_execution_reconnect(
-                        self.execute_2darray, query, params,
-                        formatted_exception_msg
-                    )
+            if not self.connected() and self.auto_reconnect and \
+                    not self.reconnecting:
+                return self.__attempt_execution_reconnect(
+                    self.execute_2darray, query, params,
+                    formatted_exception_msg
+                )
            errmsg = self._formatted_exception_msg(pe, formatted_exception_msg)
            current_app.logger.error(
                u"Failed to execute query (execute_2darray) for the server "

@@ -1233,9 +1232,8 @@ WHERE
        return False

    def reset(self):
-        if self.conn:
-            if self.conn.closed:
-                self.conn = None
+        if self.conn and self.conn.closed:
+            self.conn = None
        pg_conn = None
        manager = self.manager

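Merging `if self.conn:` with `if self.conn.closed:` is only safe because `and` short-circuits: when the connection is None, the `.closed` attribute is never read. A small sketch with a stand-in connection object (psycopg2 reports `closed` as a non-zero int once a connection is closed):

    class FakeConn(object):
        # Stand-in for a psycopg2 connection; only the attribute we need.
        closed = 1

    for conn in (None, FakeConn()):
        # Same shape as the merged condition in reset() above. Evaluating
        # conn.closed on None would raise AttributeError, but the falsy
        # left-hand side skips the right-hand test entirely.
        if conn and conn.closed:
            conn = None
        assert conn is None  # None stays None; a closed connection is reset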
@@ -1463,23 +1461,22 @@ Failed to reset the connection to the server due to following error:
                    pos += 1

            self.row_count = cur.rowcount
-            if not no_result:
-                if cur.rowcount > 0:
-                    result = []
-                    # For DDL operation, we may not have result.
-                    #
-                    # Because - there is not direct way to differentiate DML
-                    # and DDL operations, we need to rely on exception to
-                    # figure that out at the moment.
-                    try:
-                        for row in cur:
-                            new_row = []
-                            for col in self.column_info:
-                                new_row.append(row[col['name']])
-                            result.append(new_row)
+            if not no_result and cur.rowcount > 0:
+                result = []
+                # For DDL operation, we may not have result.
+                #
+                # Because - there is not direct way to differentiate DML
+                # and DDL operations, we need to rely on exception to
+                # figure that out at the moment.
+                try:
+                    for row in cur:
+                        new_row = []
+                        for col in self.column_info:
+                            new_row.append(row[col['name']])
+                        result.append(new_row)

-                    except psycopg2.ProgrammingError:
-                        result = None
+                except psycopg2.ProgrammingError:
+                    result = None

        return status, result

@@ -1726,37 +1723,37 @@ Failed to reset the connection to the server due to following error:
            errmsg += gettext('SQL state: ')
            errmsg += self.decode_to_utf8(exception_obj.diag.sqlstate)

-        if exception_obj.diag.message_detail is not None:
-            if 'Detail:'.lower() not in errmsg.lower():
-                if not errmsg.endswith('\n'):
-                    errmsg += '\n'
-                errmsg += gettext('Detail: ')
-                errmsg += self.decode_to_utf8(
-                    exception_obj.diag.message_detail
-                )
+        if exception_obj.diag.message_detail is not None and \
+                'Detail:'.lower() not in errmsg.lower():
+            if not errmsg.endswith('\n'):
+                errmsg += '\n'
+            errmsg += gettext('Detail: ')
+            errmsg += self.decode_to_utf8(
+                exception_obj.diag.message_detail
+            )

-        if exception_obj.diag.message_hint is not None:
-            if 'Hint:'.lower() not in errmsg.lower():
-                if not errmsg.endswith('\n'):
-                    errmsg += '\n'
-                errmsg += gettext('Hint: ')
-                errmsg += self.decode_to_utf8(exception_obj.diag.message_hint)
+        if exception_obj.diag.message_hint is not None and \
+                'Hint:'.lower() not in errmsg.lower():
+            if not errmsg.endswith('\n'):
+                errmsg += '\n'
+            errmsg += gettext('Hint: ')
+            errmsg += self.decode_to_utf8(exception_obj.diag.message_hint)

-        if exception_obj.diag.statement_position is not None:
-            if 'Character:'.lower() not in errmsg.lower():
-                if not errmsg.endswith('\n'):
-                    errmsg += '\n'
-                errmsg += gettext('Character: ')
-                errmsg += self.decode_to_utf8(
-                    exception_obj.diag.statement_position
-                )
+        if exception_obj.diag.statement_position is not None and \
+                'Character:'.lower() not in errmsg.lower():
+            if not errmsg.endswith('\n'):
+                errmsg += '\n'
+            errmsg += gettext('Character: ')
+            errmsg += self.decode_to_utf8(
+                exception_obj.diag.statement_position
+            )

-        if exception_obj.diag.context is not None:
-            if 'Context:'.lower() not in errmsg.lower():
-                if not errmsg.endswith('\n'):
-                    errmsg += '\n'
-                errmsg += gettext('Context: ')
-                errmsg += self.decode_to_utf8(exception_obj.diag.context)
+        if exception_obj.diag.context is not None and \
+                'Context:'.lower() not in errmsg.lower():
+            if not errmsg.endswith('\n'):
+                errmsg += '\n'
+            errmsg += gettext('Context: ')
+            errmsg += self.decode_to_utf8(exception_obj.diag.context)

        notices = self.get_notices()
        return errmsg if notices == '' else notices + '\n' + errmsg

@@ -178,9 +178,8 @@ class ServerManager(object):
        if hasattr(str, 'decode') and \
                not isinstance(database, unicode):
            database = database.decode('utf-8')
-        if did is not None:
-            if did in self.db_info:
-                self.db_info[did]['datname'] = database
+        if did is not None and did in self.db_info:
+            self.db_info[did]['datname'] = database
        else:
            if did is None:
                database = self.db

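One caveat with this particular merge: the original `else:` was attached to the outer `if did is not None:`, so it used to run only when did was None. Attached to the merged condition, it now also runs when did is set but missing from db_info, which is why the `if did is None:` guard kept inside the branch still matters. A toy illustration of the merged shape (the function and argument names here are stand-ins, not the ServerManager API):

    def update_datname(did, db_info, database, default_db):
        if did is not None and did in db_info:
            db_info[did]['datname'] = database
        else:
            if did is None:
                database = default_db
        return database

    info = {1: {'datname': 'old'}}
    assert update_datname(1, info, 'new', 'pg') == 'new'    # updates info
    assert update_datname(None, info, 'new', 'pg') == 'pg'  # falls back
    assert update_datname(2, info, 'new', 'pg') == 'new'    # guard skips fallback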
@@ -274,9 +273,9 @@ WHERE db.oid = {0}""".format(did))
        # first connection for identifications.
        self.pinged = datetime.datetime.now()
        try:
-            if 'password' in data and data['password']:
-                if hasattr(data['password'], 'encode'):
-                    data['password'] = data['password'].encode('utf-8')
+            if 'password' in data and data['password'] and \
+                    hasattr(data['password'], 'encode'):
+                data['password'] = data['password'].encode('utf-8')
            if 'tunnel_password' in data and data['tunnel_password']:
                data['tunnel_password'] = \
                    data['tunnel_password'].encode('utf-8')

@@ -141,10 +141,9 @@ class _Preference(object):
            if self.select2 and self.select2['tags']:
                return res.value
            return self.default
-        if self._type == 'text':
-            if res.value == '' and (self.allow_blanks is None or
-                                    not self.allow_blanks):
-                return self.default
+        if self._type == 'text' and res.value == '' and \
+                (self.allow_blanks is None or not self.allow_blanks):
+            return self.default
        if self._type == 'keyboardshortcut':
            try:
                return json.loads(res.value)

@@ -114,11 +114,10 @@ class BaseTestGenerator(unittest.TestCase):
        super(BaseTestGenerator, self).setUp()
        self.server_id = self.server_information["server_id"]
        server_con = server_utils.connect_server(self, self.server_id)
-        if hasattr(self, 'skip_on_database'):
-            if 'data' in server_con and 'type' in server_con['data']:
-                if server_con['data']['type'] in self.skip_on_database:
-                    self.skipTest('cannot run in: %s' %
-                                  server_con['data']['type'])
+        if hasattr(self, 'skip_on_database') and \
+                'data' in server_con and 'type' in server_con['data'] and \
+                server_con['data']['type'] in self.skip_on_database:
+            self.skipTest('cannot run in: %s' % server_con['data']['type'])

    def setTestServer(self, server):
        self.server = server

@@ -257,11 +257,10 @@ class FileBackedSessionManager(SessionManager):
        current_time = time.time()
        if not session.hmac_digest:
            session.sign(self.secret)
-        elif not session.force_write:
-            if session.last_write is not None and \
-                    (current_time - float(session.last_write)) < \
-                    self.disk_write_delay:
-                return
+        elif not session.force_write and session.last_write is not None and \
+                (current_time - float(session.last_write)) < \
+                self.disk_write_delay:
+            return

        session.last_write = current_time
        session.force_write = False

@@ -402,6 +401,6 @@ def cleanup_session_files():
                current_app.permanent_session_lifetime + \
                datetime.timedelta(days=1)

-            if file_expiration_time <= datetime.datetime.now():
-                if os.path.exists(absolute_file_name):
-                    os.unlink(absolute_file_name)
+            if file_expiration_time <= datetime.datetime.now() and \
+                    os.path.exists(absolute_file_name):
+                os.unlink(absolute_file_name)

@@ -306,12 +306,12 @@ def suggest_based_on_last_token(token, stmt):
                          require_last_table=True,
                          local_tables=stmt.local_tables),)

-    elif p.token_first().value.lower() == 'select':
-        # If the lparen is preceeded by a space chances are we're about to
-        # do a sub-select.
-        if last_word(stmt.text_before_cursor,
-                     'all_punctuations').startswith('('):
-            return (Keyword(),)
+    # If the lparen is preceeded by a space chances are we're about to
+    # do a sub-select.
+    elif p.token_first().value.lower() == 'select' and \
+            last_word(stmt.text_before_cursor,
+                      'all_punctuations').startswith('('):
+        return (Keyword(),)
    prev_prev_tok = prev_tok and p.token_prev(p.token_index(prev_tok))[1]
    if prev_prev_tok and prev_prev_tok.normalized == 'INTO':
        return (

@@ -78,16 +78,16 @@ pgadmin_credentials = test_setup.config_data
# Set environment variables for email and password
os.environ['PGADMIN_SETUP_EMAIL'] = ''
os.environ['PGADMIN_SETUP_PASSWORD'] = ''
-if pgadmin_credentials:
-    if 'pgAdmin4_login_credentials' in pgadmin_credentials:
-        if all(item in pgadmin_credentials['pgAdmin4_login_credentials']
-               for item in ['login_username', 'login_password']):
-            pgadmin_credentials = pgadmin_credentials[
-                'pgAdmin4_login_credentials']
-            os.environ['PGADMIN_SETUP_EMAIL'] = str(pgadmin_credentials[
-                'login_username'])
-            os.environ['PGADMIN_SETUP_PASSWORD'] = str(pgadmin_credentials[
-                'login_password'])
+if pgadmin_credentials and \
+        'pgAdmin4_login_credentials' in pgadmin_credentials and \
+        all(item in pgadmin_credentials['pgAdmin4_login_credentials']
+            for item in ['login_username', 'login_password']):
+    pgadmin_credentials = pgadmin_credentials[
+        'pgAdmin4_login_credentials']
+    os.environ['PGADMIN_SETUP_EMAIL'] = str(pgadmin_credentials[
+        'login_username'])
+    os.environ['PGADMIN_SETUP_PASSWORD'] = str(pgadmin_credentials[
+        'login_password'])

# Execute the setup file
exec(open("setup.py").read())

@@ -248,10 +248,10 @@ def get_test_modules(arguments):
        driver.implicitly_wait(1)
    else:
        options = Options()
-        if test_setup.config_data:
-            if 'headless_chrome' in test_setup.config_data:
-                if test_setup.config_data['headless_chrome']:
-                    options.add_argument("--headless")
+        if test_setup.config_data and \
+                'headless_chrome' in test_setup.config_data and \
+                test_setup.config_data['headless_chrome']:
+            options.add_argument("--headless")
        options.add_argument("--no-sandbox")
        options.add_argument("--disable-setuid-sandbox")
        options.add_argument("--window-size=1280,1024")