diff --git a/web/config.py b/web/config.py index dd95bcd..caf57df 100644 --- a/web/config.py +++ b/web/config.py @@ -371,4 +371,3 @@ try: from config_local import * except ImportError: pass - diff --git a/web/pgAdmin4.py b/web/pgAdmin4.py index 6a3a7cc..6990d14 100644 --- a/web/pgAdmin4.py +++ b/web/pgAdmin4.py @@ -36,7 +36,8 @@ from pgadmin import create_app from pgadmin.utils import u, fs_encoding, file_quote if config.DEBUG: - from pgadmin.utils.javascript.javascript_bundler import JavascriptBundler, JsState + from pgadmin.utils.javascript.javascript_bundler import \ + JavascriptBundler, JsState # Get the config database schema version. We store this in pgadmin.model # as it turns out that putting it in the config files isn't a great idea @@ -50,7 +51,8 @@ config.SETTINGS_SCHEMA_VERSION = SCHEMA_VERSION # Check if the database exists. If it does not, create it. if not os.path.isfile(config.SQLITE_PATH): setupfile = os.path.join( - os.path.dirname(os.path.realpath(u(__file__, fs_encoding))), u'setup.py' + os.path.dirname(os.path.realpath(u(__file__, fs_encoding))), + u'setup.py' ) exec(open(file_quote(setupfile), 'r').read()) @@ -75,7 +77,9 @@ else: if config.DEBUG: if javascriptBundler.report() == JsState.NONE: app.logger.error("Unable to generate javascript") - app.logger.error("To run the app ensure that yarn install command runs successfully") + app.logger.error( + "To run the app ensure that yarn install command runs successfully" + ) raise Exception("No generated javascript, aborting") # Start the web server. The port number should have already been set by the @@ -83,20 +87,24 @@ if config.DEBUG: # Flask default. 
PGADMIN_RUNTIME = False if 'PGADMIN_PORT' in globals(): - app.logger.debug('Running under the desktop runtime, port: %s', - globals()['PGADMIN_PORT']) + app.logger.debug( + 'Running under the desktop runtime, port: %s', + globals()['PGADMIN_PORT'] + ) server_port = int(globals()['PGADMIN_PORT']) PGADMIN_RUNTIME = True elif 'PGADMIN_PORT' in os.environ: port = os.environ['PGADMIN_PORT'] app.logger.debug( 'Not running under the desktop runtime, port: %s', - port) + port + ) server_port = int(port) else: app.logger.debug( 'Not running under the desktop runtime, port: %s', - config.DEFAULT_SERVER_PORT) + config.DEFAULT_SERVER_PORT + ) server_port = config.DEFAULT_SERVER_PORT # Let the application save the status about the runtime for using it later. @@ -113,8 +121,10 @@ else: # If we're under WSGI, we don't need to worry about this if __name__ == '__main__': if not PGADMIN_RUNTIME: - print("Starting %s. Please navigate to http://%s:%d in your browser." % - (config.APP_NAME, config.DEFAULT_SERVER, server_port)) + print( + "Starting %s. Please navigate to http://%s:%d in your browser." % + (config.APP_NAME, config.DEFAULT_SERVER, server_port) + ) sys.stdout.flush() else: # For unknown reason the Qt runtime does not pass the environment diff --git a/web/pgadmin/__init__.py b/web/pgadmin/__init__.py index 4a16ea1..a1dc33c 100644 --- a/web/pgadmin/__init__.py +++ b/web/pgadmin/__init__.py @@ -10,7 +10,8 @@ """The main pgAdmin module. This handles the application initialisation tasks, such as setup of logging, dynamic loading of modules etc.""" import logging -import os, sys +import os +import sys from collections import defaultdict from importlib import import_module @@ -217,7 +218,7 @@ def create_app(app_name=None): # Set SQLITE_PATH to TEST_SQLITE_PATH while running test cases if "PGADMIN_TESTING_MODE" in os. 
environ and \ - os.environ["PGADMIN_TESTING_MODE"] == "1": + os.environ["PGADMIN_TESTING_MODE"] == "1": config.SQLITE_PATH = config.TEST_SQLITE_PATH # Ensure the various working directories exist @@ -280,7 +281,9 @@ def create_app(app_name=None): elif hasattr(session, 'PGADMIN_LANGUAGE'): language = getattr(session, 'PGADMIN_LANGUAGE', language) elif hasattr(request.cookies, 'PGADMIN_LANGUAGE'): - language = getattr(request.cookies, 'PGADMIN_LANGUAGE', language) + language = getattr( + request.cookies, 'PGADMIN_LANGUAGE', language + ) return language @@ -332,15 +335,20 @@ def create_app(app_name=None): # Setup security ########################################################################## with app.app_context(): - config.CSRF_SESSION_KEY = Keys.query.filter_by(name = 'CSRF_SESSION_KEY').first().value - config.SECRET_KEY = Keys.query.filter_by(name = 'SECRET_KEY').first().value - config.SECURITY_PASSWORD_SALT = Keys.query.filter_by(name = 'SECURITY_PASSWORD_SALT').first().value + config.CSRF_SESSION_KEY = Keys.query.filter_by( + name='CSRF_SESSION_KEY').first().value + config.SECRET_KEY = Keys.query.filter_by( + name='SECRET_KEY').first().value + config.SECURITY_PASSWORD_SALT = Keys.query.filter_by( + name='SECURITY_PASSWORD_SALT').first().value # Update the app.config with proper security keyes for signing CSRF data, # signing cookies, and the SALT for hashing the passwords. 
- app.config.update(dict(CSRF_SESSION_KEY=config.CSRF_SESSION_KEY)) - app.config.update(dict(SECRET_KEY=config.SECRET_KEY)) - app.config.update(dict(SECURITY_PASSWORD_SALT=config.SECURITY_PASSWORD_SALT)) + app.config.update(dict({ + 'CSRF_SESSION_KEY': config.CSRF_SESSION_KEY, + 'SECRET_KEY': config.SECRET_KEY, + 'SECURITY_PASSWORD_SALT': config.SECURITY_PASSWORD_SALT + })) security.init_app(app, user_datastore) @@ -376,7 +384,6 @@ def create_app(app_name=None): if user_languages and language: language = user_languages.set(language) - ########################################################################## # Register any local servers we can discover ########################################################################## @@ -397,25 +404,25 @@ def create_app(app_name=None): '''Add a server to the config database''' def add_server(user_id, servergroup_id, name, superuser, port, discovery_id, comment): - # Create a server object if needed, and store it. + # Create a server object if needed, and store it. 
servers = Server.query.filter_by( user_id=user_id, discovery_id=svr_discovery_id ).order_by("id") if servers.count() > 0: - return; + return svr = Server(user_id=user_id, - servergroup_id=servergroup_id, - name=name, - host='localhost', - port=port, - maintenance_db='postgres', - username=superuser, - ssl_mode='prefer', - comment=svr_comment, - discovery_id=discovery_id) + servergroup_id=servergroup_id, + name=name, + host='localhost', + port=port, + maintenance_db='postgres', + username=superuser, + ssl_mode='prefer', + comment=svr_comment, + discovery_id=discovery_id) db.session.add(svr) db.session.commit() @@ -505,7 +512,8 @@ def create_app(app_name=None): description, data_directory )) - add_server(user_id, servergroup_id, svr_name, svr_superuser, svr_port, svr_discovery_id, svr_comment) + add_server(user_id, servergroup_id, svr_name, + svr_superuser, svr_port, svr_discovery_id, svr_comment) except: pass @@ -564,7 +572,7 @@ def create_app(app_name=None): # Minify output ########################################################################## # HTMLMIN doesn't work with Python 2.6. 
- if not config.DEBUG and sys.version_info >= (2,7): + if not config.DEBUG and sys.version_info >= (2, 7): from flask_htmlmin import HTMLMIN HTMLMIN(app) diff --git a/web/pgadmin/tools/__init__.py b/web/pgadmin/tools/__init__.py index f4dffb1..200d16e 100644 --- a/web/pgadmin/tools/__init__.py +++ b/web/pgadmin/tools/__init__.py @@ -18,27 +18,28 @@ from pgadmin.utils.ajax import bad_request MODULE_NAME = 'tools' + class ToolsModule(PgAdminModule): def get_own_javascripts(self): return [{ 'name': 'translations', 'path': url_for('tools.index') + "translations", 'when': None - },{ + }, { 'name': 'pgadmin-sqlfoldcode', 'path': url_for( 'static', filename='js/codemirror/addon/fold/pgadmin-sqlfoldcode' ), 'when': 'debugger' - },{ + }, { 'name': 'slick.pgadmin.editors', 'path': url_for( 'static', filename='js/slickgrid/slick.pgadmin.editors' ), 'when': 'debugger' - },{ + }, { 'name': 'slick.pgadmin.formatters', 'path': url_for( 'static', @@ -47,6 +48,7 @@ class ToolsModule(PgAdminModule): 'when': 'debugger' }] + # Initialise the module blueprint = ToolsModule(MODULE_NAME, __name__) @@ -56,10 +58,16 @@ def index(): """Calling tools index URL directly is not allowed.""" return bad_request(gettext('This URL cannot be requested directly.')) + @blueprint.route("/translations.js") def translations(): - """Return a js file that will handle translations so Flask interpolation can be isolated""" - template = render_template("js/translations.js", translations=get_translations()._catalog) + """Return a js file that will handle translations so Flask interpolation + can be isolated + """ + template = render_template( + "js/translations.js", + translations=get_translations()._catalog + ) return Response( response=template, status=200, diff --git a/web/pgadmin/tools/backup/__init__.py b/web/pgadmin/tools/backup/__init__.py index a28cf0d..715ff6e 100644 --- a/web/pgadmin/tools/backup/__init__.py +++ b/web/pgadmin/tools/backup/__init__.py @@ -194,6 +194,7 @@ class 
BackupMessage(IProcessDesc): return res + @blueprint.route("/") @login_required def index(): @@ -348,7 +349,8 @@ def create_backup_objects_job(sid): Args: sid: Server ID - Creates a new job for backup task (Backup Database(s)/Schema(s)/Table(s)) + Creates a new job for backup task + (Backup Database(s)/Schema(s)/Table(s)) Returns: None diff --git a/web/pgadmin/tools/datagrid/__init__.py b/web/pgadmin/tools/datagrid/__init__.py index fbd63e6..f16ecc6 100644 --- a/web/pgadmin/tools/datagrid/__init__.py +++ b/web/pgadmin/tools/datagrid/__init__.py @@ -28,6 +28,7 @@ from config import PG_DEFAULT_DRIVER from pgadmin.utils.preferences import Preferences from pgadmin.model import Server + class DataGridModule(PgAdminModule): """ class DataGridModule(PgAdminModule) @@ -72,7 +73,9 @@ blueprint = DataGridModule(MODULE_NAME, __name__, static_url_path='/static') @blueprint.route("/") @login_required def index(): - return bad_request(errormsg=gettext('This URL cannot be requested directly.')) + return bad_request( + errormsg=gettext('This URL cannot be requested directly.') + ) @blueprint.route("/css/datagrid.css") @@ -90,8 +93,10 @@ def show_filter(): @blueprint.route( - '/initialize/datagrid/////', - methods=["PUT", "POST"], endpoint="initialize_datagrid" + '/initialize/datagrid/////' + '', + methods=["PUT", "POST"], + endpoint="initialize_datagrid" ) @login_required def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id): @@ -103,7 +108,9 @@ def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id): Args: cmd_type: Contains value for which menu item is clicked. 
- obj_type: Contains type of selected object for which data grid to be render + obj_type: Contains type of selected object for which data grid to + be render + sid: Server Id did: Database Id obj_id: Id of currently selected object @@ -135,9 +142,11 @@ def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id): obj_type = 'table' # Get the object as per the object type - command_obj = ObjectRegistry.get_object(obj_type, conn_id=conn_id, sid=sid, - did=did, obj_id=obj_id, cmd_type=cmd_type, - sql_filter=filter_sql) + command_obj = ObjectRegistry.get_object( + obj_type, conn_id=conn_id, sid=sid, + did=did, obj_id=obj_id, cmd_type=cmd_type, + sql_filter=filter_sql + ) except Exception as e: return internal_server_error(errormsg=str(e)) @@ -162,8 +171,12 @@ def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id): pref = Preferences.module('sqleditor') new_browser_tab = pref.preference('new_browser_tab').get() - return make_json_response(data={'gridTransId': trans_id, - 'newBrowserTab': new_browser_tab}) + return make_json_response( + data={ + 'gridTransId': trans_id, + 'newBrowserTab': new_browser_tab + } + ) @blueprint.route( @@ -199,9 +212,9 @@ def panel(trans_id, is_query_tool, editor_title): Animations and transitions are not automatically GPU accelerated and by default use browser's slow rendering engine. We need to set 'translate3d' value of '-webkit-transform' property in order to use GPU. After applying - this property under linux, Webkit calculates wrong position of the elements - so panel contents are not visible. To make it work, we need to explicitly - set '-webkit-transform' property to 'none' for .ajs-notifier, + this property under linux, Webkit calculates wrong position of the + elements so panel contents are not visible. To make it work, we need to + explicitly set '-webkit-transform' property to 'none' for .ajs-notifier, .ajs-message, .ajs-modal classes. 
This issue is only with linux runtime application and observed in Query @@ -228,7 +241,9 @@ def panel(trans_id, is_query_tool, editor_title): 'prompt_save_query_changes' ).get() else: - prompt_save_changes = pref.preference('prompt_save_data_changes').get() + prompt_save_changes = pref.preference( + 'prompt_save_data_changes' + ).get() display_connection_status = pref.preference('connection_status').get() @@ -242,8 +257,9 @@ def panel(trans_id, is_query_tool, editor_title): trans_obj = pickle.loads(session_obj['command_obj']) s = Server.query.filter_by(id=trans_obj.sid).first() if s and s.bgcolor: - # If background is set to white means we do not have to change the - # title background else change it as per user specified background + # If background is set to white means we do not have to change + # the title background else change it as per user specified + # background if s.bgcolor != '#ffffff': bgcolor = s.bgcolor fgcolor = s.fgcolor or 'black' @@ -262,8 +278,8 @@ def panel(trans_id, is_query_tool, editor_title): client_platform=user_agent.platform, bgcolor=bgcolor, fgcolor=fgcolor, - # convert python boolean value to equivalent js boolean literal before - # passing it to html template. + # convert python boolean value to equivalent js boolean literal + # before passing it to html template. prompt_save_changes='true' if prompt_save_changes else 'false', display_connection_status=display_connection_status ) @@ -305,7 +321,9 @@ def initialize_query_tool(sid, did=None): ) try: - command_obj = ObjectRegistry.get_object('query_tool', sid=sid, did=did) + command_obj = ObjectRegistry.get_object( + 'query_tool', sid=sid, did=did + ) except Exception as e: return internal_server_error(errormsg=str(e)) @@ -320,7 +338,8 @@ def initialize_query_tool(sid, did=None): # Use pickle to store the command object which will be used # later by the sql grid module. 
sql_grid_data[trans_id] = { - 'command_obj': pickle.dumps(command_obj, -1) # -1 specify the highest protocol version available + # -1 specify the highest protocol version available + 'command_obj': pickle.dumps(command_obj, -1) } # Store the grid dictionary into the session variable @@ -329,8 +348,12 @@ def initialize_query_tool(sid, did=None): pref = Preferences.module('sqleditor') new_browser_tab = pref.preference('new_browser_tab').get() - return make_json_response(data={'gridTransId': trans_id, - 'newBrowserTab': new_browser_tab}) + return make_json_response( + data={ + 'gridTransId': trans_id, + 'newBrowserTab': new_browser_tab + } + ) @blueprint.route('/close/', methods=["GET"], endpoint='close') @@ -356,8 +379,10 @@ def close(trans_id): # if connection id is None then no need to release the connection if cmd_obj.conn_id is not None: try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(cmd_obj.sid) - conn = manager.connection(did=cmd_obj.did, conn_id=cmd_obj.conn_id) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(cmd_obj.sid) + conn = manager.connection( + did=cmd_obj.did, conn_id=cmd_obj.conn_id) except Exception as e: return internal_server_error(errormsg=str(e)) @@ -365,16 +390,18 @@ def close(trans_id): if conn.connected(): manager.release(did=cmd_obj.did, conn_id=cmd_obj.conn_id) - # Remove the information of unique transaction id from the session variable. + # Remove the information of unique transaction id from the + # session variable. 
grid_data.pop(str(trans_id), None) session['gridData'] = grid_data return make_json_response(data={'status': True}) -@blueprint.route('/filter/validate///', - methods=["PUT", "POST"], endpoint='filter_validate' - ) +@blueprint.route( + '/filter/validate///', + methods=["PUT", "POST"], endpoint='filter_validate' +) @login_required def validate_filter(sid, did, obj_id): """ @@ -408,6 +435,6 @@ def script(): """render the required javascript""" return Response( response=render_template("datagrid/js/datagrid.js", _=gettext), - status=200, mimetype="application/javascript" + status=200, + mimetype="application/javascript" ) - diff --git a/web/pgadmin/tools/debugger/__init__.py b/web/pgadmin/tools/debugger/__init__.py index 099adb4..b4ee825 100644 --- a/web/pgadmin/tools/debugger/__init__.py +++ b/web/pgadmin/tools/debugger/__init__.py @@ -14,7 +14,8 @@ MODULE_NAME = 'debugger' import simplejson as json import random -from flask import url_for, Response, render_template, request, session, current_app +from flask import url_for, Response, render_template, request, session, \ + current_app from flask_babel import gettext from flask_security import login_required from werkzeug.useragents import UserAgent @@ -42,7 +43,8 @@ class DebuggerModule(PgAdminModule): Methods: ------- * get_own_javascripts(self) - - Method is used to load the required javascript files for debugger module + - Method is used to load the required javascript files for debugger + module """ LABEL = gettext("Debugger") @@ -71,12 +73,11 @@ class DebuggerModule(PgAdminModule): 'will be opened in a new browser tab.') ) - def get_exposed_url_endpoints(self): """ Returns the list of URLs exposed to the client. 
""" - return ['debugger.index','debugger.init_for_function', + return ['debugger.index', 'debugger.init_for_function', 'debugger.init_for_trigger', 'debugger.direct', 'debugger.initialize_target_for_function', 'debugger.initialize_target_for_trigger', 'debugger.close', @@ -88,7 +89,8 @@ class DebuggerModule(PgAdminModule): 'debugger.select_frame', 'debugger.get_arguments', 'debugger.set_arguments', 'debugger.poll_end_execution_result', 'debugger.poll_result' - ] + ] + blueprint = DebuggerModule(MODULE_NAME, __name__) @@ -96,38 +98,51 @@ blueprint = DebuggerModule(MODULE_NAME, __name__) @blueprint.route("/", endpoint='index') @login_required def index(): - return bad_request(errormsg=gettext("This URL cannot be called directly.")) + return bad_request( + errormsg=gettext("This URL cannot be called directly.") + ) @blueprint.route("/js/debugger.js") @login_required def script(): """render the main debugger javascript file""" - return Response(response=render_template("debugger/js/debugger.js", _=gettext), - status=200, - mimetype="application/javascript") + return Response( + response=render_template("debugger/js/debugger.js", _=gettext), + status=200, + mimetype="application/javascript" + ) @blueprint.route("/js/debugger_ui.js") @login_required def script_debugger_js(): """render the debugger UI javascript file""" - return Response(response=render_template("debugger/js/debugger_ui.js", _=gettext), - status=200, - mimetype="application/javascript") + return Response( + response=render_template("debugger/js/debugger_ui.js", _=gettext), + status=200, + mimetype="application/javascript" + ) @blueprint.route("/js/direct.js") @login_required def script_debugger_direct_js(): - """render the javascript file required send and receive the response from server for debugging""" - return Response(response=render_template("debugger/js/direct.js", _=gettext), - status=200, - mimetype="application/javascript") + """ + Render the javascript file required send and receive the 
response + from server for debugging + """ + return Response( + response=render_template("debugger/js/direct.js", _=gettext), + status=200, + mimetype="application/javascript" + ) def update_session_debugger_transaction(trans_id, data): - """Update the session variables required for debugger with transaction ID""" + """ + Update the session variables required for debugger with transaction ID + """ debugger_data = session['debuggerData'] debugger_data[str(trans_id)] = data @@ -135,7 +150,10 @@ def update_session_debugger_transaction(trans_id, data): def update_session_function_transaction(trans_id, data): - """Update the session variables of functions required to debug with transaction ID""" + """ + Update the session variables of functions required to debug with + transaction ID + """ function_data = session['functionData'] function_data[str(trans_id)] = data session['functionData'] = function_data @@ -154,10 +172,14 @@ def init_function(node_type, sid, did, scid, fid, trid=None): """ init_function(node_type, sid, did, scid, fid, trid) - This method is responsible to initialize the function required for debugging. - This method is also responsible for storing the all functions data to session variable. - This is only required for direct debugging. As Indirect debugging does not require these data because user will - provide all the arguments and other functions information through another session to invoke the target. + This method is responsible to initialize the function required for + debugging. + This method is also responsible for storing the all functions data to + session variable. + This is only required for direct debugging. As Indirect debugging does + not require these data because user will + provide all the arguments and other functions information through another + session to invoke the target. 
Parameters: node_type @@ -190,25 +212,34 @@ def init_function(node_type, sid, did, scid, fid, trid=None): if node_type == 'trigger': # Find trigger function id from trigger id - sql = render_template("/".join([template_path, 'get_trigger_function_info.sql']), table_id=fid, trigger_id=trid) + sql = render_template( + "/".join([template_path, 'get_trigger_function_info.sql']), + table_id=fid, trigger_id=trid + ) status, tr_set = conn.execute_dict(sql) if not status: - current_app.logger.debug("Error retrieving trigger function information from database") + current_app.logger.debug( + "Error retrieving trigger function information from database") return internal_server_error(errormsg=tr_set) fid = tr_set['rows'][0]['tgfoid'] - # if ppas server and node type is edb function or procedure then extract last argument as function id + # if ppas server and node type is edb function or procedure then extract + # last argument as function id if node_type == 'edbfunc' or node_type == 'edbproc': fid = trid sql = '' - sql = render_template("/".join([template_path, 'get_function_debug_info.sql']), is_ppas_database=ppas_server, - hasFeatureFunctionDefaults=True, fid=fid) + sql = render_template( + "/".join([template_path, 'get_function_debug_info.sql']), + is_ppas_database=ppas_server, + hasFeatureFunctionDefaults=True, fid=fid + ) status, r_set = conn.execute_dict(sql) if not status: - current_app.logger.debug("Error retrieving function information from database") + current_app.logger.debug( + "Error retrieving function information from database") return internal_server_error(errormsg=r_set) ret_status = status @@ -220,10 +251,13 @@ def init_function(node_type, sid, did, scid, fid, trid=None): # Function with return type "trigger" cannot be debugged. 
if ":" in r_set['rows'][0]['name']: ret_status = False - msg = gettext("Functions with a colon in the name cannot be debugged.") - elif ppas_server and r_set['rows'][0]['prosrc'].lstrip().startswith('$__EDBwrapped__$'): + msg = gettext( + "Functions with a colon in the name cannot be debugged.") + elif ppas_server and r_set['rows'][0]['prosrc'].lstrip().startswith( + '$__EDBwrapped__$'): ret_status = False - msg = gettext("EDB Advanced Server wrapped functions cannot be debugged.") + msg = gettext( + "EDB Advanced Server wrapped functions cannot be debugged.") # We cannot debug if PPAS and argument mode is VARIADIC elif ppas_server and r_set['rows'][0]['lanname'] == 'edbspl' and \ r_set['rows'][0]['proargmodes'] is not None and \ @@ -235,21 +269,38 @@ def init_function(node_type, sid, did, scid, fid, trid=None): ) else: status_in, rid_tar = conn.execute_scalar( - "SELECT count(*) FROM pg_proc WHERE proname = 'pldbg_get_target_info'") + "SELECT count(*) FROM pg_proc WHERE " + "proname = 'pldbg_get_target_info'" + ) if not status_in: - current_app.logger.debug("Failed to find the pldbgapi extension in this database.") - return internal_server_error(gettext("Failed to find the pldbgapi extension in this database.")) + current_app.logger.debug( + "Failed to find the pldbgapi extension in this database.") + return internal_server_error( + gettext("Failed to find the pldbgapi extension in " + "this database.") + ) - #We also need to check to make sure that the debugger library is also available. + # We also need to check to make sure that the debugger library is + # also available. 
status_in, ret_oid = conn.execute_scalar( - "SELECT count(*) FROM pg_proc WHERE proname = 'plpgsql_oid_debug'") + "SELECT count(*) FROM pg_proc WHERE " + "proname = 'plpgsql_oid_debug'" + ) if not status_in: - current_app.logger.debug("Failed to find the pldbgapi extension in this database.") - return internal_server_error(gettext("Failed to find the pldbgapi extension in this database.")) + current_app.logger.debug( + "Failed to find the pldbgapi extension in this database.") + return internal_server_error( + gettext("Failed to find the pldbgapi extension in " + "this database.") + ) - # Debugger plugin is configured but pldggapi extension is not created so return error + # Debugger plugin is configured but pldbgapi extension is not + # created so return error if rid_tar == '0' or ret_oid == '0': - msg = gettext("The debugger plugin is not enabled. Please create the pldbgapi extension in this database.") + msg = gettext( + "The debugger plugin is not enabled. Please create the " + "pldbgapi extension in this database." 
+ ) ret_status = False else: ret_status = False @@ -272,11 +323,13 @@ def init_function(node_type, sid, did, scid, fid, trid=None): 'default_value': r_set['rows'][0]['proargdefaults'], 'require_input': True} - # Below will check do we really required for the user input arguments and show input dialog + # Below will check do we really required for the user input arguments and + # show input dialog if not r_set['rows'][0]['proargtypenames']: data['require_input'] = False else: - if r_set['rows'][0]['pkg'] != 0 and r_set['rows'][0]['pkgconsoid'] != 0: + if r_set['rows'][0]['pkg'] != 0 and \ + r_set['rows'][0]['pkgconsoid'] != 0: data['require_input'] = True if r_set['rows'][0]['proargmodes']: @@ -328,19 +381,26 @@ def direct_new(trans_id): obj = debugger_data[str(trans_id)] - # if indirect debugging pass value 0 to client and for direct debugging pass it to 1 + # if indirect debugging pass value 0 to client and for direct debugging + # pass it to 1 debug_type = 0 if obj['debug_type'] == 'indirect' else 1 """ - Animations and transitions are not automatically GPU accelerated and by default use browser's slow rendering engine. - We need to set 'translate3d' value of '-webkit-transform' property in order to use GPU. - After applying this property under linux, Webkit calculates wrong position of the elements so panel contents are not visible. - To make it work, we need to explicitly set '-webkit-transform' property to 'none' for .ajs-notifier, .ajs-message, .ajs-modal classes. - - This issue is only with linux runtime application and observed in Query tool and debugger. - When we open 'Open File' dialog then whole Query-tool panel content is not visible though it contains HTML element in back end. - - The port number should have already been set by the runtime if we're running in desktop mode. + Animations and transitions are not automatically GPU accelerated and by + default use browser's slow rendering engine. 
+ We need to set 'translate3d' value of '-webkit-transform' property in + order to use GPU. + After applying this property under linux, Webkit calculates wrong position + of the elements so panel contents are not visible. + To make it work, we need to explicitly set '-webkit-transform' property + to 'none' for .ajs-notifier, .ajs-message, .ajs-modal classes. + + This issue is only with linux runtime application and observed in Query + tool and debugger. When we open 'Open File' dialog then whole Query-tool + panel content is not visible though it contains HTML element in back end. + + The port number should have already been set by the runtime if we're + running in desktop mode. """ is_linux_platform = False @@ -365,12 +425,14 @@ def direct_new(trans_id): @blueprint.route( - '/initialize_target/////', + '/initialize_target/////' + '', methods=['GET', 'POST'], endpoint='initialize_target_for_function' ) @blueprint.route( - '/initialize_target//////', + '/initialize_target/////' + '/', methods=['GET', 'POST'], endpoint='initialize_target_for_trigger' ) @@ -416,8 +478,8 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): # If user is super user then we should check debugger library is # loaded or not if not user['is_superuser']: - msg = gettext("You must be a superuser to set a global breakpoint " - "and perform indirect debugging.") + msg = gettext("You must be a superuser to set a global breakpoint" + " and perform indirect debugging.") return internal_server_error(errormsg=msg) else: status_in, rid_pre = conn.execute_scalar( @@ -445,11 +507,15 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): if tri_id is not None: # Find trigger function id from trigger id - sql = render_template("/".join([template_path, 'get_trigger_function_info.sql']), table_id=func_id, trigger_id=tri_id) + sql = render_template( + "/".join([template_path, 'get_trigger_function_info.sql']), + table_id=func_id, trigger_id=tri_id + ) status, 
tr_set = conn.execute_dict(sql) if not status: - current_app.logger.debug("Error retrieving trigger function information from database") + current_app.logger.debug( + "Error retrieving trigger function information from database") return internal_server_error(errormsg=tr_set) func_id = tr_set['rows'][0]['tgfoid'] @@ -457,7 +523,8 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): # Create a unique id for the transaction trans_id = str(random.randint(1, 9999999)) - # If there is no debugging information in session variable then create the store that information + # If there is no debugging information in session variable then create + # the store that information if 'debuggerData' not in session: debugger_data = dict() else: @@ -467,9 +534,10 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): # Find out the debugger version and store it in session variables status, rid = conn.execute_scalar( - "SELECT COUNT(*) FROM pg_catalog.pg_proc p \ - LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid \ - WHERE n.nspname = ANY(current_schemas(false)) AND p.proname = 'pldbg_get_proxy_info';" + "SELECT COUNT(*) FROM pg_catalog.pg_proc p" + " LEFT JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid" + " WHERE n.nspname = ANY(current_schemas(false)) AND" + " p.proname = 'pldbg_get_proxy_info';" ) if not status: @@ -478,7 +546,8 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): if rid == 0: debugger_version = 1 - status, rid = conn.execute_scalar("SELECT proxyapiver FROM pldbg_get_proxy_info();") + status, rid = conn.execute_scalar( + "SELECT proxyapiver FROM pldbg_get_proxy_info();") if status: if rid == 2 or rid == 3: @@ -489,9 +558,11 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): # Add the debugger version information to pgadmin4 log file current_app.logger.debug("Debugger version is: %d", debugger_version) - # We need to pass the value entered by the user in 
dialog for direct debugging - # Here we get the value in case of direct debugging so update the session variables accordingly - # For indirect debugging user will provide the data from another session so below condition will be be required + # We need to pass the value entered by the user in dialog for direct + # debugging, Here we get the value in case of direct debugging so update + # the session variables accordingly, For indirect debugging user will + # provide the data from another session so below condition will + # be required if request.method == 'POST': data = json.loads(request.values['data'], encoding='utf-8') if data: @@ -500,8 +571,9 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): session['funcData'] = d # Update the debugger data session variable - # Here frame_id is required when user debug the multilevel function. When user select the frame from client we - # need to update the frame here and set the breakpoint information on that function oid + # Here frame_id is required when user debug the multilevel function. 
+ # When user select the frame from client we need to update the frame + # here and set the breakpoint information on that function oid debugger_data[str(trans_id)] = { 'conn_id': conn_id, 'server_id': sid, @@ -549,19 +621,21 @@ def initialize_target(debug_type, sid, did, scid, func_id, tri_id=None): # Update the session variable of function information session['functionData'] = function_data - # Delete the 'funcData' session variables as it is not used now as target is initialized + # Delete the 'funcData' session variables as it is not used now as target + # is initialized del session['funcData'] pref = Preferences.module('debugger') new_browser_tab = pref.preference('debugger_new_browser_tab').get() - return make_json_response(data={'status': status, 'debuggerTransId': trans_id, 'newBrowserTab': new_browser_tab}) -@blueprint.route('/close/', methods=["DELETE"], endpoint='close') +@blueprint.route( + '/close/', methods=["DELETE"], endpoint='close' +) def close(trans_id): """ close(trans_id) @@ -586,10 +660,13 @@ def close(trans_id): obj = debugger_data[str(trans_id)] try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['conn_id']) conn.cancel_transaction(obj['conn_id'], obj['database_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) conn.cancel_transaction(obj['exe_conn_id'], obj['database_id']) manager.release(conn_id=obj['conn_id']) manager.release(conn_id=obj['exe_conn_id']) @@ -601,7 +678,9 @@ def close(trans_id): return internal_server_error(errormsg=str(e)) -@blueprint.route('/restart/', methods=['GET'], endpoint='restart') +@blueprint.route( + '/restart/', methods=['GET'], 
endpoint='restart' +) @login_required def restart_debugging(trans_id): """ @@ -617,17 +696,24 @@ def restart_debugging(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' + ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) conn = manager.connection(did=obj['database_id'], conn_id=obj['conn_id']) if conn.connected(): - # Update the session variable "restart_debug" to know that same function debugging has been restarted. - # Delete the existing debugger data in session variable and update with new data + # Update the session variable "restart_debug" to know that same + # function debugging has been restarted. 
Delete the existing debugger + # data in session variable and update with new data if obj['restart_debug'] == 0: debugger_data = session['debuggerData'] session_obj = debugger_data[str(trans_id)] @@ -638,24 +724,32 @@ def restart_debugging(trans_id): if 'functionData' not in session: function_data = dict() else: + session_function_data = session['functionData'][str(trans_id)] function_data = { 'server_id': obj['server_id'], 'database_id': obj['database_id'], 'schema_id': obj['schema_id'], 'function_id': obj['function_id'], 'trans_id': str(trans_id), - 'proargmodes': session['functionData'][str(trans_id)]['arg_mode'], - 'proargtypenames': session['functionData'][str(trans_id)]['args_type'], - 'pronargdefaults': session['functionData'][str(trans_id)]['use_default'], - 'proargdefaults': session['functionData'][str(trans_id)]['default_value'], - 'proargnames': session['functionData'][str(trans_id)]['args_name'], - 'require_input': session['functionData'][str(trans_id)]['require_input'] + 'proargmodes': session_function_data['arg_mode'], + 'proargtypenames': session_function_data['args_type'], + 'pronargdefaults': session_function_data['use_default'], + 'proargdefaults': session_function_data['default_value'], + 'proargnames': session_function_data['args_name'], + 'require_input': session_function_data['require_input'] } - return make_json_response(data={'status': True, 'restart_debug': True, 'result': function_data}) + return make_json_response( + data={ + 'status': True, 'restart_debug': True, 'result': function_data + } + ) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.' 
+ ) return make_json_response(data={'status': status}) @@ -669,7 +763,8 @@ def start_debugger_listener(trans_id): """ start_debugger_listener(trans_id) - This method is responsible to listen and get the required information requested by user during debugging + This method is responsible to listen and get the required information + requested by user during debugging Parameters: trans_id @@ -679,8 +774,13 @@ def start_debugger_listener(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection with the server has' + 'been closed.' + ) + } ) obj = debugger_data[str(trans_id)] @@ -698,8 +798,8 @@ def start_debugger_listener(trans_id): else: template_path = 'debugger/sql/v2' - # If user again start the same debug function with different arguments then we need to save that values to session - # variable and database. + # If user again start the same debug function with different arguments + # then we need to save that values to session variable and database. if request.method == 'POST': data = json.loads(request.values['data'], encoding='utf-8') if data: @@ -710,95 +810,115 @@ def start_debugger_listener(trans_id): if conn.connected(): - # For the direct debugging extract the function arguments values from user and pass to jinja template - # to create the query for execution. + # For the direct debugging extract the function arguments values from + # user and pass to jinja template to create the query for execution. 
if obj['debug_type'] == 'direct': str_query = '' - - if session['functionData'][str(trans_id)]['pkg'] == 0: + session_function_data = session['functionData'][str(trans_id)] + if session_function_data['pkg'] == 0: # Form the function name with schema name - func_name = driver.qtIdent(conn, session['functionData'][str(trans_id)]['schema']) + '.' + driver.qtIdent( - conn, session['functionData'][str(trans_id)]['name']) + func_name = driver.qtIdent( + conn, + session_function_data['schema'], + session_function_data['name'] + ) else: # Form the edb package function/procedure name with schema name - func_name = driver.qtIdent(conn, session['functionData'][str(trans_id)]['schema']) + '.' + \ - driver.qtIdent(conn, session['functionData'][str(trans_id)]['pkgname']) + '.' + \ - driver.qtIdent(conn, session['functionData'][str(trans_id)]['name']) + func_name = driver.qtIdent( + conn, session_function_data['schema'], + session_function_data['pkgname'], + session_function_data['name'] + ) if obj['restart_debug'] == 0: # render the SQL template and send the query to server - if session['functionData'][str(trans_id)]['language'] == 'plpgsql': - sql = render_template("/".join([template_path, 'debug_plpgsql_execute_target.sql']), - packge_oid=session['functionData'][str(trans_id)]['pkg'], - function_oid=obj['function_id']) + if session_function_data['language'] == 'plpgsql': + sql = render_template( + "/".join([template_path, + 'debug_plpgsql_execute_target.sql']), + packge_oid=session_function_data['pkg'], + function_oid=obj['function_id'] + ) else: - sql = render_template("/".join([template_path, 'debug_spl_execute_target.sql']), - packge_oid=session['functionData'][str(trans_id)]['pkg'], - function_oid=obj['function_id']) + sql = render_template( + "/".join([template_path, + 'debug_spl_execute_target.sql']), + packge_oid=session_function_data['pkg'], + function_oid=obj['function_id'] + ) status, res = conn.execute_dict(sql) if not status: return 
internal_server_error(errormsg=res) - if session['functionData'][str(trans_id)]['arg_mode']: + if session_function_data['arg_mode']: # In EDBAS 90, if an SPL-function has both an OUT-parameter - # and a return value (which is not possible on PostgreSQL otherwise), - # the return value is transformed into an extra OUT-parameter - # named "_retval_" - if session['functionData'][str(trans_id)]['args_name']: - arg_name = session['functionData'][str(trans_id)]['args_name'].split(",") + # and a return value (which is not possible on PostgreSQL + # otherwise), the return value is transformed into an extra + # OUT-parameter named "_retval_" + if session_function_data['args_name']: + arg_name = session_function_data['args_name'].split(",") if '_retval_' in arg_name: - arg_mode = session['functionData'][str(trans_id)]['arg_mode'].split(",") + arg_mode = session_function_data['arg_mode'].split(",") arg_mode.pop() else: - arg_mode = session['functionData'][str(trans_id)]['arg_mode'].split(",") + arg_mode = session_function_data['arg_mode'].split(",") else: - arg_mode = session['functionData'][str(trans_id)]['arg_mode'].split(",") + arg_mode = session_function_data['arg_mode'].split(",") else: - arg_mode = ['i'] * len(session['functionData'][str(trans_id)]['args_type'].split(",")) + arg_mode = ['i'] * len( + session_function_data['args_type'].split(",") + ) - if session['functionData'][str(trans_id)]['args_type']: - if session['functionData'][str(trans_id)]['args_name']: - arg_name = session['functionData'][str(trans_id)]['args_name'].split(",") + if session_function_data['args_type']: + if session_function_data['args_name']: + arg_name = session_function_data['args_name'].split(",") if '_retval_' in arg_name: - arg_type = session['functionData'][str(trans_id)]['args_type'].split(",") + arg_type = session_function_data[ + 'args_type'].split(",") arg_type.pop() else: - arg_type = session['functionData'][str(trans_id)]['args_type'].split(",") + arg_type = session_function_data[ + 
'args_type'].split(",") else: - arg_type = session['functionData'][str(trans_id)]['args_type'].split(",") + arg_type = session_function_data['args_type'].split(",") # Below are two different template to execute and start executer if manager.server_type != 'pg' and manager.version < 90300: - str_query = render_template("/".join(['debugger/sql', 'execute_edbspl.sql']), - func_name=func_name, - is_func=session['functionData'][str(trans_id)]['is_func'], - lan_name=session['functionData'][str(trans_id)]['language'], - ret_type=session['functionData'][str(trans_id)]['return_type'], - data=session['functionData'][str(trans_id)]['args_value'], - arg_type=arg_type, - args_mode=arg_mode - ) + str_query = render_template( + "/".join(['debugger/sql', 'execute_edbspl.sql']), + func_name=func_name, + is_func=session_function_data['is_func'], + lan_name=session_function_data['language'], + ret_type=session_function_data['return_type'], + data=session_function_data['args_value'], + arg_type=arg_type, + args_mode=arg_mode + ) else: - str_query = render_template("/".join(['debugger/sql', 'execute_plpgsql.sql']), - func_name=func_name, - is_func=session['functionData'][str(trans_id)]['is_func'], - ret_type=session['functionData'][str(trans_id)]['return_type'], - data=session['functionData'][str(trans_id)]['args_value'] - ) + str_query = render_template( + "/".join(['debugger/sql', 'execute_plpgsql.sql']), + func_name=func_name, + is_func=session_function_data['is_func'], + ret_type=session_function_data['return_type'], + data=session_function_data['args_value'] + ) status, result = conn.execute_async(str_query) if not status: return internal_server_error(errormsg=result) else: if conn.connected(): - # For indirect debugging first create the listener and then wait for the target - sql = render_template("/".join([template_path, 'create_listener.sql'])) + # For indirect debugging first create the listener and then + # wait for the target + sql = render_template( + 
"/".join([template_path, 'create_listener.sql'])) status, res = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) - # Get and store the session variable which is required to fetch other information during debugging + # Get and store the session variable which is required to fetch + # other information during debugging int_session_id = res['rows'][0]['pldbg_create_listener'] # In EnterpriseDB versions <= 9.1 the @@ -808,25 +928,31 @@ def start_debugger_listener(trans_id): # takes four arguments like the community version has always # done. if server_type == 'ppas' and ver <= 90100: - sql = render_template("/".join([template_path, 'add_breakpoint_edb.sql']), - session_id=int_session_id, - function_oid=obj['function_id']) + sql = render_template( + "/".join([template_path, 'add_breakpoint_edb.sql']), + session_id=int_session_id, + function_oid=obj['function_id'] + ) status, res = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) else: - sql = render_template("/".join([template_path, 'add_breakpoint_pg.sql']), - session_id=int_session_id, - function_oid=obj['function_id']) + sql = render_template( + "/".join([template_path, 'add_breakpoint_pg.sql']), + session_id=int_session_id, + function_oid=obj['function_id'] + ) status, res = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) # wait for the target - sql = render_template("/".join([template_path, 'wait_for_target.sql']), - session_id=int_session_id) + sql = render_template( + "/".join([template_path, 'wait_for_target.sql']), + session_id=int_session_id + ) status, res = conn.execute_async(sql) if not status: @@ -839,17 +965,28 @@ def start_debugger_listener(trans_id): session_obj['frame_id'] = 0 session_obj['session_id'] = int_session_id update_session_debugger_transaction(trans_id, session_obj) - return make_json_response(data={'status': status, 'result': res}) + return make_json_response( + data={'status': status, 'result': 
res} + ) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') - return make_json_response(data={'status': status, 'result': result}) + result = gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' + ) + return make_json_response( + data={'status': status, 'result': result} + ) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.' + ) return make_json_response(data={'status': status, 'result': result}) + @blueprint.route( '/execute_query//', methods=['GET'], endpoint='execute_query' @@ -859,8 +996,9 @@ def execute_debugger_query(trans_id, query_type): """ execute_debugger_query(trans_id, query_type) - This method is responsible to execute the query and return value. As this method is generic so user has to pass the - query_type to get the required information for debugging. + This method is responsible to execute the query and return value. As this + method is generic so user has to pass the query_type to get the required + information for debugging. e.g. If user want to execute 'step_into' then query_type='step_into'. If user want to execute 'continue' then query_type='continue' @@ -875,13 +1013,20 @@ def execute_debugger_query(trans_id, query_type): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) # find the debugger version and execute the query accordingly dbg_version = obj['debugger_version'] @@ -900,7 +1045,7 @@ def execute_debugger_query(trans_id, query_type): # We need to update the frame id variable when user move the next # step for debugging. if query_type == 'continue' or query_type == 'step_into' or \ - query_type == 'step_over': + query_type == 'step_over': # We should set the frame_id to 0 when execution starts. if obj['frame_id'] != 0: session_obj = debugger_data[str(trans_id)] @@ -934,7 +1079,9 @@ def execute_debugger_query(trans_id, query_type): ) -@blueprint.route('/messages//', methods=["GET"],endpoint='messages') +@blueprint.route( + '/messages//', methods=["GET"], endpoint='messages' +) @login_required def messages(trans_id): """ @@ -950,12 +1097,18 @@ def messages(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': 'NotConnected', - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': 'NotConnected', + 'result': gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) conn = manager.connection(did=obj['database_id'], conn_id=obj['conn_id']) port_number = '' @@ -964,8 +1117,8 @@ def messages(trans_id): status, result = conn.poll() notify = conn.messages() if notify: - # In notice message we need to find "PLDBGBREAK" string to find the - # port number to attach. + # In notice message we need to find "PLDBGBREAK" string to find + # the port number to attach. # Notice message returned by the server is # "NOTICE: PLDBGBREAK:7". # From the above message we need to find out port number @@ -980,7 +1133,8 @@ def messages(trans_id): while notify[0][offset + str_len + tmpOffset].isdigit(): status = 'Success' tmpFlag = True - port_number = port_number + notify[0][offset + str_len + tmpOffset] + port_number = port_number + \ + notify[0][offset + str_len + tmpOffset] tmpOffset += 1 if not tmpFlag: @@ -1008,8 +1162,9 @@ def start_execution(trans_id, port_num): """ start_execution(trans_id, port_num) - This method is responsible for creating an asynchronous connection for execution thread. - Also store the session id into session return with attach port query for the direct debugging. + This method is responsible for creating an asynchronous connection for + execution thread. Also store the session id into session return with + attach port query for the direct debugging. Parameters: trans_id @@ -1021,8 +1176,13 @@ def start_execution(trans_id, port_num): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': 'NotConnected', - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': 'NotConnected', + 'result': gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) + } ) obj = debugger_data[str(trans_id)] @@ -1031,7 +1191,8 @@ def start_execution(trans_id, port_num): # Create asynchronous connection using random connection id. exe_conn_id = str(random.randint(1, 9999999)) try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) conn = manager.connection(did=obj['database_id'], conn_id=exe_conn_id) except Exception as e: return internal_server_error(errormsg=str(e)) @@ -1054,7 +1215,8 @@ def start_execution(trans_id, port_num): template_path = 'debugger/sql/v2' # connect to port and store the session ID in the session variables - sql = render_template("/".join([template_path, 'attach_to_port.sql']), port=port_num) + sql = render_template( + "/".join([template_path, 'attach_to_port.sql']), port=port_num) status_port, res_port = conn.execute_dict(sql) if not status_port: return internal_server_error(errormsg=res_port) @@ -1067,7 +1229,12 @@ def start_execution(trans_id, port_num): session_obj['session_id'] = res_port['rows'][0]['pldbg_attach_to_port'] update_session_debugger_transaction(trans_id, session_obj) - return make_json_response(data={'status': 'Success', 'result': res_port['rows'][0]['pldbg_attach_to_port']}) + return make_json_response( + data={ + 'status': 'Success', + 'result': res_port['rows'][0]['pldbg_attach_to_port'] + } + ) @blueprint.route( @@ -1093,13 +1260,20 @@ def set_clear_breakpoint(trans_id, line_no, set_type): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) # find the debugger version and execute the query accordingly dbg_version = obj['debugger_version'] @@ -1110,16 +1284,21 @@ def set_clear_breakpoint(trans_id, line_no, set_type): query_type = '' - # We need to find out function OID before sending the foid to set the breakpoint because it may possible that - # debugging function has multi level function for debugging so we need to save the debug level to session variable - # and pass tha appropriate foid to set the breakpoint. - sql_ = render_template("/".join([template_path, "get_stack_info.sql"]), session_id=obj['session_id']) + # We need to find out function OID before sending the foid to set the + # breakpoint because it may possible that debugging function has multi + # level function for debugging so we need to save the debug level to + # session variable and pass tha appropriate foid to set the breakpoint. + sql_ = render_template( + "/".join([template_path, "get_stack_info.sql"]), + session_id=obj['session_id'] + ) status, res_stack = conn.execute_dict(sql_) if not status: return internal_server_error(errormsg=res_stack) - # For multilevel function debugging, we need to fetch current selected frame's function oid for setting the - # breakpoint. For single function the frame id will be 0. + # For multilevel function debugging, we need to fetch current selected + # frame's function oid for setting the breakpoint. For single function + # the frame id will be 0. 
foid = res_stack['rows'][obj['frame_id']]['func'] # Check the result of the stack before setting the breakpoint @@ -1129,17 +1308,25 @@ def set_clear_breakpoint(trans_id, line_no, set_type): else: query_type = 'clear_breakpoint' - sql = render_template("/".join([template_path, query_type + ".sql"]), session_id=obj['session_id'], - foid=foid, line_number=line_no) + sql = render_template( + "/".join([template_path, query_type + ".sql"]), + session_id=obj['session_id'], + foid=foid, line_number=line_no + ) status, result = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=result) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' + ) - return make_json_response(data={'status': status, 'result': result['rows']}) + return make_json_response( + data={'status': status, 'result': result['rows']} + ) @blueprint.route( @@ -1160,13 +1347,20 @@ def clear_all_breakpoint(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection ' + 'with the server has been closed.' 
+ ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) # find the debugger version and execute the query accordingly dbg_version = obj['debugger_version'] @@ -1182,8 +1376,11 @@ def clear_all_breakpoint(trans_id): if request.form['breakpoint_list']: line_numbers = request.form['breakpoint_list'].split(",") for line_no in line_numbers: - sql = render_template("/".join([template_path, "clear_breakpoint.sql"]), session_id=obj['session_id'], - foid=obj['function_id'], line_number=line_no) + sql = render_template( + "/".join([template_path, "clear_breakpoint.sql"]), + session_id=obj['session_id'], + foid=obj['function_id'], line_number=line_no + ) status, result = conn.execute_dict(sql) if not status: @@ -1192,13 +1389,18 @@ def clear_all_breakpoint(trans_id): return make_json_response(data={'status': False}) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.') - return make_json_response(data={'status': status, 'result': result['rows']}) + return make_json_response( + data={'status': status, 'result': result['rows']} + ) @blueprint.route( - '/deposit_value/', methods=['POST'], endpoint='deposit_value' + '/deposit_value/', methods=['POST'], + endpoint='deposit_value' ) @login_required def deposit_parameter_value(trans_id): @@ -1214,13 +1416,18 @@ def deposit_parameter_value(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been 
closed.')} + data={ + 'status': False, + 'result': gettext('Not connected to server or connection ' + 'with the server has been closed.') + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) # find the debugger version and execute the query accordingly dbg_version = obj['debugger_version'] @@ -1236,23 +1443,36 @@ def deposit_parameter_value(trans_id): data = json.loads(request.values['data'], encoding='utf-8') if data: - sql = render_template("/".join([template_path, "deposit_value.sql"]), session_id=obj['session_id'], - var_name=data[0]['name'], line_number=-1, val=data[0]['value']) + sql = render_template( + "/".join([template_path, "deposit_value.sql"]), + session_id=obj['session_id'], + var_name=data[0]['name'], line_number=-1, + val=data[0]['value'] + ) status, result = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=result) - # Check if value deposited successfully or not and depending on the result, return the message information. + # Check if value deposited successfully or not and depending on + # the result, return the message information. if result['rows'][0]['pldbg_deposit_value']: info = gettext('Value deposited successfully') else: info = gettext('Error while setting the value') return make_json_response( - data={'status': status, 'info': info, 'result': result['rows'][0]['pldbg_deposit_value']}) + data={ + 'status': status, + 'info': info, + 'result': result['rows'][0]['pldbg_deposit_value'] + } + ) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.' 
+ ) return make_json_response(data={'status': status, 'result': result}) @@ -1277,13 +1497,20 @@ def select_frame(trans_id, frame_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': False, - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': False, + 'result': gettext( + 'Not connected to server or connection ' + 'with the server has been closed.' + ) + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) # find the debugger version and execute the query accordingly dbg_version = obj['debugger_version'] @@ -1297,17 +1524,25 @@ def select_frame(trans_id, frame_id): update_session_debugger_transaction(trans_id, session_obj) if conn.connected(): - sql = render_template("/".join([template_path, "select_frame.sql"]), session_id=obj['session_id'], - frame_id=frame_id) + sql = render_template( + "/".join([template_path, "select_frame.sql"]), + session_id=obj['session_id'], + frame_id=frame_id + ) status, result = conn.execute_dict(sql) if not status: return internal_server_error(errormsg=result) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) - return make_json_response(data={'status': status, 'result': result['rows']}) + return make_json_response( + data={'status': status, 'result': result['rows']} + ) @blueprint.route( @@ -1319,7 +1554,8 @@ def get_arguments_sqlite(sid, did, scid, func_id): """ get_arguments_sqlite(sid, did, scid, func_id) - This method is responsible to get the function arguments saved to sqlite database during first debugging. + This method is responsible to get the function arguments saved to sqlite + database during first debugging. Parameters: sid @@ -1334,14 +1570,22 @@ def get_arguments_sqlite(sid, did, scid, func_id): """Get the count of the existing data available in sqlite database""" DbgFuncArgsCount = DebuggerFunctionArguments.query.filter_by( - server_id=sid, database_id=did, schema_id=scid, function_id=func_id).count() + server_id=sid, + database_id=did, + schema_id=scid, + function_id=func_id + ).count() args_data = [] if DbgFuncArgsCount: """Update the Debugger Function Arguments settings""" DbgFuncArgs = DebuggerFunctionArguments.query.filter_by( - server_id=sid, database_id=did, schema_id=scid, function_id=func_id) + server_id=sid, + database_id=did, + schema_id=scid, + function_id=func_id + ) args_list = DbgFuncArgs.all() @@ -1355,11 +1599,17 @@ def get_arguments_sqlite(sid, did, scid, func_id): } args_data.append(info) - # As we do have entry available for that function so we need to add that entry - return make_json_response(data={'result': args_data, 'args_count': DbgFuncArgsCount}) + # As we do have entry available for that function so we need to add + # that entry + return make_json_response( + data={'result': args_data, 'args_count': DbgFuncArgsCount} + ) else: - # As we do not have any entry available for that function so we need to add that entry - return make_json_response(data={'result': 'result', 'args_count': DbgFuncArgsCount}) + # As we do not have any entry available for that function so we need + # to add that entry + return make_json_response( + 
data={'result': 'result', 'args_count': DbgFuncArgsCount} + ) @blueprint.route( @@ -1371,7 +1621,8 @@ def set_arguments_sqlite(sid, did, scid, func_id): """ set_arguments_sqlite(sid, did, scid, func_id) - This method is responsible for setting the value of function arguments to sqlite database + This method is responsible for setting the value of function arguments + to sqlite database Parameters: sid @@ -1390,26 +1641,37 @@ def set_arguments_sqlite(sid, did, scid, func_id): try: for i in range(0, len(data)): DbgFuncArgsExists = DebuggerFunctionArguments.query.filter_by( - server_id=data[i]['server_id'], database_id=data[i]['database_id'], schema_id=data[i]['schema_id'], - function_id=data[i]['function_id'], arg_id=data[i]['arg_id']).count() + server_id=data[i]['server_id'], + database_id=data[i]['database_id'], + schema_id=data[i]['schema_id'], + function_id=data[i]['function_id'], + arg_id=data[i]['arg_id'] + ).count() # handle the Array list sent from the client array_string = '' - if data[i]['value'].__class__.__name__ in ('list') and data[i]['value']: + if data[i]['value'].__class__.__name__ in ( + 'list') and data[i]['value']: for k in range(0, len(data[i]['value'])): array_string += data[i]['value'][k]['value'] if k != (len(data[i]['value']) - 1): array_string += ',' - elif data[i]['value'].__class__.__name__ in ('list') and not data[i]['value']: + elif data[i]['value'].__class__.__name__ in ( + 'list') and not data[i]['value']: array_string = '' else: array_string = data[i]['value'] - # Check if data is already available in database then update the existing value otherwise add the new value + # Check if data is already available in database then update the + # existing value otherwise add the new value if DbgFuncArgsExists: DbgFuncArgs = DebuggerFunctionArguments.query.filter_by( - server_id=data[i]['server_id'], database_id=data[i]['database_id'], schema_id=data[i]['schema_id'], - function_id=data[i]['function_id'], arg_id=data[i]['arg_id']).first() + 
server_id=data[i]['server_id'], + database_id=data[i]['database_id'], + schema_id=data[i]['schema_id'], + function_id=data[i]['function_id'], + arg_id=data[i]['arg_id'] + ).first() DbgFuncArgs.is_null = data[i]['is_null'] DbgFuncArgs.is_expression = data[i]['is_expression'] @@ -1466,8 +1728,8 @@ def convert_data_to_dict(conn, result): columns.append(column) # We need to convert result from 2D array to dict for BackGrid - # BackGrid do not support for 2D array result as it it Backbone Model based grid - # This Conversion is not an overhead as most of the time + # BackGrid do not support for 2D array result as it it Backbone Model + # based grid, This Conversion is not an overhead as most of the time # result will be smaller _tmp_result = [] for row in result: @@ -1492,7 +1754,8 @@ def poll_end_execution_result(trans_id): """ poll_end_execution_result(trans_id) - This method polls the end of execution result messages returned by the database server. + This method polls the end of execution result messages returned by the + database server. 
Parameters: trans_id @@ -1510,7 +1773,8 @@ def poll_end_execution_result(trans_id): ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) conn = manager.connection(did=obj['database_id'], conn_id=obj['conn_id']) if conn.connected(): @@ -1518,9 +1782,10 @@ def poll_end_execution_result(trans_id): if statusmsg and statusmsg == 'SELECT 1': statusmsg = '' status, result = conn.poll() + session_function_data = session['functionData'][str(trans_id)] if status == ASYNC_OK and \ - not session['functionData'][str(trans_id)]['is_func'] and \ - session['functionData'][str(trans_id)]['language'] == 'edbspl': + not session_function_data['is_func'] and \ + session_function_data['language'] == 'edbspl': status = 'Success' additional_msgs = conn.messages() if len(additional_msgs) > 0: @@ -1532,15 +1797,22 @@ def poll_end_execution_result(trans_id): statusmsg = additional_msgs return make_json_response( - success=1, info=gettext("Execution Completed."), - data={'status': status, 'status_message': statusmsg} + success=1, + info=gettext("Execution Completed."), + data={ + 'status': status, + 'status_message': statusmsg + } ) if result: if 'ERROR' in result: status = 'ERROR' return make_json_response( info=gettext("Execution completed with error"), - data={'status': status, 'status_message': result} + data={ + 'status': status, + 'status_message': result + } ) else: status = 'Success' @@ -1557,9 +1829,13 @@ def poll_end_execution_result(trans_id): columns, result = convert_data_to_dict(conn, result) return make_json_response( - success=1, info=gettext("Execution Completed."), - data={'status': status, 'result': result, - 'col_info': columns, 'status_message': statusmsg} + success=1, + info=gettext("Execution Completed."), + data={ + 'status': status, + 'result': result, + 'col_info': columns, + 'status_message': statusmsg} ) else: status = 
'Busy' @@ -1571,9 +1847,13 @@ def poll_end_execution_result(trans_id): statusmsg = additional_msgs + "\n" + statusmsg else: statusmsg = additional_msgs - return make_json_response(data={ - 'status': status, 'result': result, 'status_message': statusmsg - }) + return make_json_response( + data={ + 'status': status, + 'result': result, + 'status_message': statusmsg + } + ) else: status = 'NotConnected' result = gettext('Not connected to server or connection with the ' @@ -1590,7 +1870,8 @@ def poll_result(trans_id): """ poll_result(trans_id) - This method polls the result of the asynchronous query and returns the result. + This method polls the result of the asynchronous query and returns the + result. Parameters: trans_id @@ -1600,13 +1881,18 @@ def poll_result(trans_id): debugger_data = session['debuggerData'] if str(trans_id) not in debugger_data: return make_json_response( - data={'status': 'NotConnected', - 'result': gettext('Not connected to server or connection with the server has been closed.')} + data={ + 'status': 'NotConnected', + 'result': gettext('Not connected to server or connection ' + 'with the server has been closed.') + } ) obj = debugger_data[str(trans_id)] - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) - conn = manager.connection(did=obj['database_id'], conn_id=obj['exe_conn_id']) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(obj['server_id']) + conn = manager.connection( + did=obj['database_id'], conn_id=obj['exe_conn_id']) if conn.connected(): status, result = conn.poll() @@ -1617,6 +1903,14 @@ def poll_result(trans_id): status = 'Busy' else: status = 'NotConnected' - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server ' + 'has been closed.' 
+ ) - return make_json_response(data={'status': status, 'result': result}) + return make_json_response( + data={ + 'status': status, + 'result': result + } + ) diff --git a/web/pgadmin/tools/grant_wizard/__init__.py b/web/pgadmin/tools/grant_wizard/__init__.py index a9e091f..c092d49 100644 --- a/web/pgadmin/tools/grant_wizard/__init__.py +++ b/web/pgadmin/tools/grant_wizard/__init__.py @@ -24,7 +24,7 @@ from config import PG_DEFAULT_DRIVER try: from urllib import unquote -except: +except ImportError: from urllib.parse import unquote from pgadmin.utils.ajax import precondition_required from functools import wraps @@ -136,9 +136,11 @@ def check_precondition(f): server_info['server_type'] = server_info['manager'].server_type server_info['version'] = server_info['manager'].version if server_info['server_type'] == 'pg': - server_info['template_path'] = 'grant_wizard/pg/#{0}#'.format(server_info['version']) + server_info['template_path'] = 'grant_wizard/pg/#{0}#'.format( + server_info['version']) elif server_info['server_type'] == 'ppas': - server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format(server_info['version']) + server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format( + server_info['version']) return f(*args, **kwargs) @@ -240,8 +242,8 @@ def properties(sid, did, node_id, node_type): # Fetch procedures only if server type is ppas if (len(server_prop) > 0 and - server_prop['server_type'] == 'ppas' and - ntype in ['schema', 'procedure']): + server_prop['server_type'] == 'ppas' and + ntype in ['schema', 'procedure']): SQL = render_template("/".join( [server_prop['template_path'], '/sql/function.sql']), node_id=node_id, type='procedure') diff --git a/web/pgadmin/tools/import_export/__init__.py b/web/pgadmin/tools/import_export/__init__.py index 2928e9c..3f83fd4 100644 --- a/web/pgadmin/tools/import_export/__init__.py +++ b/web/pgadmin/tools/import_export/__init__.py @@ -88,7 +88,7 @@ class IEMessage(IProcessDesc): x = x.replace('"', '\\"') x = 
x.replace('""', '\\"') - return ' "' + x + '"' + return ' "' + x + '"' return '' replace_next = False @@ -219,7 +219,6 @@ def create_import_export_job(sid): if server is None: return bad_request(errormsg=_("Could not find the given server")) - # To fetch MetaData for the server from pgadmin.utils.driver import get_driver driver = get_driver(PG_DEFAULT_DRIVER) @@ -238,7 +237,8 @@ def create_import_export_job(sid): if 'filename' in data: try: - _file = filename_with_file_manager_path(data['filename'], data['is_import']) + _file = filename_with_file_manager_path( + data['filename'], data['is_import']) except Exception as e: return bad_request(errormsg=str(e)) @@ -306,6 +306,7 @@ def create_import_export_job(sid): cmd=utility, args=args ) manager.export_password_env(p.id) + def export_pg_env(env): env['PGHOST'] = server.host env['PGPORT'] = str(server.port) diff --git a/web/pgadmin/tools/maintenance/__init__.py b/web/pgadmin/tools/maintenance/__init__.py index 38924e3..8416a20 100644 --- a/web/pgadmin/tools/maintenance/__init__.py +++ b/web/pgadmin/tools/maintenance/__init__.py @@ -120,8 +120,8 @@ class Message(IProcessDesc): if self.data['op'] == "REINDEX": if 'schema' in self.data and self.data['schema']: if 'primary_key' in self.data or\ - 'unique_constraint' in self.data or\ - 'index' in self.data: + 'unique_constraint' in self.data or\ + 'index' in self.data: return _('REINDEX INDEX') else: return _('REINDEX TABLE') @@ -248,5 +248,6 @@ def create_maintenance_job(sid, did): # Return response return make_json_response( - data={'job_id': jid, 'status': True, 'info': 'Maintenance job created.'} + data={'job_id': jid, 'status': True, + 'info': 'Maintenance job created.'} ) diff --git a/web/pgadmin/tools/restore/__init__.py b/web/pgadmin/tools/restore/__init__.py index 0f95b64..6afa7b4 100644 --- a/web/pgadmin/tools/restore/__init__.py +++ b/web/pgadmin/tools/restore/__init__.py @@ -58,6 +58,7 @@ class RestoreModule(PgAdminModule): """ return ['restore.create_job'] + # 
Create blueprint for RestoreModule class blueprint = RestoreModule( MODULE_NAME, __name__, static_url_path='' @@ -76,7 +77,7 @@ class RestoreMessage(IProcessDesc): x = x.replace('\\', '\\\\') x = x.replace('"', '\\"') x = x.replace('""', '\\"') - return ' "' + x + '"' + return ' "' + x + '"' return '' for arg in _args: @@ -85,7 +86,6 @@ class RestoreMessage(IProcessDesc): else: self.cmd += cmdArg(arg) - @property def message(self): # Fetch the server details like hostname, port, roles etc @@ -324,7 +324,7 @@ def create_restore_job(sid): data['file'].encode('utf-8') if hasattr( data['file'], 'encode' ) else data['file'], - *args + *args ), cmd=utility, args=args ) diff --git a/web/pgadmin/tools/sqleditor/__init__.py b/web/pgadmin/tools/sqleditor/__init__.py index f967a15..635219d 100644 --- a/web/pgadmin/tools/sqleditor/__init__.py +++ b/web/pgadmin/tools/sqleditor/__init__.py @@ -59,6 +59,7 @@ CONNECTION_STATUS_MESSAGE_MAPPING = dict({ 4: 'The connection with the server is bad.' }) + class SqlEditorModule(PgAdminModule): """ class SqlEditorModule(PgAdminModule) @@ -85,7 +86,6 @@ class SqlEditorModule(PgAdminModule): 'when': None }] - def get_panels(self): return [] @@ -128,10 +128,10 @@ class SqlEditorModule(PgAdminModule): max_val=999999, help_str=gettext( 'The length of time to display the query info notifier after ' - 'execution has completed. A value of -1 disables the notifier ' - 'and a value of 0 displays it until clicked. Values greater ' - 'than 0 display the notifier for the number of seconds ' - 'specified.' + 'execution has completed. A value of -1 disables the notifier' + ' and a value of 0 displays it until clicked. Values greater' + ' than 0 display the notifier for the number of seconds' + ' specified.' 
) ) @@ -356,10 +356,13 @@ class SqlEditorModule(PgAdminModule): gettext("Connection status refresh rate"), 'integer', 2, min_val=1, max_val=600, category_label=gettext('Display'), - help_str=gettext('The number of seconds between connection/transaction ' - 'status polls.') + help_str=gettext( + 'The number of seconds between connection/transaction ' + 'status polls.' + ) ) + blueprint = SqlEditorModule(MODULE_NAME, __name__, static_url_path='/static') @@ -392,7 +395,7 @@ def check_transaction_status(trans_id): """ if 'gridData' not in session: return False, unauthorized(gettext("Unauthorized request.")), \ - None, None, None + None, None, None grid_data = session['gridData'] @@ -408,23 +411,28 @@ def check_transaction_status(trans_id): trans_obj = pickle.loads(session_obj['command_obj']) try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) - conn = manager.connection(did=trans_obj.did, conn_id=trans_obj.conn_id, - use_binary_placeholder=True, - array_to_string=True) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) + conn = manager.connection( + did=trans_obj.did, + conn_id=trans_obj.conn_id, + use_binary_placeholder=True, + array_to_string=True + ) except Exception as e: return False, internal_server_error(errormsg=str(e)), None, None, None if conn.connected(): return True, None, conn, trans_obj, session_obj else: - return False, gettext('Not connected to server or connection with the server has been closed.'), \ - None, trans_obj, session_obj + return False, gettext('Not connected to server or connection with ' + 'the server has been closed.'), \ + None, trans_obj, session_obj @blueprint.route( '/view_data/start/', - methods=["GET"], endpoint='view_data_start' + methods=["GET"], endpoint='view_data_start' ) @login_required def start_view_data(trans_id): @@ -437,7 +445,8 @@ def start_view_data(trans_id): limit = -1 # Check the transaction and connection status - status, error_msg, conn, trans_obj, 
session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) # get the default connection as current connection which is attached to # trans id holds the cursor which has query result so we cannot use that @@ -568,7 +577,8 @@ def start_query_tool(trans_id): conn_id = str(random.randint(1, 9999999)) try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) conn = manager.connection(did=trans_obj.did, conn_id=conn_id, use_binary_placeholder=True, array_to_string=True) @@ -611,14 +621,17 @@ def start_query_tool(trans_id): conn.execute_void("ROLLBACK;") else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.') can_edit = trans_obj.can_edit() can_filter = trans_obj.can_filter() else: status = False - result = gettext('Either transaction object or session object not found.') + result = gettext( + 'Either transaction object or session object not found.') return make_json_response( data={ @@ -644,10 +657,12 @@ def preferences(trans_id): if request.method == 'GET': # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: - # Call the set_auto_commit and set_auto_rollback method of transaction object + # Call the set_auto_commit and set_auto_rollback method of + # transaction object trans_obj.set_auto_commit(blueprint.auto_commit.get()) trans_obj.set_auto_rollback(blueprint.auto_rollback.get()) @@ -690,7 +705,8 @@ def preferences(trans_id): @login_required def poll(trans_id): """ - This method 
polls the result of the asynchronous query and returns the result. + This method polls the result of the asynchronous query and returns + the result. Args: trans_id: unique transaction id @@ -710,9 +726,11 @@ def poll(trans_id): oids = None # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None and session_obj is not None: - status, result = conn.poll(formatted_exception_msg=True, no_result=True) + status, result = conn.poll( + formatted_exception_msg=True, no_result=True) if not status: return internal_server_error(result) elif status == ASYNC_OK: @@ -821,10 +839,12 @@ def poll(trans_id): if res_len > 0: rows_fetched_from = trans_obj.get_fetched_row_cnt() - trans_obj.update_fetched_row_cnt(rows_fetched_from + res_len) + trans_obj.update_fetched_row_cnt( + rows_fetched_from + res_len) rows_fetched_from += 1 rows_fetched_to = trans_obj.get_fetched_row_cnt() - session_obj['command_obj'] = pickle.dumps(trans_obj, -1) + session_obj['command_obj'] = pickle.dumps( + trans_obj, -1) # As we changed the transaction object we need to # restore it and update the session variable. 
@@ -856,7 +876,7 @@ def poll(trans_id): if status == 'Success' and result is None: result = conn.status_message() if (result != 'SELECT 1' or result != 'SELECT 0') \ - and result is not None and additional_messages: + and result is not None and additional_messages: result = additional_messages + result return make_json_response( @@ -877,8 +897,13 @@ def poll(trans_id): ) -@blueprint.route('/fetch/', methods=["GET"], endpoint='fetch') -@blueprint.route('/fetch//', methods=["GET"], endpoint='fetch_all') +@blueprint.route( + '/fetch/', methods=["GET"], endpoint='fetch' +) +@blueprint.route( + '/fetch//', methods=["GET"], + endpoint='fetch_all' +) @login_required def fetch(trans_id, fetch_all=None): result = None @@ -888,7 +913,8 @@ def fetch(trans_id, fetch_all=None): fetch_row_cnt = -1 if fetch_all == 1 else ON_DEMAND_RECORD_COUNT # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None and session_obj is not None: status, result = conn.async_fetchmany_2darray(fetch_row_cnt) if not status: @@ -947,8 +973,9 @@ def fetch_pg_types(columns_info, trans_obj): if oids: status, res = default_conn.execute_dict( - u"""SELECT oid, format_type(oid,null) as typname FROM pg_type WHERE oid IN %s ORDER BY oid; -""", [tuple(oids)]) + u"SELECT oid, format_type(oid,null) as typname FROM pg_type " + u"WHERE oid IN %s ORDER BY oid;", [tuple(oids)] + ) if not status: return False, res @@ -1001,20 +1028,24 @@ def save(trans_id): changed_data = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: # If there is no 
primary key found then return from the function. - if (len(session_obj['primary_keys']) <= 0 or len(changed_data) <= 0) and 'has_oids' not in session_obj: + if (len(session_obj['primary_keys']) <= 0 or len(changed_data) <= 0) \ + and 'has_oids' not in session_obj: return make_json_response( data={ 'status': False, - 'result': gettext('No primary key found for this object, so unable to save records.') + 'result': gettext('No primary key found for this object, ' + 'so unable to save records.') } ) - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) default_conn = manager.connection(did=trans_obj.did) # Connect to the Server if not connected. @@ -1059,7 +1090,8 @@ def get_filter(trans_id): """ # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1089,7 +1121,8 @@ def apply_filter(trans_id): filter_sql = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1124,7 +1157,8 @@ def append_filter_inclusive(trans_id): filter_data = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1137,7 +1171,9 @@ def append_filter_inclusive(trans_id): if 
column_value is None: filter_sql = driver.qtIdent(conn, column_name) + ' IS NULL ' else: - filter_sql = driver.qtIdent(conn, column_name) + ' = ' + driver.qtLiteral(column_value) + filter_sql = driver.qtIdent( + conn, column_name + ) + ' = ' + driver.qtLiteral(column_value) trans_obj.append_filter(filter_sql) @@ -1170,7 +1206,8 @@ def append_filter_exclusive(trans_id): filter_data = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1181,9 +1218,12 @@ def append_filter_exclusive(trans_id): for column_name in filter_data: column_value = filter_data[column_name] if column_value is None: - filter_sql = driver.qtIdent(conn, column_name) + ' IS NOT NULL ' + filter_sql = driver.qtIdent( + conn, column_name) + ' IS NOT NULL ' else: - filter_sql = driver.qtIdent(conn, column_name) + ' IS DISTINCT FROM ' + driver.qtLiteral(column_value) + filter_sql = driver.qtIdent( + conn, column_name + ) + ' IS DISTINCT FROM ' + driver.qtLiteral(column_value) # Call the append_filter method of transaction object trans_obj.append_filter(filter_sql) @@ -1213,7 +1253,8 @@ def remove_filter(trans_id): """ # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1250,7 +1291,8 @@ def set_limit(trans_id): limit = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + 
check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1289,7 +1331,8 @@ def cancel_transaction(trans_id): if str(trans_id) not in grid_data: return make_json_response( data={ - 'status': False, 'result': gettext('Transaction ID not found in the session.') + 'status': False, + 'result': gettext('Transaction ID not found in the session.') } ) @@ -1302,7 +1345,8 @@ def cancel_transaction(trans_id): # Fetch the main connection object for the database. try: - manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) + manager = get_driver( + PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid) conn = manager.connection(did=trans_obj.did) except Exception as e: return internal_server_error(errormsg=str(e)) @@ -1318,7 +1362,8 @@ def cancel_transaction(trans_id): if conn.connected(): # on successful connection cancel the running transaction - status, result = conn.cancel_transaction(trans_obj.conn_id, trans_obj.did) + status, result = conn.cancel_transaction( + trans_obj.conn_id, trans_obj.did) # Delete connection if we have created it to # cancel the transaction @@ -1326,10 +1371,14 @@ def cancel_transaction(trans_id): manager.release(did=trans_obj.did) else: status = False - result = gettext('Not connected to server or connection with the server has been closed.') + result = gettext( + 'Not connected to server or connection with the server has ' + 'been closed.' 
+ ) else: status = False - result = gettext('Either transaction object or session object not found.') + result = gettext( + 'Either transaction object or session object not found.') return make_json_response( data={ @@ -1352,10 +1401,10 @@ def get_object_name(trans_id): """ # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: - res = trans_obj.object_name else: status = False @@ -1382,7 +1431,8 @@ def set_auto_commit(trans_id): auto_commit = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1423,7 +1473,8 @@ def set_auto_rollback(trans_id): auto_rollback = request.args or request.form # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: @@ -1471,12 +1522,14 @@ def auto_complete(trans_id): text_before_cursor = data[1] # Check the transaction and connection status - status, error_msg, conn, trans_obj, session_obj = check_transaction_status(trans_id) + status, error_msg, conn, trans_obj, session_obj = \ + check_transaction_status(trans_id) if status and conn is not None \ and trans_obj is not None and session_obj is not None: # Create object of SQLAutoComplete class and pass connection object - auto_complete_obj = SQLAutoComplete(sid=trans_obj.sid, did=trans_obj.did, 
conn=conn) + auto_complete_obj = SQLAutoComplete( + sid=trans_obj.sid, did=trans_obj.did, conn=conn) # Get the auto completion suggestions. res = auto_complete_obj.get_completions(full_sql, text_before_cursor) @@ -1491,13 +1544,16 @@ def auto_complete(trans_id): @login_required def script(): """render the required javascript""" - return Response(response=render_template("sqleditor/js/sqleditor.js", - tab_size=blueprint.tab_size.get(), - use_spaces=blueprint.use_spaces.get(), - _=gettext), - status=200, - mimetype="application/javascript" - ) + return Response( + response=render_template( + "sqleditor/js/sqleditor.js", + tab_size=blueprint.tab_size.get(), + use_spaces=blueprint.use_spaces.get(), + _=gettext + ), + status=200, + mimetype="application/javascript" + ) def is_begin_required(query): @@ -1702,7 +1758,8 @@ def load_file(): def gen(): with codecs.open(file_path, 'r', encoding=enc) as fileObj: while True: - data = fileObj.read(4194304) # 4MB chunk (4 * 1024 * 1024 Bytes) + # 4MB chunk (4 * 1024 * 1024 Bytes) + data = fileObj.read(4194304) if not data: break yield data @@ -1815,9 +1872,14 @@ def start_query_download_tool(trans_id): r.call_on_close(cleanup) return r - r = Response(gen(quote=blueprint.csv_quoting.get(), - quote_char=blueprint.csv_quote_char.get(), - field_separator=blueprint.csv_field_separator.get()), mimetype='text/csv') + r = Response( + gen( + quote=blueprint.csv_quoting.get(), + quote_char=blueprint.csv_quote_char.get(), + field_separator=blueprint.csv_field_separator.get() + ), + mimetype='text/csv' + ) if 'filename' in data and data['filename'] != "": filename = data['filename'] @@ -1851,6 +1913,7 @@ def start_query_download_tool(trans_id): errormsg=gettext("Transaction status check failed.") ) + @blueprint.route( '/status/', methods=["GET"], diff --git a/web/pgadmin/tools/sqleditor/command.py b/web/pgadmin/tools/sqleditor/command.py index 97f13ea..e480ab7 100644 --- a/web/pgadmin/tools/sqleditor/command.py +++ 
b/web/pgadmin/tools/sqleditor/command.py @@ -67,7 +67,8 @@ class ObjectRegistry(ABCMeta): return (ObjectRegistry.registry[name])(**kwargs) raise NotImplementedError( - gettext("This feature has not been implemented for object type '{0}'.").format(name) + gettext("This feature has not been implemented for object " + "type '{0}'.").format(name) ) @@ -157,7 +158,8 @@ class SQLFilter(object): self.sid = kwargs['sid'] self.did = kwargs['did'] self.obj_id = kwargs['obj_id'] - self.__row_filter = kwargs['sql_filter'] if 'sql_filter' in kwargs else None + self.__row_filter = kwargs['sql_filter'] if 'sql_filter' in kwargs \ + else None manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid) conn = manager.connection(did=self.did) @@ -167,7 +169,10 @@ class SQLFilter(object): if conn.connected(): # Fetch the Namespace Name and object Name - query = render_template("/".join([self.sql_path, 'objectname.sql']), obj_id=self.obj_id) + query = render_template( + "/".join([self.sql_path, 'objectname.sql']), + obj_id=self.obj_id + ) status, result = conn.execute_dict(query) if not status: @@ -176,7 +181,10 @@ class SQLFilter(object): self.nsp_name = result['rows'][0]['nspname'] self.object_name = result['rows'][0]['relname'] else: - raise Exception(gettext('Not connected to server or connection with the server has been closed.')) + raise Exception(gettext( + 'Not connected to server or connection with the server ' + 'has been closed.') + ) def get_filter(self): """ @@ -248,8 +256,10 @@ class SQLFilter(object): conn = manager.connection(did=self.did) if conn.connected(): - sql = render_template("/".join([self.sql_path, 'validate.sql']), - nsp_name=self.nsp_name, object_name=self.object_name, row_filter=row_filter) + sql = render_template( + "/".join([self.sql_path, 'validate.sql']), + nsp_name=self.nsp_name, object_name=self.object_name, + row_filter=row_filter) status, result = conn.execute_scalar(sql) if not status: @@ -262,6 +272,7 @@ class FetchedRowTracker(object): 
""" Keeps track of fetched row count. """ + def __init__(self, **kwargs): self.fetched_rows = 0 @@ -285,7 +296,8 @@ class GridCommand(BaseCommand, SQLFilter, FetchedRowTracker): - Derived class can implement there own logic to get the primary keys. * save() - - Derived class can implement there own logic to save the data into the database. + - Derived class can implement there own logic to save the data into the + database. * set_limit(limit) - This method sets the limit for SQL query @@ -311,14 +323,17 @@ class GridCommand(BaseCommand, SQLFilter, FetchedRowTracker): self.cmd_type = kwargs['cmd_type'] if 'cmd_type' in kwargs else None self.limit = -1 - if self.cmd_type == VIEW_FIRST_100_ROWS or self.cmd_type == VIEW_LAST_100_ROWS: + if self.cmd_type == VIEW_FIRST_100_ROWS or \ + self.cmd_type == VIEW_LAST_100_ROWS: self.limit = 100 def get_primary_keys(self, *args, **kwargs): return None, None def save(self, changed_data, default_conn=None): - return forbidden(errmsg=gettext("Data cannot be saved for the current object.")) + return forbidden( + errmsg=gettext("Data cannot be saved for the current object.") + ) def get_limit(self): """ @@ -370,14 +385,22 @@ class TableCommand(GridCommand): sql_filter = self.get_filter() if sql_filter is None: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, pk_names=pk_names, cmd_type=self.cmd_type, - limit=self.limit, primary_keys=primary_keys, has_oids=has_oids) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, pk_names=pk_names, + cmd_type=self.cmd_type, limit=self.limit, + primary_keys=primary_keys, has_oids=has_oids + ) else: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, pk_names=pk_names, cmd_type=self.cmd_type, - sql_filter=sql_filter, limit=self.limit, primary_keys=primary_keys, - 
has_oids=has_oids) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, pk_names=pk_names, + cmd_type=self.cmd_type, sql_filter=sql_filter, + limit=self.limit, primary_keys=primary_keys, + has_oids=has_oids + ) return sql @@ -398,7 +421,10 @@ class TableCommand(GridCommand): if conn.connected(): # Fetch the primary key column names - query = render_template("/".join([self.sql_path, 'primary_keys.sql']), obj_id=self.obj_id) + query = render_template( + "/".join([self.sql_path, 'primary_keys.sql']), + obj_id=self.obj_id + ) status, result = conn.execute_dict(query) if not status: @@ -412,7 +438,10 @@ class TableCommand(GridCommand): # Remove last character from the string pk_names = pk_names[:-1] else: - raise Exception(gettext('Not connected to server or connection with the server has been closed.')) + raise Exception( + gettext('Not connected to server or connection with the ' + 'server has been closed.') + ) return pk_names, primary_keys @@ -436,14 +465,18 @@ class TableCommand(GridCommand): if conn.connected(): # Fetch the table oids status - query = render_template("/".join([self.sql_path, 'has_oids.sql']), obj_id=self.obj_id) + query = render_template( + "/".join([self.sql_path, 'has_oids.sql']), obj_id=self.obj_id) status, has_oids = conn.execute_scalar(query) if not status: raise Exception(has_oids) else: - raise Exception(gettext('Not connected to server or connection with the server has been closed.')) + raise Exception( + gettext('Not connected to server or connection with the ' + 'server has been closed.') + ) return has_oids @@ -493,7 +526,6 @@ class TableCommand(GridCommand): if len(changed_data[of_type]) < 1: continue - column_type = {} column_data = {} for each_col in columns_info: @@ -512,11 +544,16 @@ class TableCommand(GridCommand): # For newly added rows if of_type == 'added': # Python dict does not honour the inserted item order - # So to insert data in the order, we 
need to make ordered list of added index - # We don't need this mechanism in updated/deleted rows as - # it does not matter in those operations - added_index = OrderedDict(sorted(changed_data['added_index'].items(), - key=lambda x: int(x[0]))) + # So to insert data in the order, we need to make ordered + # list of added index We don't need this mechanism in + # updated/deleted rows as it does not matter in + # those operations + added_index = OrderedDict( + sorted( + changed_data['added_index'].items(), + key=lambda x: int(x[0]) + ) + ) list_of_sql[of_type] = [] # When new rows are added, only changed columns data is @@ -528,7 +565,8 @@ class TableCommand(GridCommand): has_oids = 'oid' in column_type for each_row in added_index: - # Get the row index to match with the added rows dict key + # Get the row index to match with the added rows + # dict key tmp_row_index = added_index[each_row] data = changed_data[of_type][tmp_row_index]['data'] # Remove our unique tracking key @@ -540,22 +578,31 @@ class TableCommand(GridCommand): # not_null=False and has no default value column_data.update(data) - sql = render_template("/".join([self.sql_path, 'insert.sql']), - data_to_be_saved=column_data, - primary_keys=None, - object_name=self.object_name, - nsp_name=self.nsp_name, - data_type=column_type, - pk_names=pk_names, - has_oids=has_oids) - select_sql = render_template("/".join([self.sql_path, 'select.sql']), - object_name=self.object_name, - nsp_name=self.nsp_name, - pk_names=pk_names.split(",") if pk_names else None, - has_oids=has_oids) - list_of_sql[of_type].append({'sql': sql, 'data': data, - 'client_row': tmp_row_index, - 'select_sql': select_sql}) + sql = render_template( + "/".join([self.sql_path, 'insert.sql']), + data_to_be_saved=column_data, + primary_keys=None, + object_name=self.object_name, + nsp_name=self.nsp_name, + data_type=column_type, + pk_names=pk_names, + has_oids=has_oids + ) + + select_sql = render_template( + "/".join([self.sql_path, 'select.sql']), 
+ object_name=self.object_name, + nsp_name=self.nsp_name, + pk_names=pk_names.split(",") if pk_names + else None, + has_oids=has_oids + ) + + list_of_sql[of_type].append({ + 'sql': sql, 'data': data, + 'client_row': tmp_row_index, + 'select_sql': select_sql + }) # Reset column data column_data = {} @@ -565,12 +612,14 @@ class TableCommand(GridCommand): for each_row in changed_data[of_type]: data = changed_data[of_type][each_row]['data'] pk = changed_data[of_type][each_row]['primary_keys'] - sql = render_template("/".join([self.sql_path, 'update.sql']), - data_to_be_saved=data, - primary_keys=pk, - object_name=self.object_name, - nsp_name=self.nsp_name, - data_type=column_type) + sql = render_template( + "/".join([self.sql_path, 'update.sql']), + data_to_be_saved=data, + primary_keys=pk, + object_name=self.object_name, + nsp_name=self.nsp_name, + data_type=column_type + ) list_of_sql[of_type].append({'sql': sql, 'data': data}) list_of_rowid.append(data.get(client_primary_key)) @@ -589,8 +638,9 @@ class TableCommand(GridCommand): # Python3 # In Python2, it's already a list & We will also # fetch column names using index - keys = list(changed_data[of_type][each_row].keys()) - + keys = list( + changed_data[of_type][each_row].keys() + ) no_of_keys = len(keys) is_first = False # Map index with column name for each row @@ -599,17 +649,20 @@ class TableCommand(GridCommand): # Set primary key with label & delete index based # mapped key try: - row[changed_data['columns'][int(k)]['name']] = v + row[changed_data['columns'] + [int(k)]['name']] = v except ValueError: continue del row[k] - sql = render_template("/".join([self.sql_path, 'delete.sql']), - data=rows_to_delete, - primary_key_labels=keys, - no_of_keys=no_of_keys, - object_name=self.object_name, - nsp_name=self.nsp_name) + sql = render_template( + "/".join([self.sql_path, 'delete.sql']), + data=rows_to_delete, + primary_key_labels=keys, + no_of_keys=no_of_keys, + object_name=self.object_name, + nsp_name=self.nsp_name 
+ ) list_of_sql[of_type].append({'sql': sql, 'data': {}}) for opr, sqls in list_of_sql.items(): @@ -627,15 +680,19 @@ class TableCommand(GridCommand): if not status: conn.execute_void('ROLLBACK;') - # If we roll backed every thing then update the message for - # each sql query. + # If we roll backed every thing then update the + # message for each sql query. for val in query_res: if query_res[val]['status']: - query_res[val]['result'] = 'Transaction ROLLBACK' + query_res[val]['result'] = \ + 'Transaction ROLLBACK' # If list is empty set rowid to 1 try: - _rowid = list_of_rowid[count] if list_of_rowid else 1 + if list_of_rowid: + _rowid = list_of_rowid[count] + else: + _rowid = 1 except Exception: _rowid = 0 @@ -648,29 +705,37 @@ class TableCommand(GridCommand): if not status: conn.execute_void('ROLLBACK;') - # If we roll backed every thing then update the message for - # each sql query. + # If we roll backed every thing then update + # the message for each sql query. for val in query_res: if query_res[val]['status']: - query_res[val]['result'] = 'Transaction ROLLBACK' + query_res[val]['result'] = \ + 'Transaction ROLLBACK' # If list is empty set rowid to 1 try: - _rowid = list_of_rowid[count] if list_of_rowid else 1 + if list_of_rowid: + _rowid = list_of_rowid[count] + else: + _rowid = 1 except Exception: _rowid = 0 return status, sel_res, query_res, _rowid if 'rows' in sel_res and len(sel_res['rows']) > 0: - row_added = {item['client_row']: sel_res['rows'][0]} + row_added = { + item['client_row']: sel_res['rows'][0]} rows_affected = conn.rows_affected() # store the result of each query in dictionary - query_res[count] = {'status': status, 'result': None if row_added else res, - 'sql': sql, 'rows_affected': rows_affected, - 'row_added': row_added} + query_res[count] = { + 'status': status, + 'result': None if row_added else res, + 'sql': sql, 'rows_affected': rows_affected, + 'row_added': row_added + } count += 1 @@ -708,13 +773,19 @@ class 
ViewCommand(GridCommand): sql_filter = self.get_filter() if sql_filter is None: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - limit=self.limit) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + limit=self.limit + ) else: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - sql_filter=sql_filter, limit=self.limit) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + sql_filter=sql_filter, limit=self.limit + ) return sql @@ -763,13 +834,19 @@ class ForeignTableCommand(GridCommand): sql_filter = self.get_filter() if sql_filter is None: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - limit=self.limit) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + limit=self.limit + ) else: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - sql_filter=sql_filter, limit=self.limit) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + sql_filter=sql_filter, limit=self.limit + ) return sql @@ -808,13 +885,19 @@ class CatalogCommand(GridCommand): sql_filter = self.get_filter() if sql_filter is None: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - limit=self.limit) + 
sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + limit=self.limit + ) else: - sql = render_template("/".join([self.sql_path, 'objectquery.sql']), object_name=self.object_name, - nsp_name=self.nsp_name, cmd_type=self.cmd_type, - sql_filter=sql_filter, limit=self.limit) + sql = render_template( + "/".join([self.sql_path, 'objectquery.sql']), + object_name=self.object_name, + nsp_name=self.nsp_name, cmd_type=self.cmd_type, + sql_filter=sql_filter, limit=self.limit + ) return sql diff --git a/web/pgadmin/tools/user_management/__init__.py b/web/pgadmin/tools/user_management/__init__.py index 3eb96f5..040b34d 100644 --- a/web/pgadmin/tools/user_management/__init__.py +++ b/web/pgadmin/tools/user_management/__init__.py @@ -51,7 +51,7 @@ class UserManagementModule(PgAdminModule): 'name': 'pgadmin.tools.user_management', 'path': url_for('user_management.index') + 'user_management', 'when': None - },{ + }, { 'name': 'pgadmin.user_management.current_user', 'path': url_for('user_management.index') + 'current_user', 'when': None, @@ -85,12 +85,14 @@ blueprint = UserManagementModule( def validate_user(data): new_data = dict() - email_filter = re.compile("^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9]" - "(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9]" - "(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$") + email_filter = re.compile( + "^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9]" + "(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9]" + "(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$" + ) if ('newPassword' in data and data['newPassword'] != "" and - 'confirmPassword' in data and data['confirmPassword'] != ""): + 'confirmPassword' in data and data['confirmPassword'] != ""): if data['newPassword'] == data['confirmPassword']: new_data['password'] = encrypt_password(data['newPassword']) @@ -132,20 +134,23 @@ def script(): mimetype="application/javascript" ) + 
@blueprint.route("/current_user.js") @login_required def current_user_info(): return Response( response=render_template( "user_management/js/current_user.js", - is_admin='true' if current_user.has_role("Administrator") else 'false', + is_admin='true' if current_user.has_role( + "Administrator") else 'false', user_id=current_user.id, email=current_user.email, name=( current_user.email.split('@')[0] if config.SERVER_MODE is True else 'postgres' ), - allow_save_password='true' if config.ALLOW_SAVE_PASSWORD else 'false' + allow_save_password='true' if config.ALLOW_SAVE_PASSWORD + else 'false' ), status=200, mimetype="application/javascript" @@ -247,7 +252,9 @@ def create(): ) -@blueprint.route('/user/', methods=['DELETE'], endpoint='delete_user') +@blueprint.route( + '/user/', methods=['DELETE'], endpoint='delete_user' +) @roles_required('Administrator') def delete(uid): """ diff --git a/web/setup.py b/web/setup.py index b1ae423..a1613de 100644 --- a/web/setup.py +++ b/web/setup.py @@ -41,8 +41,9 @@ if __name__ == '__main__': config.SETTINGS_SCHEMA_VERSION = SCHEMA_VERSION if "PGADMIN_TESTING_MODE" in os. environ and \ - os.environ["PGADMIN_TESTING_MODE"] == "1": + os.environ["PGADMIN_TESTING_MODE"] == "1": config.SQLITE_PATH = config.TEST_SQLITE_PATH + create_app_data_directory(config) app = create_app()