Add all missing options to the Import/Export Data functionality, and update the syntax of the COPY command to align with the latest standards. #8583
Add support for exporting table data based on a custom query. #8681pull/8710/head
|
|
@ -0,0 +1,91 @@
|
|||
.. _export_data_using_query:
|
||||
|
||||
***************************************
|
||||
`Export Data Using Query Dialog`:index:
|
||||
***************************************
|
||||
|
||||
Use the *Export Data Using Query* dialog to copy data to a file.
|
||||
|
||||
The *Export Data Using Query* dialog organizes the export of data through the
|
||||
*General*, *Options* and *Query* tabs.
|
||||
|
||||
.. image:: images/export_query_general.png
|
||||
:alt: Export data using query dialog general tab
|
||||
:align: center
|
||||
|
||||
Use the fields in the *General* tab to specify export preferences:
|
||||
|
||||
* Enter the name of the target file in the *Filename* field.
|
||||
Optionally, select the *Browse* icon (ellipsis) to the right to navigate
|
||||
into a directory and select a file.
|
||||
|
||||
* Use the drop-down listbox in the *Format* field to specify the file type.
|
||||
Select:
|
||||
|
||||
* *binary* for a .bin file.
|
||||
* *csv* for a .csv file.
|
||||
* *text* for a .txt file.
|
||||
|
||||
* Use the drop-down listbox in the *Encoding* field to specify the type of
|
||||
character encoding.
|
||||
|
||||
.. image:: images/export_query_options.png
|
||||
:alt: Export data using query dialog options tab
|
||||
:align: center
|
||||
|
||||
* Use the fields in the *Options* tab to specify additional information:
|
||||
|
||||
* Move the *Header* switch to the *Yes* position to include the table header
|
||||
with the data rows. If you include the table header, the first row of the
|
||||
file will contain the column names.
|
||||
* If you are exporting data, specify the delimiter that will separate the
|
||||
columns within the target file in the *Delimiter* field. The separating
|
||||
character can be a colon, semicolon, a vertical bar, or a tab.
|
||||
* Specify a quoting character used in the *Quote* field. Quoting can be
|
||||
applied to string columns only (i.e. numeric columns will not be quoted)
|
||||
or all columns regardless of data type. The character used for quoting can
|
||||
be a single quote or a double quote.
|
||||
* Specify a character that should appear before a data character that matches
|
||||
the *QUOTE* value in the *Escape* field.
|
||||
* Use the *NULL String* field to specify a string that will represent a null
|
||||
value within the source or target file.
|
||||
|
||||
Click the *Query* tab to continue.
|
||||
|
||||
.. image:: images/export_query.png
|
||||
:alt: Export data using query dialog query tab
|
||||
:align: center
|
||||
|
||||
Use the fields in the *Query* tab to write the query that will be exported:
|
||||
|
||||
* Use the *Export Data Query* field to specify a SELECT, VALUES, INSERT, UPDATE,
|
||||
DELETE, or MERGE command whose results are to be copied.
|
||||
|
||||
* Use the *Force Quote columns* field to force quoting to be used for all non-NULL
|
||||
values in each specified column. NULL output is never quoted. This is a creatable
|
||||
select control. If you would like to quote all columns then provide only '*' in
|
||||
the field.
|
||||
|
||||
After completing the *Export Data Using Query* dialog, click the *OK* button to
|
||||
perform the export. pgAdmin will notify you when the background
|
||||
process completes:
|
||||
|
||||
.. image:: images/export_query_complete.png
|
||||
:alt: Export data using query completion notification
|
||||
:align: center
|
||||
|
||||
|
||||
Use the *View Processes* button on the notification to open the *Process
|
||||
Watcher* and review detailed information about the execution of the command
|
||||
that performed the export:
|
||||
|
||||
Use the **End Process** button to end the Export process.
|
||||
|
||||
.. image:: images/export_query_pw.png
|
||||
:alt: Export data using query process watcher
|
||||
:align: center
|
||||
|
||||
.. note:: If you are running *pgAdmin* in *Server Mode* you can click on the |sm_icon| icon in the process watcher window to open the file location in the Storage Manager. You can use the :ref:`Storage Manager <storage_manager>` to download the exported file on the client machine .
|
||||
|
||||
|
||||
.. |sm_icon| image:: images/sm_icon.png
|
||||
|
After Width: | Height: | Size: 122 KiB |
|
After Width: | Height: | Size: 65 KiB |
|
After Width: | Height: | Size: 56 KiB |
|
After Width: | Height: | Size: 237 KiB |
|
After Width: | Height: | Size: 108 KiB |
|
Before Width: | Height: | Size: 93 KiB After Width: | Height: | Size: 254 KiB |
|
Before Width: | Height: | Size: 56 KiB After Width: | Height: | Size: 85 KiB |
|
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 188 KiB |
|
Before Width: | Height: | Size: 182 KiB After Width: | Height: | Size: 320 KiB |
|
Before Width: | Height: | Size: 54 KiB After Width: | Height: | Size: 123 KiB |
|
|
@ -33,18 +33,29 @@ Use the fields in the *General* tab to specify import and export preferences:
|
|||
* Use the drop-down listbox in the *Encoding* field to specify the type of
|
||||
character encoding.
|
||||
|
||||
* Use the drop-down listbox in the *On Error* field to specify how to behave
|
||||
when encountering an error converting a column's input value into its data type.
|
||||
An error_action value of stop means fail the command, while ignore means discard
|
||||
the input row and continue with the next one. The default is stop. This option is
|
||||
available from PG/EPAS version 17 and above.
|
||||
|
||||
* Use the drop-down listbox in the *Log Verbosity* field to specify the amount
|
||||
of messages emitted by a COPY command: default or verbose. This is currently
|
||||
used in Import only when ON_ERROR option is set to ignore. This option is
|
||||
available from PG/EPAS version 17 and above.
|
||||
|
||||
.. image:: images/import_export_options.png
|
||||
:alt: Import Export data dialog options tab
|
||||
:align: center
|
||||
|
||||
* Use the fields in the *Options* tab to specify additional information:
|
||||
|
||||
* Move the *OID* switch to the *Yes* position to include the *OID* column.
|
||||
The *OID* is a system-assigned value that may not be modified. The default
|
||||
is *No*.
|
||||
* Move the *Header* switch to the *Yes* position to include the table header
|
||||
with the data rows. If you include the table header, the first row of the
|
||||
file will contain the column names.
|
||||
* Move the *Freeze* switch to the *Yes* position to request copying the
|
||||
data with rows already frozen, just as they would be after running the
|
||||
VACUUM FREEZE command.
|
||||
* If you are exporting data, specify the delimiter that will separate the
|
||||
columns within the target file in the *Delimiter* field. The separating
|
||||
character can be a colon, semicolon, a vertical bar, or a tab.
|
||||
|
|
@ -54,8 +65,11 @@ Use the fields in the *General* tab to specify import and export preferences:
|
|||
be a single quote or a double quote.
|
||||
* Specify a character that should appear before a data character that matches
|
||||
the *QUOTE* value in the *Escape* field.
|
||||
* Use the *NULL Strings* field to specify a string that will represent a null
|
||||
* Use the *NULL String* field to specify a string that will represent a null
|
||||
value within the source or target file.
|
||||
* Use the *Default String* field to specify a string that will represent a default value.
|
||||
Each time the string is found in the input file, the default value of the corresponding
|
||||
column will be used. This option is available from PG/EPAS version 16 and above.
|
||||
|
||||
Click the *Columns* tab to continue.
|
||||
|
||||
|
|
@ -71,29 +85,39 @@ or exported:
|
|||
the left of the column name. Click an empty spot inside the field to access
|
||||
the drop-down list.
|
||||
|
||||
* If enabled, click inside the *Force Quote columns* field to force quoting
|
||||
to be used for all non-NULL values in each specified column. NULL output is
|
||||
never quoted. To delete a column, click the *x* to the left of the column name.
|
||||
|
||||
* If enabled, click inside the *NOT NULL columns* field to select one or more
|
||||
columns that will not be checked for a NULL value. To delete a column, click
|
||||
the *x* to the left of the column name.
|
||||
|
||||
* If enabled, click inside the *NULL columns* field to match the specified columns
|
||||
values against the null string, even if it has been quoted, and if a match is
|
||||
found set the value to NULL. To delete a column, click the *x* to the left of the
|
||||
column name.
|
||||
|
||||
After completing the *Import/Export data* dialog, click the *OK* button to
|
||||
perform the import or export. pgAdmin will inform you when the background
|
||||
perform the import or export. pgAdmin will notify you when the background
|
||||
process completes:
|
||||
|
||||
.. image:: images/import_export_complete.png
|
||||
:alt: Import Export data completion notification
|
||||
:align: center
|
||||
|
||||
Use the **Stop Process** button to stop the Import/Export process.
|
||||
|
||||
Use the *Click here for details* link on the notification to open the *Process
|
||||
Use the *View Processes* button on the notification to open the *Process
|
||||
Watcher* and review detailed information about the execution of the command
|
||||
that performed the import or export:
|
||||
|
||||
Use the **End Process** button to end the Import/Export process.
|
||||
|
||||
.. image:: images/import_export_pw.png
|
||||
:alt: Import Export data process watcher
|
||||
:align: center
|
||||
|
||||
.. note:: If you are running *pgAdmin* in *Server Mode* you can click on the |sm_icon| icon in the process watcher window to open the file location in the Storage Manager. You can use the :ref:`Storage Manager <storage_manager>` to download the backup file on the client machine .
|
||||
.. note:: If you are running *pgAdmin* in *Server Mode* you can click on the |sm_icon| icon in the process watcher window to open the file location in the Storage Manager. You can use the :ref:`Storage Manager <storage_manager>` to download the exported file on the client machine .
|
||||
|
||||
|
||||
.. |sm_icon| image:: images/sm_icon.png
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ granting user privileges, and performing VACUUM, ANALYZE and REINDEX functions.
|
|||
add_restore_point_dialog
|
||||
change_password_dialog
|
||||
grant_wizard
|
||||
export_data_using_query
|
||||
import_export_data
|
||||
maintenance_dialog
|
||||
storage_manager
|
||||
|
|
|
|||
|
|
@ -103,44 +103,46 @@ The Tools Menu
|
|||
|
||||
Use the *Tools* menu to access the following options (in alphabetical order):
|
||||
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| Option | Action |
|
||||
+===========================+===========================================================================================================================================+
|
||||
| *ERD Tool* | Click to open the :ref:`ERD Tool <erd_tool>` and start designing your database. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Grant Wizard...* | Click to access the :ref:`Grant Wizard <grant_wizard>` tool. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *PSQL Tool* | Click to open the :ref:`PSQL Tool <psql_tool>` and start PSQL in the current database context. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Query tool* | Click to open the :ref:`Query tool <query_tool>` for the currently selected object. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Schema Diff* | Click to open the :ref:`Schema Diff <schema_diff_feature>` and start comparing two database or two schema. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup Globals...* | Click to open the :ref:`Backup Globals... <backup_globals_dialog>` dialog to backup cluster objects. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup Server...* | Click to open the :ref:`Backup Server... <backup_server_dialog>` dialog to backup a server. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup...* | Click to open the :ref:`Backup... <backup_dialog>` dialog to backup database objects. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Restore...* | Click to access the :ref:`Restore <restore_dialog>` dialog to restore database files from a backup. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Import/Export Data...* | Click to open the :ref:`Import/Export data... <import_export_data>` dialog to import or export data from a table. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Maintenance...* | Click to open the :ref:`Maintenance... <maintenance_dialog>` dialog to VACUUM, ANALYZE, REINDEX, or CLUSTER. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Search Objects...* | Click to open the :ref:`Search Objects... <search_objects>` and start searching any kind of objects in a database. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Add named restore point* | Click to open the :ref:`Add named restore point... <add_restore_point_dialog>` dialog to take a point-in-time snapshot of the current |
|
||||
| | server state. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Pause replay of WAL* | Click to pause the replay of the WAL log. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Resume replay of WAL* | Click to resume the replay of the WAL log. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Reload Configuration...* | Click to update configuration files without restarting the server. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Storage Manager* | Click to open the :ref:`Storage Manager <storage_manager>` to upload, delete, or download the backup files. |
|
||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| Option | Action |
|
||||
+==============================+===========================================================================================================================================+
|
||||
| *ERD Tool* | Click to open the :ref:`ERD Tool <erd_tool>` and start designing your database. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Grant Wizard...* | Click to access the :ref:`Grant Wizard <grant_wizard>` tool. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *PSQL Tool* | Click to open the :ref:`PSQL Tool <psql_tool>` and start PSQL in the current database context. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Query tool* | Click to open the :ref:`Query tool <query_tool>` for the currently selected object. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Schema Diff* | Click to open the :ref:`Schema Diff <schema_diff_feature>` and start comparing two database or two schema. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup Globals...* | Click to open the :ref:`Backup Globals... <backup_globals_dialog>` dialog to backup cluster objects. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup Server...* | Click to open the :ref:`Backup Server... <backup_server_dialog>` dialog to backup a server. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Backup...* | Click to open the :ref:`Backup... <backup_dialog>` dialog to backup database objects. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Restore...* | Click to access the :ref:`Restore <restore_dialog>` dialog to restore database files from a backup. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Export Data Using Query...* | Click to open the :ref:`Export Data Using Query... <export_data_using_query>` dialog to export data from a table using query. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Import/Export Data...* | Click to open the :ref:`Import/Export data... <import_export_data>` dialog to import or export data from a table. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Maintenance...* | Click to open the :ref:`Maintenance... <maintenance_dialog>` dialog to VACUUM, ANALYZE, REINDEX, or CLUSTER. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Search Objects...* | Click to open the :ref:`Search Objects... <search_objects>` and start searching any kind of objects in a database. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Add named restore point* | Click to open the :ref:`Add named restore point... <add_restore_point_dialog>` dialog to take a point-in-time snapshot of the current |
|
||||
| | server state. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Pause replay of WAL* | Click to pause the replay of the WAL log. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Resume replay of WAL* | Click to resume the replay of the WAL log. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Reload Configuration...* | Click to update configuration files without restarting the server. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| *Storage Manager* | Click to open the :ref:`Storage Manager <storage_manager>` to upload, delete, or download the backup files. |
|
||||
+------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
|
||||
The Help Menu
|
||||
*************
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ Process Watcher
|
|||
|
||||
The Process Watcher logs all the activity associated with the process/task and provides
|
||||
additional information for troubleshooting
|
||||
Use the **Stop Process** button to stop the Backup process.
|
||||
Use the **End Process** button to end the process.
|
||||
|
||||
.. note:: If you are running *pgAdmin* in *Server Mode* you can click on the |sm_icon| icon in the process watcher window to open the file location in the Storage Manager. You can use the :ref:`Storage Manager <storage_manager>` to download the backup file on the client machine .
|
||||
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ notes for it.
|
|||
:maxdepth: 1
|
||||
|
||||
|
||||
release_notes_9_4
|
||||
release_notes_9_3
|
||||
release_notes_9_2
|
||||
release_notes_9_1
|
||||
|
|
|
|||
|
|
@ -0,0 +1,32 @@
|
|||
***********
|
||||
Version 9.4
|
||||
***********
|
||||
|
||||
Release date: 2025-05-29
|
||||
|
||||
This release contains a number of bug fixes and new features since the release of pgAdmin 4 v9.3.
|
||||
|
||||
Supported Database Servers
|
||||
**************************
|
||||
**PostgreSQL**: 13, 14, 15, 16 and 17
|
||||
|
||||
**EDB Advanced Server**: 13, 14, 15, 16 and 17
|
||||
|
||||
Bundled PostgreSQL Utilities
|
||||
****************************
|
||||
**psql**, **pg_dump**, **pg_dumpall**, **pg_restore**: 17.2
|
||||
|
||||
|
||||
New features
|
||||
************
|
||||
|
||||
| `Issue #8583 <https://github.com/pgadmin-org/pgadmin4/issues/8583>`_ - Add all missing options to the Import/Export Data functionality, and update the syntax of the COPY command to align with the latest standards.
|
||||
| `Issue #8681 <https://github.com/pgadmin-org/pgadmin4/issues/8681>`_ - Add support for exporting table data based on a custom query.
|
||||
|
||||
Housekeeping
|
||||
************
|
||||
|
||||
|
||||
Bug fixes
|
||||
*********
|
||||
|
||||
|
|
@ -23,11 +23,13 @@ from pgadmin.utils.ajax import make_json_response, bad_request, unauthorized
|
|||
|
||||
from config import PG_DEFAULT_DRIVER
|
||||
from pgadmin.model import Server
|
||||
from pgadmin.utils.constants import MIMETYPE_APP_JS, SERVER_NOT_FOUND
|
||||
from pgadmin.utils.constants import SERVER_NOT_FOUND
|
||||
from pgadmin.settings import get_setting, store_setting
|
||||
from pgadmin.tools.user_management.PgAdminPermissions import AllPermissionTypes
|
||||
|
||||
MODULE_NAME = 'import_export'
|
||||
NOT_NULL_COLUMNS = 'not_null_columns'
|
||||
NULL_COLUMNS = 'null_columns'
|
||||
|
||||
|
||||
class ImportExportModule(PgAdminModule):
|
||||
|
|
@ -111,27 +113,40 @@ class IEMessage(IProcessDesc):
|
|||
@property
|
||||
def message(self):
|
||||
# Fetch the server details like hostname, port, roles etc
|
||||
return _(
|
||||
"Copying table data '{0}.{1}' on database '{2}' "
|
||||
"and server '{3}'"
|
||||
).format(
|
||||
self.schema, self.table, self.database,
|
||||
self.get_server_name()
|
||||
)
|
||||
if self.schema is None and self.table is None:
|
||||
return _(
|
||||
"Copying table data using query on database '{0}' "
|
||||
"and server '{1}'"
|
||||
).format(self.database, self.get_server_name())
|
||||
else:
|
||||
return _(
|
||||
"Copying table data '{0}.{1}' on database '{2}' "
|
||||
"and server '{3}'"
|
||||
).format(
|
||||
self.schema, self.table, self.database,
|
||||
self.get_server_name()
|
||||
)
|
||||
|
||||
@property
|
||||
def type_desc(self):
|
||||
_type_desc = _("Import - ") if self.is_import else _("Export - ")
|
||||
return _type_desc + _("Copying table data")
|
||||
if self.schema is None and self.table is None:
|
||||
return _("Export - Copying table data using query")
|
||||
else:
|
||||
_type_desc = _("Import - ") if self.is_import else _("Export - ")
|
||||
return _type_desc + _("Copying table data")
|
||||
|
||||
def details(self, cmd, args):
|
||||
# Fetch the server details like hostname, port, roles etc
|
||||
if self.schema is None and self.table is None:
|
||||
object_str = "{0}".format(self.database)
|
||||
else:
|
||||
object_str = "{0}/{1}.{2}".format(self.database, self.schema,
|
||||
self.table)
|
||||
return {
|
||||
"message": self.message,
|
||||
"cmd": self._cmd,
|
||||
"server": self.get_server_name(),
|
||||
"object": "{0}/{1}.{2}".format(self.database, self.schema,
|
||||
self.table),
|
||||
"object": object_str,
|
||||
"type": _("Import Data") if self.is_import else _("Export Data")
|
||||
}
|
||||
|
||||
|
|
@ -142,58 +157,86 @@ def index():
|
|||
return bad_request(errormsg=_("This URL cannot be called directly."))
|
||||
|
||||
|
||||
def _get_ignored_column_list(data, driver, conn):
|
||||
def columns_to_string(columns, driver, conn):
|
||||
"""
|
||||
Get list of ignored columns for import/export.
|
||||
:param data: Data.
|
||||
:param driver: PG Driver.
|
||||
:param conn: Connection.
|
||||
:return: return ignored column list.
|
||||
This function create the columns list as a string
|
||||
"""
|
||||
icols = None
|
||||
cols = None
|
||||
for col in columns:
|
||||
if cols:
|
||||
cols += ', '
|
||||
else:
|
||||
cols = '('
|
||||
cols += driver.qtIdent(conn, col)
|
||||
cols += ')'
|
||||
|
||||
if data['icolumns']:
|
||||
ignore_cols = data['icolumns']
|
||||
|
||||
# format the ignore column list required as per copy command
|
||||
# requirement
|
||||
if ignore_cols and len(ignore_cols) > 0:
|
||||
icols = ", ".join([
|
||||
driver.qtIdent(conn, col)
|
||||
for col in ignore_cols])
|
||||
return icols
|
||||
return cols
|
||||
|
||||
|
||||
def _get_required_column_list(data, driver, conn):
|
||||
def _get_force_quote_column_list(data, driver, conn):
|
||||
"""
|
||||
Get list of required columns for import/export.
|
||||
:param data: Data.
|
||||
:param key: Key.
|
||||
:param driver: PG Driver.
|
||||
:param conn: Connection.
|
||||
:return: return required column list.
|
||||
"""
|
||||
cols = None
|
||||
|
||||
# format the column import/export list required as per copy command
|
||||
# requirement
|
||||
if data['columns']:
|
||||
columns = data['columns']
|
||||
if columns and len(columns) > 0:
|
||||
for col in columns:
|
||||
if cols:
|
||||
cols += ', '
|
||||
else:
|
||||
cols = '('
|
||||
cols += driver.qtIdent(conn, col)
|
||||
cols += ')'
|
||||
if 'force_quote_columns' not in data:
|
||||
return cols
|
||||
|
||||
# if export is using query then we need to check * is available in the
|
||||
# force_quote_columns then return *.
|
||||
if ('is_query_export' in data and data['is_query_export'] and
|
||||
'*' in data['force_quote_columns']):
|
||||
cols = '*'
|
||||
# If total columns is equal to selected columns for force quote then
|
||||
# return '*'
|
||||
elif ('total_columns' in data and
|
||||
len(data['force_quote_columns']) == data['total_columns']):
|
||||
cols = '*'
|
||||
else:
|
||||
if len(data['force_quote_columns']) > 0:
|
||||
cols = columns_to_string(data['force_quote_columns'], driver, conn)
|
||||
|
||||
return cols
|
||||
|
||||
|
||||
def _get_formatted_column_list(data, key, driver, conn):
|
||||
"""
|
||||
Get list of required columns for import/export.
|
||||
:param data: Data.
|
||||
:param key: Key.
|
||||
:param driver: PG Driver.
|
||||
:param conn: Connection.
|
||||
:return: return required column list.
|
||||
"""
|
||||
cols = None
|
||||
if key not in data:
|
||||
return cols
|
||||
|
||||
# if server version is >= 17 and key is either NULL_COLUMNS or
|
||||
# NOT_NULL_COLUMNS and total columns is equal to selected columns then
|
||||
# return '*'
|
||||
if ('total_columns' in data and conn.manager.version >= 170000 and
|
||||
key in [NULL_COLUMNS, NOT_NULL_COLUMNS] and
|
||||
len(data[key]) == data['total_columns']):
|
||||
cols = '*'
|
||||
else:
|
||||
columns = data[key]
|
||||
if len(columns) > 0:
|
||||
cols = columns_to_string(columns, driver, conn)
|
||||
|
||||
return cols
|
||||
|
||||
|
||||
def _save_import_export_settings(settings):
|
||||
settings = {key: settings[key] for key in settings if key not in
|
||||
['icolumns', 'columns', 'database', 'schema', 'table',
|
||||
'save_btn_icon']}
|
||||
['columns', 'database', 'schema', 'table', 'save_btn_icon',
|
||||
'not_null_columns', 'null_columns', 'force_quote_columns',
|
||||
'total_columns', 'is_query_export']}
|
||||
|
||||
if settings['is_import']:
|
||||
settings['import_file_name'] = settings['filename']
|
||||
|
|
@ -216,6 +259,21 @@ def _save_import_export_settings(settings):
|
|||
store_setting('import_export_setting', settings)
|
||||
|
||||
|
||||
def update_data_for_import_export(data):
    """
    Normalize *data* in place before building the COPY command.

    For a query-based export, options that apply only to table
    import/export ('on_error', 'log_verbosity', 'freeze',
    'default_string') are removed and 'is_import' is forced to False;
    otherwise the 'query' key is discarded.
    """
    if 'is_query_export' in data and data['is_query_export']:
        # Table-only COPY options are meaningless for a query export.
        for unwanted in ('on_error', 'log_verbosity', 'freeze',
                         'default_string'):
            data.pop(unwanted, None)
        # A query export is always an export, never an import.
        data['is_import'] = False
    else:
        data.pop('query', None)
|
||||
|
||||
|
||||
@blueprint.route('/job/<int:sid>', methods=['POST'], endpoint="create_job")
|
||||
@permissions_required(AllPermissionTypes.tools_import_export_data)
|
||||
@pga_login_required
|
||||
|
|
@ -283,30 +341,34 @@ def create_import_export_job(sid):
|
|||
else:
|
||||
return bad_request(errormsg=_('Please specify a valid file'))
|
||||
|
||||
# Get required and ignored column list
|
||||
icols = _get_ignored_column_list(data, driver, conn)
|
||||
cols = _get_required_column_list(data, driver, conn)
|
||||
# Get required and other columns list
|
||||
cols = _get_formatted_column_list(data, 'columns', driver, conn)
|
||||
not_null_cols = _get_formatted_column_list(data, NOT_NULL_COLUMNS,
|
||||
driver, conn)
|
||||
null_cols = _get_formatted_column_list(data, NULL_COLUMNS, driver,
|
||||
conn)
|
||||
quote_cols = _get_force_quote_column_list(data, driver, conn)
|
||||
|
||||
# Save the settings
|
||||
_save_import_export_settings(new_settings)
|
||||
|
||||
# Remove unwanted keys from data
|
||||
update_data_for_import_export(data)
|
||||
|
||||
# Create the COPY FROM/TO from template
|
||||
query = render_template(
|
||||
'import_export/sql/cmd.sql',
|
||||
conn=conn,
|
||||
data=data,
|
||||
columns=cols,
|
||||
ignore_column_list=icols
|
||||
)
|
||||
temp_path = 'import_export/sql/#{0}#/cmd.sql'.format(manager.version)
|
||||
query = render_template(temp_path, conn=conn, data=data, columns=cols,
|
||||
not_null_columns=not_null_cols,
|
||||
null_columns=null_cols,
|
||||
force_quote_columns=quote_cols)
|
||||
|
||||
args = ['--command', query]
|
||||
|
||||
try:
|
||||
|
||||
io_params = {
|
||||
'sid': sid,
|
||||
'schema': data['schema'],
|
||||
'table': data['table'],
|
||||
'schema': data['schema'] if 'schema' in data else None,
|
||||
'table': data['table'] if 'table' in data else None,
|
||||
'database': data['database'],
|
||||
'is_import': data['is_import'],
|
||||
'filename': data['filename'],
|
||||
|
|
|
|||
|
|
@ -54,8 +54,25 @@ define([
|
|||
},
|
||||
permission: AllPermissionTypes.TOOLS_IMPORT_EXPORT_DATA,
|
||||
}]);
|
||||
pgBrowser.add_menus([{
|
||||
name: 'import',
|
||||
node: 'database',
|
||||
module: this,
|
||||
applies: ['tools', 'context'],
|
||||
callback: 'callback_import_export',
|
||||
category: 'import',
|
||||
priority: 3,
|
||||
label: gettext('Export Data Using Query...'),
|
||||
enable: supportedNodes.enabled.bind(
|
||||
null, pgBrowser.tree, ['database']
|
||||
),
|
||||
data: {
|
||||
data_disabled: gettext('Please select any database from the object explorer to Export Data using query.'),
|
||||
},
|
||||
permission: AllPermissionTypes.TOOLS_IMPORT_EXPORT_DATA,
|
||||
}]);
|
||||
},
|
||||
getUISchema: function(treeItem) {
|
||||
getUISchema: function(treeItem, isQueryExport) {
|
||||
let treeNodeInfo = pgBrowser.tree.getTreeNodeHierarchy(treeItem);
|
||||
const selectedNode = pgBrowser.tree.selected();
|
||||
let itemNodeData = pgBrowser.tree.findNodeByDomElement(selectedNode).getData();
|
||||
|
|
@ -72,6 +89,7 @@ define([
|
|||
});
|
||||
return columnsList;
|
||||
}),
|
||||
isQueryExport: isQueryExport,
|
||||
}
|
||||
);
|
||||
},
|
||||
|
|
@ -89,9 +107,9 @@ define([
|
|||
|
||||
setExtraParameters(treeInfo) {
|
||||
let extraData = {};
|
||||
extraData['database'] = treeInfo.database._label;
|
||||
extraData['schema'] = treeInfo.schema._label;
|
||||
extraData['table'] = treeInfo.table._label;
|
||||
extraData['database'] = treeInfo?.database?._label;
|
||||
extraData['schema'] = treeInfo?.schema?._label;
|
||||
extraData['table'] = treeInfo?.table?._label;
|
||||
extraData['save_btn_icon'] = 'done';
|
||||
return extraData;
|
||||
},
|
||||
|
|
@ -102,6 +120,7 @@ define([
|
|||
callback_import_export: function(args, item) {
|
||||
let i = item || pgBrowser.tree.selected(),
|
||||
server_data = null;
|
||||
let isQueryExport = pgBrowser.tree.itemData(i)?._type == 'database' ? true : false;
|
||||
|
||||
while (i) {
|
||||
let node_data = pgBrowser.tree.itemData(i);
|
||||
|
|
@ -148,7 +167,7 @@ define([
|
|||
);
|
||||
}else{
|
||||
// Open the dialog for the import/export module
|
||||
let schema = this.getUISchema(item);
|
||||
let schema = this.getUISchema(item, isQueryExport);
|
||||
let urlShortcut = 'import_export.create_job',
|
||||
urlBase = url_for(urlShortcut, {
|
||||
'sid': treeInfo.server._id,
|
||||
|
|
@ -160,10 +179,10 @@ define([
|
|||
'filename': 'import_export_data.html',
|
||||
});
|
||||
|
||||
pgAdmin.Browser.Events.trigger('pgadmin:utility:show', item,
|
||||
gettext('Import/Export data - table \'%s\'', treeInfo.table.label),{
|
||||
schema, extraData, urlBase, sqlHelpUrl, helpUrl, actionType: 'select', saveBtnName: gettext('OK'),
|
||||
}, pgAdmin.Browser.stdW.md
|
||||
let title = isQueryExport ? gettext('Export Data Using Query - database \'%s\'', treeInfo.database.label) : gettext('Import/Export data - table \'%s\'', treeInfo.table.label);
|
||||
pgAdmin.Browser.Events.trigger('pgadmin:utility:show', item, title,
|
||||
{ schema, extraData, urlBase, sqlHelpUrl, helpUrl, actionType: 'select', saveBtnName: gettext('OK'),
|
||||
}, pgAdmin.Browser.stdW.md, pgAdmin.Browser.stdH.lg
|
||||
);
|
||||
}
|
||||
})
|
||||
|
|
|
|||
|
|
@ -16,15 +16,19 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
constructor(fieldOptions = {}, initValues={}) {
|
||||
super({
|
||||
null_string: undefined,
|
||||
is_import: true,
|
||||
icolumns: [],
|
||||
oid: undefined,
|
||||
default_string: undefined,
|
||||
is_import: !fieldOptions?.isQueryExport,
|
||||
is_query_export: fieldOptions?.isQueryExport,
|
||||
header: undefined,
|
||||
freeze: undefined,
|
||||
delimiter: ',',
|
||||
quote: '"',
|
||||
escape: '\'',
|
||||
file: undefined,
|
||||
format: 'csv',
|
||||
total_columns: 0,
|
||||
on_error: 'stop',
|
||||
log_verbosity: 'default',
|
||||
...initValues,
|
||||
});
|
||||
|
||||
|
|
@ -34,9 +38,10 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
...fieldOptions,
|
||||
};
|
||||
|
||||
this.colums_selection_label = {e:'Columns to export', i:'Columns to import'};
|
||||
this.colums_selection_label = {e:gettext('Columns to export'), i:gettext('Columns to import')};
|
||||
this._type = 'e';
|
||||
this.notNullColOptions = [];
|
||||
this.isQueryExport = fieldOptions?.isQueryExport;
|
||||
}
|
||||
|
||||
isDisabled(state) {
|
||||
|
|
@ -55,12 +60,14 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
{ 'label': gettext('Import'), 'value': true },
|
||||
{ 'label': gettext('Export'), 'value': false },
|
||||
],
|
||||
visible: !obj.isQueryExport,
|
||||
},
|
||||
{
|
||||
id: 'filename',
|
||||
label: gettext('Filename'),
|
||||
group: gettext('General'),
|
||||
deps: ['is_import', 'format'],
|
||||
noEmpty: true,
|
||||
depChange:(state, source)=>{
|
||||
if (source == 'is_import'){
|
||||
let filename = state.is_import ? state.import_file_name : state.export_file_name;
|
||||
|
|
@ -108,10 +115,50 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
options: this.fieldOptions.encoding,
|
||||
},
|
||||
{
|
||||
id: 'oid',
|
||||
label: gettext('OID'),
|
||||
type: 'switch',
|
||||
group: gettext('Options')
|
||||
id: 'on_error',
|
||||
label: gettext('On Error'),
|
||||
group: gettext('General'),
|
||||
type: 'select',
|
||||
controlProps: { allowClear: false, noEmpty: true },
|
||||
options: [
|
||||
{
|
||||
label: gettext('stop'),
|
||||
value: 'stop',
|
||||
},
|
||||
{
|
||||
label: gettext('ignore'),
|
||||
value: 'ignore',
|
||||
}
|
||||
],
|
||||
min_version: 170000,
|
||||
disabled: function(state) {
|
||||
return (state?.format == 'binary' || !state?.is_import);
|
||||
},
|
||||
visible: !obj.isQueryExport,
|
||||
helpMessage: gettext('Specifies how to behave when encountering an error converting a columns input value into its data type. An error_action value of stop means fail the command, while ignore means discard the input row and continue with the next one. The default is stop. The ignore option is applicable only for COPY FROM when the FORMAT is text or csv.')
|
||||
},
|
||||
{
|
||||
id: 'log_verbosity',
|
||||
label: gettext('Log Verbosity'),
|
||||
group: gettext('General'),
|
||||
type: 'select',
|
||||
controlProps: { allowClear: false, noEmpty: true },
|
||||
options: [
|
||||
{
|
||||
label: gettext('default'),
|
||||
value: 'default',
|
||||
},
|
||||
{
|
||||
label: gettext('verbose'),
|
||||
value: 'verbose',
|
||||
}
|
||||
],
|
||||
min_version: 170000,
|
||||
disabled: function(state) {
|
||||
return (state?.format == 'binary' || !state?.is_import);
|
||||
},
|
||||
visible: !obj.isQueryExport,
|
||||
helpMessage: gettext('Specify the amount of messages emitted by a COPY command: default or verbose. If verbose is specified, additional messages are emitted during processing. This is currently used in COPY FROM command when ON_ERROR option is set to ignore.')
|
||||
},
|
||||
{
|
||||
id: 'header',
|
||||
|
|
@ -120,6 +167,23 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
group: gettext('Options'),
|
||||
disabled: this.isDisabled
|
||||
},
|
||||
{
|
||||
id: 'freeze',
|
||||
label: gettext('Freeze'),
|
||||
type: 'switch',
|
||||
group: gettext('Options'),
|
||||
deps: ['is_import'],
|
||||
depChange: (state) => {
|
||||
if (!state.is_import) {
|
||||
return { freeze: false };
|
||||
}
|
||||
},
|
||||
disabled: function(state) {
|
||||
return !state?.is_import;
|
||||
},
|
||||
visible: !obj.isQueryExport,
|
||||
helpMessage: gettext('Requests copying the data with rows already frozen, just as they would be after running the VACUUM FREEZE command.')
|
||||
},
|
||||
{
|
||||
id: 'delimiter',
|
||||
label: gettext('Delimiter'),
|
||||
|
|
@ -187,7 +251,7 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
},
|
||||
{
|
||||
id: 'null_string',
|
||||
label: gettext('NULL Strings'),
|
||||
label: gettext('NULL String'),
|
||||
group: gettext('Options'),
|
||||
type: 'text',
|
||||
deps: ['format'],
|
||||
|
|
@ -196,12 +260,28 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
},
|
||||
helpMessage: gettext('Specifies the string that represents a null value. The default is \\N (backslash-N) in text format, and an unquoted empty string in CSV format. You might prefer an empty string even in text format for cases where you don\'t want to distinguish nulls from empty strings. This option is not allowed when using binary format.'),
|
||||
},
|
||||
{
|
||||
id: 'default_string',
|
||||
label: gettext('Default String'),
|
||||
group: gettext('Options'),
|
||||
type: 'text',
|
||||
deps: ['format'],
|
||||
min_version: 160000,
|
||||
visible: !obj.isQueryExport,
|
||||
disabled: function(state) {
|
||||
return (state?.format == 'binary' || !state?.is_import);
|
||||
},
|
||||
helpMessage: gettext('Specifies the string that represents a default value. Each time the string is found in the input file, the default value of the corresponding column will be used. This option is allowed only in COPY FROM, and only when not using binary format'),
|
||||
},
|
||||
{
|
||||
id: 'export_group', type: 'group', label: obj.isQueryExport ? gettext('Query') : gettext('Columns'),
|
||||
},
|
||||
{
|
||||
id: 'columns',
|
||||
label: gettext(this.colums_selection_label[this._type]),
|
||||
group: gettext('Columns'),
|
||||
group: 'export_group',
|
||||
type: () => ({
|
||||
type: 'select',
|
||||
label: this.colums_selection_label[this._type],
|
||||
options: obj.fieldOptions.columns,
|
||||
optionsLoaded: (options) => {
|
||||
obj.notNullColOptions = options.map((o) => {
|
||||
|
|
@ -210,6 +290,7 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
|
||||
if (!obj.state) return;
|
||||
|
||||
obj.state.setUnpreparedData(['total_columns'], obj.notNullColOptions.length);
|
||||
const data = obj.state.data;
|
||||
obj.state.data = {
|
||||
...data,
|
||||
|
|
@ -217,7 +298,7 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
};
|
||||
},
|
||||
controlProps:{
|
||||
multiple: true, allowClear: false,
|
||||
multiple: true, allowClear: false, allowSelectAll: true,
|
||||
placeholder:
|
||||
this._type === 'i' ? gettext('Columns for importing...') :
|
||||
gettext('Columns for exporting...'),
|
||||
|
|
@ -227,43 +308,110 @@ export default class ImportExportSchema extends BaseUISchema {
|
|||
depChange:(state)=>{
|
||||
this._type = state.is_import? 'i' : 'e';
|
||||
},
|
||||
visible: !obj.isQueryExport,
|
||||
helpMessage: gettext('An optional list of columns to be copied. If no column list is specified, all columns of the table will be copied.')
|
||||
},
|
||||
{
|
||||
id: 'icolumns',
|
||||
id: 'query',
|
||||
label: gettext('Export Data Query'),
|
||||
group: 'export_group',
|
||||
type: 'sql',
|
||||
visible: obj.isQueryExport,
|
||||
helpMessage: gettext('Specifies A SELECT, VALUES, INSERT, UPDATE, DELETE, or MERGE command whose results are to be copied.'),
|
||||
},
|
||||
{
|
||||
id: 'force_quote_columns',
|
||||
label: gettext('Force Quote columns'),
|
||||
group: 'export_group',
|
||||
deps: ['format', 'is_import', 'notNullColOptions'],
|
||||
type: () => {
|
||||
if (obj.fieldOptions.isQueryExport) {
|
||||
return {
|
||||
type: 'select',
|
||||
options: [],
|
||||
controlProps: {
|
||||
multiple: true, allowClear: true,
|
||||
creatable: true, noDropdown: true,
|
||||
placeholder: gettext('Force Quote columns...'),
|
||||
},
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
type: 'select',
|
||||
options: obj.notNullColOptions,
|
||||
optionsReloadBasis: obj.notNullColOptions.length,
|
||||
controlProps: {
|
||||
multiple: true, allowClear: true, allowSelectAll: true,
|
||||
placeholder: gettext('Force Quote columns...'),
|
||||
},
|
||||
};
|
||||
}
|
||||
},
|
||||
disabled:function(state){
|
||||
return (state?.format != 'csv' || state?.is_query_export ? false : state?.is_import);
|
||||
},
|
||||
helpMessage: gettext('Forces quoting to be used for all non-NULL values in each specified column. NULL output is never quoted. If * is specified, non-NULL values will be quoted in all columns. This option is allowed only in COPY TO, and only when using CSV format.'),
|
||||
},
|
||||
{
|
||||
id: 'not_null_columns',
|
||||
label: gettext('NOT NULL columns'),
|
||||
group: gettext('Columns'),
|
||||
group: 'export_group',
|
||||
deps: ['format', 'is_import', 'notNullColOptions'],
|
||||
type: () => ({
|
||||
type: 'select',
|
||||
options: obj.notNullColOptions,
|
||||
optionsReloadBasis: obj.notNullColOptions.length,
|
||||
controlProps: {
|
||||
multiple: true, allowClear: true,
|
||||
multiple: true, allowClear: true, allowSelectAll: true,
|
||||
placeholder: gettext('Not null columns...'),
|
||||
},
|
||||
}),
|
||||
visible: !obj.isQueryExport,
|
||||
disabled:function(state){
|
||||
return (state?.format != 'csv' || !state?.is_import);
|
||||
},
|
||||
helpMessage: gettext('Do not match the specified column values against the null string. In the default case where the null string is empty, this means that empty values will be read as zero-length strings rather than nulls, even when they are not quoted. This option is allowed only in import, and only when using CSV format.'),
|
||||
},
|
||||
{
|
||||
id: 'null_columns',
|
||||
label: gettext('NULL columns'),
|
||||
group: 'export_group',
|
||||
deps: ['format', 'is_import', 'notNullColOptions'],
|
||||
type: () => ({
|
||||
type: 'select',
|
||||
options: obj.notNullColOptions,
|
||||
optionsReloadBasis: obj.notNullColOptions.length,
|
||||
controlProps: {
|
||||
multiple: true, allowClear: true, allowSelectAll: true,
|
||||
placeholder: gettext('Null columns...'),
|
||||
},
|
||||
}),
|
||||
visible: !obj.isQueryExport,
|
||||
disabled:function(state){
|
||||
return (state?.format != 'csv' || !state?.is_import);
|
||||
},
|
||||
helpMessage: gettext('Match the specified columns values against the null string, even if it has been quoted, and if a match is found set the value to NULL. In the default case where the null string is empty, this converts a quoted empty string into NULL. This option is allowed only in COPY FROM, and only when using CSV format.'),
|
||||
},
|
||||
{
|
||||
id: 'notNullColOptions', exclude: true, visible: false, type: 'text',
|
||||
}
|
||||
},
|
||||
{
|
||||
id: 'total_columns', visible: false, type: 'int',
|
||||
},
|
||||
{
|
||||
id: 'is_query_export', visible: false, type: 'boolean',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
validate(state, setError) {
|
||||
if (isEmptyString(state.service)) {
|
||||
if (this.isQueryExport) {
|
||||
let errmsg = null;
|
||||
/* events validation*/
|
||||
if (!state.filename) {
|
||||
errmsg = gettext('Please provide a filename.');
|
||||
setError('filename', errmsg);
|
||||
if (isEmptyString(state.query)) {
|
||||
errmsg = gettext('Export Data Query can not be empty.');
|
||||
setError('query', errmsg);
|
||||
return true;
|
||||
} else {
|
||||
setError('filename', null);
|
||||
setError('query', null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1 @@
|
|||
\copy {% if data.query %}({{ data.query }}) {% else %}{{ conn|qtIdent(data.schema, data.table) }}{% if columns %}{{ columns }} {% endif %}{% endif %} {% if data.is_import %}FROM{% else %}TO{% endif %} {{ data.filename|qtLiteral(conn) }} WITH(FORMAT {{data.format}}{% if data.delimiter and data.format != 'binary' and data.delimiter == '[tab]' %}, DELIMITER E'\t'{% elif data.format != 'binary' and data.delimiter %}, DELIMITER {{ data.delimiter|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.header %}, HEADER{% endif %}{% if data.freeze %}, FREEZE{% endif %}{% if data.encoding %}, ENCODING {{ data.encoding|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.quote %}, QUOTE {{ data.quote|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.null_string %}, NULL {{ data.null_string|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.is_import and data.default_string %}, DEFAULT {{ data.default_string|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.escape %}, ESCAPE {{ data.escape|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.is_import and not_null_columns %}, FORCE_NOT_NULL {{ not_null_columns }}{% endif %}{% if data.format == 'csv' and data.is_import and null_columns %}, FORCE_NULL {{ null_columns }}{% endif %}{% if data.format == 'csv' and not data.is_import and force_quote_columns %}, FORCE_QUOTE {{ force_quote_columns }}{% endif %});
|
||||
|
|
@ -0,0 +1 @@
|
|||
\copy {% if data.query %}({{ data.query }}) {% else %}{{ conn|qtIdent(data.schema, data.table) }}{% if columns %}{{ columns }} {% endif %}{% endif %} {% if data.is_import %}FROM{% else %}TO{% endif %} {{ data.filename|qtLiteral(conn) }} WITH(FORMAT {{data.format}}{% if data.delimiter and data.format != 'binary' and data.delimiter == '[tab]' %}, DELIMITER E'\t'{% elif data.format != 'binary' and data.delimiter %}, DELIMITER {{ data.delimiter|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.header %}, HEADER{% endif %}{% if data.freeze %}, FREEZE{% endif %}{% if data.encoding %}, ENCODING {{ data.encoding|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.quote %}, QUOTE {{ data.quote|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.null_string %}, NULL {{ data.null_string|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.is_import and data.default_string %}, DEFAULT {{ data.default_string|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.escape %}, ESCAPE {{ data.escape|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.is_import and not_null_columns %}, FORCE_NOT_NULL {{ not_null_columns }}{% endif %}{% if data.format == 'csv' and data.is_import and null_columns %}, FORCE_NULL {{ null_columns }}{% endif %}{% if data.format == 'csv' and not data.is_import and force_quote_columns %}, FORCE_QUOTE {{ force_quote_columns }}{% endif %}{% if data.format != 'binary' and data.is_import and data.on_error == 'ignore' %}, ON_ERROR {{ data.on_error }}{% endif %}{% if data.format != 'binary' and data.is_import and data.log_verbosity and data.on_error == 'ignore' %}, LOG_VERBOSITY {{ data.log_verbosity }}{% endif %});
|
||||
|
|
@ -1 +0,0 @@
|
|||
\copy {{ conn|qtIdent(data.schema, data.table) }} {% if columns %} {{ columns }} {% endif %} {% if data.is_import %}FROM{% else %}TO{% endif %} {{ data.filename|qtLiteral(conn) }} {% if data.oid %} OIDS {% endif %}{% if data.delimiter is defined and data.delimiter == '' and (data.format == 'csv' or data.format == 'text') %} {% elif data.delimiter and data.format != 'binary' and data.delimiter == '[tab]' %} DELIMITER E'\t' {% elif data.format != 'binary' and data.delimiter %} DELIMITER {{ data.delimiter|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' %} CSV {% endif %} {% if data.format == 'csv' and data.header %} HEADER {% endif %}{% if data.encoding %} ENCODING {{ data.encoding|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.quote %} QUOTE {{ data.quote|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.null_string %} NULL {{ data.null_string|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.escape %} ESCAPE {{ data.escape|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.is_import and ignore_column_list %} FORCE NOT NULL {{ ignore_column_list }} {% endif %};
|
||||
|
|
@ -0,0 +1 @@
|
|||
\copy {% if data.query %}({{ data.query }}) {% else %}{{ conn|qtIdent(data.schema, data.table) }}{% if columns %}{{ columns }} {% endif %}{% endif %} {% if data.is_import %}FROM{% else %}TO{% endif %} {{ data.filename|qtLiteral(conn) }} WITH(FORMAT {{data.format}}{% if data.delimiter and data.format != 'binary' and data.delimiter == '[tab]' %}, DELIMITER E'\t'{% elif data.format != 'binary' and data.delimiter %}, DELIMITER {{ data.delimiter|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.header %}, HEADER{% endif %}{% if data.freeze %}, FREEZE{% endif %}{% if data.encoding %}, ENCODING {{ data.encoding|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.quote %}, QUOTE {{ data.quote|qtLiteral(conn) }}{% endif %}{% if data.format != 'binary' and data.null_string %}, NULL {{ data.null_string|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.escape %}, ESCAPE {{ data.escape|qtLiteral(conn) }}{% endif %}{% if data.format == 'csv' and data.is_import and not_null_columns %}, FORCE_NOT_NULL {{ not_null_columns }}{% endif %}{% if data.format == 'csv' and data.is_import and null_columns %}, FORCE_NULL {{ null_columns }}{% endif %}{% if data.format == 'csv' and not data.is_import and force_quote_columns %}, FORCE_QUOTE {{ force_quote_columns }}{% endif %});
|
||||
|
|
@ -50,7 +50,9 @@ class BatchProcessTest(BaseTestGenerator):
|
|||
escape="'",
|
||||
database='postgres',
|
||||
columns=['test_col_1', 'test_col_2'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="export_test_schema",
|
||||
table="export_test_table",
|
||||
storage='/'
|
||||
|
|
@ -87,7 +89,9 @@ class BatchProcessTest(BaseTestGenerator):
|
|||
escape="'",
|
||||
database='postgres',
|
||||
columns=['test_col_1', 'test_col_2'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="import_test_schema",
|
||||
table="import_test_table",
|
||||
storage='/'
|
||||
|
|
|
|||
|
|
@ -34,65 +34,110 @@ class ExportJobTest(BaseTestGenerator):
|
|||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT csv', 'QUOTE \'\\"\'',
|
||||
'ESCAPE \'\'\'\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'DEFAULT', 'FORCE_NOT_NULL',
|
||||
'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)),
|
||||
('When exporting a table with binary, encoding, delimiter, quote',
|
||||
('When exporting a table with csv, Header and Null String',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=False,
|
||||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
header=True,
|
||||
null_string='test',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT csv', 'QUOTE \'\\"\'',
|
||||
'ESCAPE \'\'\'\'', 'HEADER',
|
||||
'NULL \'test\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'DEFAULT', 'FORCE_NOT_NULL',
|
||||
'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)),
|
||||
('When exporting a table with binary, encoding',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export_bin',
|
||||
format='binary',
|
||||
is_import=False,
|
||||
header=True,
|
||||
encoding="LATIN1",
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
database='',
|
||||
null_string='test',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['col1', 'col2']
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'LATIN1\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text', 'HEADER',
|
||||
'DELIMITER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'NULL',
|
||||
'DEFAULT', 'FORCE_NOT_NULL',
|
||||
'FORCE_NULL'
|
||||
],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)),
|
||||
('When exporting a table with text, encoding, delimiter, quote',
|
||||
('When exporting a table with text, encoding, delimiter, null',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export_text',
|
||||
format='text',
|
||||
is_import=False,
|
||||
header=True,
|
||||
encoding="ISO_8859_5",
|
||||
delimiter="[tab]",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
null_string='test',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['col1', 'col2']
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT text', 'DELIMITER E\'\\\\t\'',
|
||||
'ENCODING \'ISO_8859_5\'',
|
||||
'NULL \'test\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT csv',
|
||||
'HEADER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
))
|
||||
)),
|
||||
]
|
||||
|
||||
def setUp(self):
|
||||
|
|
|
|||
|
|
@ -36,16 +36,17 @@ class ImportJobTest(BaseTestGenerator):
|
|||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT csv', 'QUOTE \'\\"\'',
|
||||
'ESCAPE \'\'\'\''],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
|
|
@ -56,11 +57,10 @@ class ImportJobTest(BaseTestGenerator):
|
|||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
|
|
@ -70,25 +70,229 @@ class ImportJobTest(BaseTestGenerator):
|
|||
)
|
||||
)
|
||||
)),
|
||||
('When importing a table with binary, encoding, delimiter, quote',
|
||||
('When importing a table with csv, encoding, header, null',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=True,
|
||||
header=True,
|
||||
encoding="LATIN1",
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
null_string='test',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT csv', 'DELIMITER \'|\'',
|
||||
'HEADER', 'ENCODING \'LATIN1\'',
|
||||
'QUOTE \'\'\'\'', 'NULL \'test\'',
|
||||
'ESCAPE \'\'\'\''],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=False,
|
||||
header=True,
|
||||
encoding="LATIN1",
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
null_string='test',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)
|
||||
)),
|
||||
('When importing a table with csv, default',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=True,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
default_string='def_str',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
server_min_version=160000,
|
||||
skip_msg="Default String supported by PG/EPAS 16 and above.",
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT csv', 'DELIMITER \'|\'',
|
||||
'QUOTE \'\'\'\'', 'DEFAULT \'def_str\'',
|
||||
'ESCAPE \'\'\'\''],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=False,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)
|
||||
)),
|
||||
('When importing a table with csv, on_error and log_verbosity',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=True,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
on_error='ignore',
|
||||
log_verbosity='verbose',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
server_min_version=170000,
|
||||
skip_msg="ON_ERROR and LOG_VERBOSITY supported by PG/EPAS 17 and "
|
||||
"above.",
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT csv', 'DELIMITER \'|\'',
|
||||
'QUOTE \'\'\'\'', 'ON_ERROR ignore',
|
||||
'LOG_VERBOSITY verbose'],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=False,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)
|
||||
)),
|
||||
('When importing a table with csv, on_error and log_verbosity should '
|
||||
'not be visible if on_error = stop',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=True,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
on_error='stop',
|
||||
log_verbosity='verbose',
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
server_min_version=170000,
|
||||
skip_msg="ON_ERROR and LOG_VERBOSITY supported by PG/EPAS 17 and "
|
||||
"above.",
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT csv', 'DELIMITER \'|\'',
|
||||
'QUOTE \'\'\'\''],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS', 'ON_ERROR',
|
||||
'LOG_VERBOSITY'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
params=dict(
|
||||
filename='test_import_export',
|
||||
format='csv',
|
||||
is_import=False,
|
||||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
columns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)
|
||||
)),
|
||||
('When importing a table with binary, encoding',
|
||||
dict(
|
||||
params=dict(
|
||||
filename='test_import_export_bin',
|
||||
format='binary',
|
||||
is_import=True,
|
||||
header=True,
|
||||
delimiter="",
|
||||
null_string='test',
|
||||
encoding="LATIN1",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM','WITH',
|
||||
'FORMAT binary', 'ENCODING \'LATIN1\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text', 'HEADER',
|
||||
'DELIMITER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'NULL',
|
||||
'DEFAULT', 'FORCE_NOT_NULL',
|
||||
'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
|
|
@ -100,11 +304,10 @@ class ImportJobTest(BaseTestGenerator):
|
|||
delimiter="|",
|
||||
quote="'",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
|
|
@ -124,16 +327,22 @@ class ImportJobTest(BaseTestGenerator):
|
|||
delimiter="[tab]",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
database='',
|
||||
null_string='test',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'FORMAT text', 'DELIMITER E\'\\\\t\'',
|
||||
'ENCODING \'ISO_8859_5\'',
|
||||
'NULL \'test\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT csv',
|
||||
'HEADER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
),
|
||||
export_options=dict(
|
||||
|
|
@ -145,16 +354,15 @@ class ImportJobTest(BaseTestGenerator):
|
|||
delimiter="[tab]",
|
||||
quote="'",
|
||||
escape="'",
|
||||
database='',
|
||||
columns=[],
|
||||
icolumns=[],
|
||||
schema="",
|
||||
table=""
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[]
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_params=dict(
|
||||
expected_cmd_opts=['--command', 'copy', 'TO'],
|
||||
not_expected_cmd_opts=[],
|
||||
not_expected_cmd_opts=['FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)
|
||||
)
|
||||
|
|
@ -218,6 +426,11 @@ class ImportJobTest(BaseTestGenerator):
|
|||
self.server_id = parent_node_dict["server"][-1]["server_id"]
|
||||
url = self.url.format(self.server_id)
|
||||
|
||||
if (hasattr(self, 'server_min_version') and
|
||||
self.server_information["server_version"] <
|
||||
self.server_min_version):
|
||||
self.skipTest(self.skip_msg)
|
||||
|
||||
self.create_export()
|
||||
|
||||
# Create the import/export job
|
||||
|
|
|
|||
|
|
@ -45,14 +45,19 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
escape="'",
|
||||
database='postgres',
|
||||
columns=['test_col_1', 'test_col_2'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="export_test_schema",
|
||||
table="export_test_table"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO',
|
||||
'export_test_schema', 'export_test_table'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'export_test_schema', 'export_test_table',
|
||||
'WITH', 'FORMAT csv', 'QUOTE \\\'"\\\'',
|
||||
'ESCAPE \\\'\\\'\\\'\\\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with csv file, header, delimiter=tab, '
|
||||
|
|
@ -78,19 +83,24 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
|
||||
'export_test_table_01', 'HEADER', 'DELIMITER',
|
||||
'LATIN1'],
|
||||
not_expected_cmd_opts=[],
|
||||
'export_test_table_01', 'WITH',
|
||||
'FORMAT csv', 'HEADER',
|
||||
'ENCODING \\\'LATIN1\\\'',
|
||||
'DELIMITER E\\\'\\\\t\\\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with csv file, header, delimiter=tab, '
|
||||
'encoding=LATIN1',
|
||||
('When export file with csv file with force_quote_column as '
|
||||
'selected cols',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
|
|
@ -102,25 +112,65 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_01',
|
||||
filename='test_export_file_02',
|
||||
format="csv",
|
||||
encoding="LATIN1",
|
||||
header=True,
|
||||
delimiter="[tab]",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_010'],
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
|
||||
'export_test_table_01', 'HEADER', 'DELIMITER',
|
||||
'LATIN1'],
|
||||
not_expected_cmd_opts=[],
|
||||
'export_test_table_01', 'WITH', 'FORMAT csv',
|
||||
'FORCE_QUOTE (test_col_010)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with csv file with force_quote_column *',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_03',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_010', 'test_col_011'],
|
||||
total_columns=2,
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_01',
|
||||
'export_test_table_01', 'WITH', 'FORMAT csv',
|
||||
'FORCE_QUOTE *'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with binary file, oid, encoding=UTF8',
|
||||
|
|
@ -135,7 +185,7 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_02',
|
||||
filename='test_export_file_04',
|
||||
format="binary",
|
||||
encoding="UTF8",
|
||||
oid=True,
|
||||
|
|
@ -145,7 +195,9 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_020', 'test_col_021'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="test_schema_02",
|
||||
table="export_test_table_02"
|
||||
),
|
||||
|
|
@ -158,6 +210,44 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
not_expected_cmd_opts=[],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with binary file, encoding=UTF8',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_05',
|
||||
format="binary",
|
||||
encoding="UTF8",
|
||||
header=True,
|
||||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_020', 'test_col_021'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_020', 'test_col_021'],
|
||||
schema="test_schema_02",
|
||||
table="export_test_table_02"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_02',
|
||||
'export_test_table_02', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'UTF8\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text', 'HEADER',
|
||||
'DELIMITER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'NULL',
|
||||
'DEFAULT', 'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with text file, delimiter=|, encoding=ISO_8859_6',
|
||||
dict(
|
||||
class_params=dict(
|
||||
|
|
@ -170,24 +260,31 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_03',
|
||||
filename='test_export_file_06',
|
||||
format="text",
|
||||
encoding="ISO_8859_6",
|
||||
delimiter="|",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
null_string='abcd',
|
||||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_030', 'test_col_031'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_030', 'test_col_031'],
|
||||
schema="test_schema_03",
|
||||
table="export_test_table_03"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_03',
|
||||
'export_test_table_03', 'DELIMITER',
|
||||
'ISO_8859_6'],
|
||||
not_expected_cmd_opts=[],
|
||||
'export_test_table_03', 'WITH',
|
||||
'FORMAT text', 'DELIMITER \'|\'',
|
||||
'ENCODING \'ISO_8859_6\'', 'NULL \'abcd\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT csv',
|
||||
'HEADER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with binary file, delimiter=tab, '
|
||||
|
|
@ -203,7 +300,7 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_04',
|
||||
filename='test_export_file_07',
|
||||
format="binary",
|
||||
encoding="ISO_8859_6",
|
||||
quote="\"",
|
||||
|
|
@ -211,15 +308,20 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
is_import=False,
|
||||
database='postgres',
|
||||
columns=['test_col_040', 'test_col_041'],
|
||||
icolumns=[],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=[],
|
||||
schema="test_schema_04",
|
||||
table="export_test_table_04"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'test_schema_04',
|
||||
'export_test_table_04',
|
||||
'ISO_8859_6'],
|
||||
not_expected_cmd_opts=['DELIMITER'],
|
||||
'export_test_table_04', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'ISO_8859_6\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text', 'HEADER',
|
||||
'DELIMITER', 'QUOTE', 'ESCAPE', 'NULL',
|
||||
'FORCE_QUOTE_COLUMNS', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with default options',
|
||||
|
|
@ -234,7 +336,7 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_import_file.csv',
|
||||
filename='test_export_file.csv',
|
||||
format='csv',
|
||||
is_import=True,
|
||||
delimiter="",
|
||||
|
|
@ -242,14 +344,440 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
escape="'",
|
||||
database='postgres',
|
||||
columns=['test_col_1', 'test_col_2'],
|
||||
icolumns=[],
|
||||
schema="import_test_schema",
|
||||
table="import_test_table"
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
schema="export_test_schema",
|
||||
table="export_test_table"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM',
|
||||
'import_test_schema', 'import_test_table'],
|
||||
not_expected_cmd_opts=[],
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'WITH',
|
||||
'export_test_schema', 'export_test_table',
|
||||
'WITH', 'FORMAT csv', 'QUOTE \\\'"\\\'',
|
||||
'ESCAPE \\\'\\\'\\\'\\\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with csv file, header, delimiter=tab, '
|
||||
'encoding=LATIN1',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_01',
|
||||
format="csv",
|
||||
encoding="LATIN1",
|
||||
header=True,
|
||||
delimiter="[tab]",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_01',
|
||||
'export_test_table_01', 'WITH',
|
||||
'FORMAT csv', 'HEADER',
|
||||
'ENCODING \\\'LATIN1\\\'',
|
||||
'DELIMITER E\\\'\\\\t\\\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with csv file with force_not_null_column and'
|
||||
' force_null as selected cols',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_02',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
not_null_columns=['test_col_010'],
|
||||
null_columns=['test_col_011'],
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_01',
|
||||
'export_test_table_01', 'WITH',
|
||||
'FORCE_NOT_NULL (test_col_010)',
|
||||
'FORCE_NULL (test_col_011)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with csv file with force_not_null_column and'
|
||||
' force_null as *',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_03',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_010', 'test_col_011'],
|
||||
not_null_columns=['test_col_010', 'test_col_011'],
|
||||
null_columns=['test_col_010', 'test_col_011'],
|
||||
total_columns=2,
|
||||
schema="test_schema_01",
|
||||
table="export_test_table_01"
|
||||
),
|
||||
server_min_version=170000,
|
||||
skip_msg="FORCE_NOT_NULL * and FORCE_NULL * syntax is available "
|
||||
"from PG/EPAS 17 and above.",
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_01',
|
||||
'export_test_table_01', 'WITH',
|
||||
'FORCE_NOT_NULL *', 'FORCE_NULL *'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with binary file, encoding=UTF8',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_05',
|
||||
format="binary",
|
||||
encoding="UTF8",
|
||||
header=True,
|
||||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_020', 'test_col_021'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_020', 'test_col_021'],
|
||||
schema="test_schema_02",
|
||||
table="export_test_table_02"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_02',
|
||||
'export_test_table_02', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'UTF8\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with text file, delimiter=|, encoding=ISO_8859_6',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_06',
|
||||
format="text",
|
||||
encoding="ISO_8859_6",
|
||||
delimiter="|",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
null_string='abcd',
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_030', 'test_col_031'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
force_quote_columns=['test_col_030', 'test_col_031'],
|
||||
schema="test_schema_03",
|
||||
table="export_test_table_03"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_03',
|
||||
'export_test_table_03', 'WITH',
|
||||
'FORMAT text', 'DELIMITER \'|\'',
|
||||
'ENCODING \'ISO_8859_6\'', 'NULL \'abcd\''],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT csv',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When import file with binary file, delimiter=tab, '
|
||||
'encoding=ISO_8859_6',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_07',
|
||||
format="binary",
|
||||
encoding="ISO_8859_6",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
is_import=True,
|
||||
database='postgres',
|
||||
columns=['test_col_040', 'test_col_041'],
|
||||
not_null_columns=[],
|
||||
null_columns=[],
|
||||
schema="test_schema_04",
|
||||
table="export_test_table_04"
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'FROM', 'test_schema_04',
|
||||
'export_test_table_04', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'ISO_8859_6\''],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text',
|
||||
'FORCE_QUOTE_COLUMNS'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with csv using Query',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_query_01',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
is_query_export=True,
|
||||
database='postgres',
|
||||
query='select * from export_test_table',
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'(select * from export_test_table)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file using Query with csv file, header, delimiter=tab, '
|
||||
'encoding=LATIN1',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_query_02',
|
||||
format="csv",
|
||||
encoding="LATIN1",
|
||||
header=True,
|
||||
delimiter="[tab]",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
is_query_export=True,
|
||||
database='postgres',
|
||||
query='select * from export_test_table',
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT csv', 'HEADER',
|
||||
'ENCODING \\\'LATIN1\\\'',
|
||||
'DELIMITER E\\\'\\\\t\\\'',
|
||||
'(select * from export_test_table)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file using Query with csv file with force_quote_column '
|
||||
'as selected cols',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_query_03',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
is_query_export=True,
|
||||
database='postgres',
|
||||
query='select * from export_test_table',
|
||||
force_quote_columns=['test_col_010'],
|
||||
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT csv', 'FORCE_QUOTE (test_col_010)',
|
||||
'(select * from export_test_table)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file using Query with csv file with '
|
||||
'force_quote_column *',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_03',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
is_query_export=True,
|
||||
database='postgres',
|
||||
query='select * from export_test_table',
|
||||
force_quote_columns=['*', 'test_col_011'],
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT csv', 'FORCE_QUOTE *',
|
||||
'(select * from export_test_table)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with csv using query',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_query_01',
|
||||
format="csv",
|
||||
encoding="",
|
||||
header=False,
|
||||
delimiter="'",
|
||||
quote="'",
|
||||
escape="\"",
|
||||
is_import=False,
|
||||
is_query_export=True,
|
||||
database='postgres',
|
||||
query='select * from export_test_table',
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'(select * from export_test_table)'],
|
||||
not_expected_cmd_opts=['FORMAT binary', 'FORMAT text', 'DEFAULT',
|
||||
'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
('When export file with binary using query, encoding=UTF8',
|
||||
dict(
|
||||
class_params=dict(
|
||||
sid=1,
|
||||
name='test_export_server',
|
||||
port=5444,
|
||||
host='localhost',
|
||||
database='postgres',
|
||||
bfile='test_export',
|
||||
username='postgres'
|
||||
),
|
||||
params=dict(
|
||||
filename='test_export_file_05',
|
||||
format="binary",
|
||||
encoding="UTF8",
|
||||
header=True,
|
||||
delimiter="",
|
||||
quote="\"",
|
||||
escape="'",
|
||||
is_import=False,
|
||||
database='postgres',
|
||||
is_query_export=True,
|
||||
query='select * from export_test_table_02',
|
||||
),
|
||||
url=import_export_url,
|
||||
expected_cmd_opts=['--command', 'copy', 'TO', 'WITH',
|
||||
'FORMAT binary', 'ENCODING \'UTF8\'',
|
||||
'(select * from export_test_table_02)'],
|
||||
not_expected_cmd_opts=['FORMAT csv', 'FORMAT text', 'HEADER',
|
||||
'DELIMITER', 'QUOTE', 'ESCAPE',
|
||||
'FORCE_QUOTE_COLUMNS', 'NULL',
|
||||
'DEFAULT', 'FORCE_NOT_NULL', 'FORCE_NULL'],
|
||||
expected_exit_code=[0, None]
|
||||
)),
|
||||
]
|
||||
|
|
@ -325,9 +853,11 @@ class IECreateJobTest(BaseTestGenerator):
|
|||
db_owner = server_response['data']['user']['name']
|
||||
self.data = database_utils.get_db_data(db_owner)
|
||||
|
||||
if hasattr(self, 'server_max_version') \
|
||||
and server_response["data"]["version"] > self.\
|
||||
server_max_version:
|
||||
resp_server_version = server_response["data"]["version"]
|
||||
if ((hasattr(self, 'server_max_version') and
|
||||
resp_server_version > self.server_max_version) or
|
||||
(hasattr(self, 'server_min_version') and
|
||||
resp_server_version < self.server_min_version)):
|
||||
self.skipTest(self.skip_msg)
|
||||
|
||||
url = self.url.format(self.server_id)
|
||||
|
|
|
|||