testing table_to_insert when table data is exported to csv

Ottmar Gobrecht 2020-08-11 07:14:30 +02:00
parent a534f2febf
commit a38d389e73
5 changed files with 311 additions and 100 deletions

View File

@ -164,11 +164,11 @@ EXAMPLE ZIP FILE SQL*Plus
-- SQL*Plus can only handle CLOBs, no BLOBs - so we are forced to create a CLOB
-- for spooling the content to the client disk. You need to decode the base64
-- encoded file before you are able to unzip the content. Also see this blog
-- post how to do this on the different operating systems:
-- post how to do this on different operating systems:
-- https://www.igorkromin.net/index.php/2017/04/26/base64-encode-or-decode-on-the-command-line-without-installing-extra-tools-on-linux-windows-or-macos/
-- Example Windows: certutil -decode app_100.zip.base64 app_100.zip
-- Example Mac: base64 -D -i app_100.zip.base64 -o app_100.zip
-- Example Linux: base64 -d app_100.zip.base64 > app_100.zip
set verify off feedback off heading off termout off
set trimout on trimspool on pagesize 0 linesize 5000 long 100000000 longchunksize 32767
whenever sqlerror exit sql.sqlcode rollback
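The comment above explains why the export is delivered base64 encoded: SQL*Plus can only spool CLOBs, not BLOBs. As a rough illustration of the server side of that idea (a minimal sketch, not PLEX's actual implementation), a BLOB can be encoded chunk-wise into a base64 CLOB like this:

declare
  l_blob  blob := to_blob(utl_raw.cast_to_raw('demo content')); -- stands in for the real zip BLOB
  l_clob  clob;
  l_chunk pls_integer := 12000; -- bytes per chunk; a multiple of 3 avoids base64 padding mid-stream
begin
  for i in 0 .. trunc((dbms_lob.getlength(l_blob) - 1) / l_chunk) loop
    l_clob := l_clob || utl_raw.cast_to_varchar2(
      utl_encode.base64_encode(dbms_lob.substr(l_blob, l_chunk, i * l_chunk + 1)));
  end loop;
  dbms_output.put_line('base64 length: ' || dbms_lob.getlength(l_clob));
end;
/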
@ -376,7 +376,7 @@ FUNCTION queries_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"', -- Used when the data contains the delimiter character.
p_header_prefix IN VARCHAR2 DEFAULT NULL, -- Prefix the header line with this text.
p_include_runtime_log IN BOOLEAN DEFAULT true, -- If true, generate file plex_runtime_log.md with runtime statistics.
p_include_error_log IN BOOLEAN DEFAULT true) -- If true, generate file plex_error_log.md with detailed error messages.
RETURN tab_export_files;
```
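A minimal usage sketch for the signature above (the plex.add_query call and its parameter names are taken from the PLEX docs and are assumptions to verify against the package spec):

declare
  l_files plex.tab_export_files;
begin
  -- register one or more queries first
  plex.add_query(
    p_query     => 'select * from user_tables',
    p_file_name => 'user_tables');
  -- then export all registered queries as CSV files using the defaults shown above
  l_files := plex.queries_to_csv;
  dbms_output.put_line(l_files.count || ' file(s) generated');
end;
/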

View File

@ -9,8 +9,9 @@
"scripts": {
"prebuild": "npx ploc --in src/PLEX.pks --out README.md",
"build": "node src/build.js",
"postbuild": "echo exit | sqlplus -S tests/oracle@localhost/xepdb1 @plex_install.sql",
"watch": "chokidar src/PLEX.pks src/PLEX.pkb src/plex_install.sql --initial -c \"npm run build\""
"postbuild": "echo exit | sqlplus -S /@cloud_playground_data @plex_install.sql",
"watch": "chokidar src/PLEX.pks src/PLEX.pkb src/plex_install.sql --initial -c \"npm run build\"",
"test-ccflags": "echo exit | sqlplus -S /@cloud_playground_data @plex_install_dev_test_ccflags.sql"
},
"devDependencies": {
"chokidar-cli": "^2.1.0",

View File

@ -262,11 +262,11 @@ EXAMPLE ZIP FILE SQL*Plus
-- SQL*Plus can only handle CLOBs, no BLOBs - so we are forced to create a CLOB
-- for spooling the content to the client disk. You need to decode the base64
-- encoded file before you are able to unzip the content. Also see this blog
-- post how to do this on the different operating systems:
-- post how to do this on different operating systems:
-- https://www.igorkromin.net/index.php/2017/04/26/base64-encode-or-decode-on-the-command-line-without-installing-extra-tools-on-linux-windows-or-macos/
-- Example Windows: certutil -decode app_100.zip.base64 app_100.zip
-- Example Mac: base64 -D -i app_100.zip.base64 -o app_100.zip
-- Example Linux: base64 -d app_100.zip.base64 > app_100.zip
set verify off feedback off heading off termout off
set trimout on trimspool on pagesize 0 linesize 5000 long 100000000 longchunksize 32767
whenever sqlerror exit sql.sqlcode rollback
@ -314,7 +314,7 @@ FUNCTION queries_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"', -- Used when the data contains the delimiter character.
p_header_prefix IN VARCHAR2 DEFAULT NULL, -- Prefix the header line with this text.
p_include_runtime_log IN BOOLEAN DEFAULT true, -- If true, generate file plex_runtime_log.md with runtime statistics.
p_include_error_log IN BOOLEAN DEFAULT true) -- If true, generate file plex_error_log.md with detailed error messages.
RETURN tab_export_files;
/**
Export one or more queries as CSV data within a file collection.
@ -569,7 +569,7 @@ PROCEDURE util_ensure_unique_file_names (p_export_files IN OUT tab_export_files)
FUNCTION util_to_xlsx_datetime (
p_date IN DATE)
RETURN;
RETURN NUMBER;
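The corrected signature above returns a NUMBER because xlsx stores date/time values as serial numbers. One common conversion looks like this (a hypothetical standalone sketch, not necessarily the formula PLEX uses):

-- days (including the time fraction) since 1899-12-30, Excel's effective epoch
create or replace function demo_to_xlsx_datetime (p_date in date) return number is
begin
  return p_date - to_date('1899-12-30', 'yyyy-mm-dd');
end demo_to_xlsx_datetime;
/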
--------------------------------------------------------------------------------------------------------------------------------
-- The following tools are working on global private package variables
@ -607,7 +607,7 @@ PROCEDURE util_clob_query_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"',
p_header_prefix IN VARCHAR2 DEFAULT NULL);
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000);
@ -799,7 +799,7 @@ PROCEDURE util_clob_query_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"',
p_header_prefix IN VARCHAR2 DEFAULT NULL);
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000);
@ -1401,7 +1401,6 @@ PROCEDURE util_clob_query_to_csv (
p_header_prefix IN VARCHAR2 DEFAULT NULL)
IS
-- inspired by Tim Hall: https://oracle-base.com/dba/script?category=miscellaneous&file=csv.sql
v_line_terminator VARCHAR2(2) := c_crlf; -- to be compatible with Excel we need to use crlf here (multiline text uses lf and is wrapped in quotes)
v_cursor PLS_INTEGER;
v_ignore_me PLS_INTEGER;
v_data_count PLS_INTEGER := 0;
@ -1487,6 +1486,8 @@ BEGIN
END IF;
END LOOP;
v_ignore_me := dbms_sql.execute(v_cursor);
-- create header
util_clob_append(p_header_prefix);
FOR i IN 1..v_col_cnt LOOP
IF i > 1 THEN
@ -1496,7 +1497,8 @@ BEGIN
escape_varchar2_buffer_for_csv;
util_clob_append(v_buffer_varchar2);
END LOOP;
util_clob_append(v_line_terminator);
util_clob_append(c_crlf);
-- create data
LOOP
EXIT WHEN dbms_sql.fetch_rows(v_cursor) = 0 OR v_data_count = p_max_rows;
@ -1541,18 +1543,17 @@ BEGIN
util_clob_append(v_buffer_varchar2);
END IF;
END LOOP;
util_clob_append(v_line_terminator);
util_clob_append(c_crlf);
v_data_count := v_data_count + 1;
END LOOP;
dbms_sql.close_cursor(v_cursor);
END IF;
END util_clob_query_to_csv;
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000)
IS
v_line_terminator VARCHAR2(2) := c_crlf; -- to be compatible with Excel we need to use crlf here (multiline text uses lf and is wrapped in quotes)
v_cursor PLS_INTEGER;
v_ignore_me PLS_INTEGER;
v_data_count PLS_INTEGER := 0;
@ -1594,32 +1595,71 @@ IS
--------------------------------------------------------------------------------------------------------------------------------
PROCEDURE escape_varchar2_buffer_for_csv IS
PROCEDURE prepare_varchar2_buffer_for_scripting IS
c_single_quote constant varchar2(1) := q'[']';
c_double_quote constant varchar2(2) := q'['']';
BEGIN
NULL;
-- IF v_buffer_varchar2 IS NOT NULL THEN
-- -- normalize line feeds for Excel
-- v_buffer_varchar2 := replace(
-- replace(v_buffer_varchar2, c_crlf, c_lf),
-- c_cr,
-- c_lf);
-- -- if we have the delimiter character or line feeds in the string then we
-- -- have to wrap the text in quotes marks and escape the quote marks
-- -- inside the text by double them
-- IF instr(v_buffer_varchar2, p_delimiter) > 0 OR instr(v_buffer_varchar2, c_lf) > 0 THEN
-- v_buffer_varchar2 := p_quote_mark
-- || replace(v_buffer_varchar2, p_quote_mark, p_quote_mark || p_quote_mark)
-- || p_quote_mark;
-- END IF;
-- END IF;
IF v_buffer_varchar2 IS NOT NULL THEN
-- if we have the single quote character in the string then we
-- have to double them
v_buffer_varchar2 := c_single_quote ||
case when instr(v_buffer_varchar2, c_single_quote) > 0
then replace(v_buffer_varchar2, c_single_quote, c_double_quote)
else v_buffer_varchar2
end || c_single_quote;
END IF;
EXCEPTION
WHEN value_error THEN
v_buffer_varchar2 := 'Value skipped - escaped text larger than ' || c_vc2_max_size || ' characters';
END escape_varchar2_buffer_for_csv;
END prepare_varchar2_buffer_for_scripting;
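The renamed procedure above no longer escapes for CSV but turns the buffer into a valid PL/SQL string literal: wrap it in single quotes and double any quote inside. A tiny standalone sketch of the same transformation (the value is made up):

declare
  v_value varchar2(100) := q'[O'Brien says "hi"]';
begin
  -- wrap in single quotes and double the embedded single quote
  v_value := '''' || replace(v_value, '''', '''''') || '''';
  dbms_output.put_line(v_value); -- prints 'O''Brien says "hi"'
end;
/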
FUNCTION get_table_pk_list RETURN VARCHAR2 IS
FUNCTION get_order_by_list RETURN VARCHAR2 IS
v_return varchar2(4000);
begin
return null;
-- try to use pk column list ...
for i in (
select
( select
listagg(ucc.column_name, ', ') within group(order by position)
from
user_cons_columns ucc
where
ucc.constraint_name = uc.constraint_name
) as order_by_list
from
user_constraints uc
where
table_name = p_table_name
and constraint_type = 'P'
) loop
v_return := i.order_by_list;
end loop;
-- ... or fetch first three table columns as order by list
if v_return is null then
for i in (
with t as (
select
column_name,
column_id
from
user_tab_cols
where
table_name = 'DRS_FILM'
and data_type in ('NUMBER','VARCHAR2','DATE')
order by
column_id
fetch first 3 rows only
)
select
listagg(column_name, ', ') within group (order by column_id) as order_by_list
from t
) loop
v_return := i.order_by_list;
end loop;
end if;
return v_return;
end;
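The new get_order_by_list above tries the primary key columns first and falls back to the first three scalar columns. The primary key part can be tried standalone like this ('EMP' is only a placeholder table name):

select listagg(ucc.column_name, ', ') within group (order by ucc.position) as order_by_list
  from user_constraints  uc
  join user_cons_columns ucc on ucc.constraint_name = uc.constraint_name
 where uc.table_name      = 'EMP'
   and uc.constraint_type = 'P';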
BEGIN
@ -1627,7 +1667,7 @@ BEGIN
v_cursor := dbms_sql.open_cursor;
dbms_sql.parse(
v_cursor,
'select * from ' || p_table_name || ' order by ' || get_table_pk_list,
'select * from ' || p_table_name || ' order by ' || get_order_by_list,
dbms_sql.native);
-- https://support.esri.com/en/technical-article/000010110
-- http://bluefrog-oracle.blogspot.com/2011/11/describing-ref-cursor-using-dbmssql-api.html
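The two links above document the DBMS_SQL describe pattern used for the dynamically parsed cursor. A minimal standalone sketch of that pattern (not PLEX code):

declare
  v_cursor   pls_integer := dbms_sql.open_cursor;
  v_col_cnt  pls_integer;
  v_desc_tab dbms_sql.desc_tab3;
begin
  dbms_sql.parse(v_cursor, 'select * from user_tables', dbms_sql.native);
  -- describing returns the column name and internal type per column
  dbms_sql.describe_columns3(v_cursor, v_col_cnt, v_desc_tab);
  for i in 1 .. v_col_cnt loop
    dbms_output.put_line(v_desc_tab(i).col_name || ' (internal type ' || v_desc_tab(i).col_type || ')');
  end loop;
  dbms_sql.close_cursor(v_cursor);
end;
/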
@ -1646,21 +1686,36 @@ BEGIN
END IF;
END LOOP;
v_ignore_me := dbms_sql.execute(v_cursor);
FOR i IN 1..v_col_cnt LOOP
v_buffer_varchar2 := v_desc_tab(i).col_name;
escape_varchar2_buffer_for_csv;
util_clob_append(v_buffer_varchar2);
END LOOP;
util_clob_append(v_line_terminator);
-- create header
util_clob_append('-- Script generated by PLEX version ' || c_plex_version || c_crlf);
util_clob_append('-- More info here: ' || c_plex_url || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('prompt - insert into ' || p_table_name || c_crlf);
util_clob_append('set define off feedback off' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('declare' || c_crlf);
util_clob_append('type r_t is table of drs_film%rowtype index by pls_integer;' || c_crlf);
util_clob_append('r r_t;' || c_crlf);
util_clob_append('start pls_integer := dbms_utility.get_time;' || c_crlf);
util_clob_append('begin' || c_crlf);
-- create data
LOOP
EXIT WHEN dbms_sql.fetch_rows(v_cursor) = 0 OR v_data_count = p_max_rows;
v_data_count := v_data_count + 1;
-- start new table row
util_clob_append('--' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
-- start column
util_clob_append('r(' || v_data_count || ').' || v_desc_tab(i).col_name || ' := ');
IF v_desc_tab(i).col_type = c_clob THEN
dbms_sql.column_value(v_cursor, i, v_buffer_clob);
IF length(v_buffer_clob) <= c_vc2_max_size THEN
v_buffer_varchar2 := substr(v_buffer_clob, 1, c_vc2_max_size);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
v_buffer_varchar2 := 'CLOB value skipped - larger than ' || c_vc2_max_size || ' characters';
@ -1671,7 +1726,7 @@ BEGIN
v_buffer_clob := v_buffer_xmltype.getclobval();
IF length(v_buffer_clob) <= c_vc2_max_size THEN
v_buffer_varchar2 := substr(v_buffer_clob, 1, c_vc2_max_size);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
v_buffer_varchar2 := 'XML value skipped - larger than ' || c_vc2_max_size || ' characters';
@ -1680,25 +1735,58 @@ BEGIN
ELSIF v_desc_tab(i).col_type = c_long THEN
dbms_sql.column_value_long(v_cursor, i, c_vc2_max_size, 0, v_buffer_varchar2, v_buffer_long_length);
IF v_buffer_long_length <= c_vc2_max_size THEN
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
util_clob_append('LONG value skipped - larger than ' || c_vc2_max_size || ' characters');
END IF;
ELSIF v_desc_tab(i).col_type IN (c_raw, c_long_raw, c_blob, c_bfile) THEN
util_clob_append('Binary data type skipped - not supported for CSV');
util_clob_append('Binary data type skipped - currently not supported');
ELSE
dbms_sql.column_value(v_cursor, i, v_buffer_varchar2);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
END IF;
-- end column
util_clob_append(';' || c_crlf);
END LOOP;
util_clob_append(v_line_terminator);
v_data_count := v_data_count + 1;
END LOOP;
dbms_sql.close_cursor(v_cursor);
-- create forall insert
if v_data_count = 0 then
util_clob_append('-- No data found in table :-(' || c_crlf);
util_clob_append('end;' || c_crlf);
util_clob_append('/' || c_crlf);
else
util_clob_append('--' || c_crlf);
util_clob_append('forall i in 1..r.count' || c_crlf);
util_clob_append(' insert into "' || p_table_name || '" (' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
util_clob_append(' "' || v_desc_tab(i).col_name || '"'
|| case when i != v_col_cnt then ',' end
|| c_crlf);
end loop;
util_clob_append(' ) values (' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
util_clob_append(' r(i)."' || v_desc_tab(i).col_name || '"'
|| case when i != v_col_cnt then ',' end
|| c_crlf);
end loop;
util_clob_append(' );' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append(q'[dbms_output.put_line('- ']' || c_crlf);
util_clob_append(q'[ ||(dbms_utility.get_time - start) / 100]' || c_crlf);
util_clob_append(q'[ || ' seconds');]' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('end;' || c_crlf);
util_clob_append('/' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('commit;' || c_crlf);
util_clob_append('' || c_crlf);
end if;
END IF;
END util_clob_table_to_forallinsert;
END util_clob_table_to_insert;
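The procedure above now writes a SQL*Plus script per table that fills an index-by table of %rowtype records and inserts them with a single FORALL statement. A minimal standalone sketch of that collection plus FORALL pattern (demo_tab and its data are made up; this is not generated PLEX output):

create table demo_tab (id number, name varchar2(30));

declare
  type t_rows is table of demo_tab%rowtype index by pls_integer;
  l_rows  t_rows;
  l_start pls_integer := dbms_utility.get_time;
begin
  l_rows(1).id := 1; l_rows(1).name := 'O''Brien';
  l_rows(2).id := 2; l_rows(2).name := 'Smith';
  -- one bulk statement instead of row-by-row inserts
  forall i in 1 .. l_rows.count
    insert into demo_tab values l_rows(i);
  dbms_output.put_line('inserted ' || sql%rowcount || ' rows in '
    || (dbms_utility.get_time - l_start) / 100 || ' seconds');
  commit;
end;
/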
--------------------------------------------------------------------------------------------------------------------------------
@ -2586,6 +2674,8 @@ SELECT table_name,
LOOP
FETCH v_cur INTO v_rec;
EXIT WHEN v_cur%notfound;
-- csv file
BEGIN
v_file_path := p_base_path_data || '/' || v_rec.table_name || '.csv';
util_log_start(v_file_path);
@ -2605,8 +2695,23 @@ SELECT table_name,
WHEN OTHERS THEN
util_log_error(v_file_path);
END;
-- forall insert script
BEGIN
v_file_path := p_base_path_data || '/' || v_rec.table_name || '.sql';
util_log_start(v_file_path);
util_clob_table_to_insert(p_table_name => v_rec.table_name);
util_clob_add_to_export_files(
p_export_files => v_export_files,
p_name => v_file_path);
util_log_stop;
EXCEPTION
WHEN OTHERS THEN
util_log_error(v_file_path);
END;
END LOOP;
CLOSE v_cur;
END process_data;
PROCEDURE create_template_files IS

View File

@ -175,7 +175,7 @@ PROCEDURE util_clob_query_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"',
p_header_prefix IN VARCHAR2 DEFAULT NULL);
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000);
@ -777,7 +777,6 @@ PROCEDURE util_clob_query_to_csv (
p_header_prefix IN VARCHAR2 DEFAULT NULL)
IS
-- inspired by Tim Hall: https://oracle-base.com/dba/script?category=miscellaneous&file=csv.sql
v_line_terminator VARCHAR2(2) := c_crlf; -- to be compatible with Excel we need to use crlf here (multiline text uses lf and is wrapped in quotes)
v_cursor PLS_INTEGER;
v_ignore_me PLS_INTEGER;
v_data_count PLS_INTEGER := 0;
@ -863,6 +862,8 @@ BEGIN
END IF;
END LOOP;
v_ignore_me := dbms_sql.execute(v_cursor);
-- create header
util_clob_append(p_header_prefix);
FOR i IN 1..v_col_cnt LOOP
IF i > 1 THEN
@ -872,7 +873,8 @@ BEGIN
escape_varchar2_buffer_for_csv;
util_clob_append(v_buffer_varchar2);
END LOOP;
util_clob_append(v_line_terminator);
util_clob_append(c_crlf);
-- create data
LOOP
EXIT WHEN dbms_sql.fetch_rows(v_cursor) = 0 OR v_data_count = p_max_rows;
@ -917,18 +919,17 @@ BEGIN
util_clob_append(v_buffer_varchar2);
END IF;
END LOOP;
util_clob_append(v_line_terminator);
util_clob_append(c_crlf);
v_data_count := v_data_count + 1;
END LOOP;
dbms_sql.close_cursor(v_cursor);
END IF;
END util_clob_query_to_csv;
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000)
IS
v_line_terminator VARCHAR2(2) := c_crlf; -- to be compatible with Excel we need to use crlf here (multiline text uses lf and is wrapped in quotes)
v_cursor PLS_INTEGER;
v_ignore_me PLS_INTEGER;
v_data_count PLS_INTEGER := 0;
@ -970,32 +971,71 @@ IS
--------------------------------------------------------------------------------------------------------------------------------
PROCEDURE escape_varchar2_buffer_for_csv IS
PROCEDURE prepare_varchar2_buffer_for_scripting IS
c_single_quote constant varchar2(1) := q'[']';
c_double_quote constant varchar2(2) := q'['']';
BEGIN
NULL;
-- IF v_buffer_varchar2 IS NOT NULL THEN
-- -- normalize line feeds for Excel
-- v_buffer_varchar2 := replace(
-- replace(v_buffer_varchar2, c_crlf, c_lf),
-- c_cr,
-- c_lf);
-- -- if we have the delimiter character or line feeds in the string then we
-- -- have to wrap the text in quotes marks and escape the quote marks
-- -- inside the text by double them
-- IF instr(v_buffer_varchar2, p_delimiter) > 0 OR instr(v_buffer_varchar2, c_lf) > 0 THEN
-- v_buffer_varchar2 := p_quote_mark
-- || replace(v_buffer_varchar2, p_quote_mark, p_quote_mark || p_quote_mark)
-- || p_quote_mark;
-- END IF;
-- END IF;
IF v_buffer_varchar2 IS NOT NULL THEN
-- if we have the single quote character in the string then we
-- have to double them
v_buffer_varchar2 := c_single_quote ||
case when instr(v_buffer_varchar2, c_single_quote) > 0
then replace(v_buffer_varchar2, c_single_quote, c_double_quote)
else v_buffer_varchar2
end || c_single_quote;
END IF;
EXCEPTION
WHEN value_error THEN
v_buffer_varchar2 := 'Value skipped - escaped text larger than ' || c_vc2_max_size || ' characters';
END escape_varchar2_buffer_for_csv;
END prepare_varchar2_buffer_for_scripting;
FUNCTION get_table_pk_list RETURN VARCHAR2 IS
FUNCTION get_order_by_list RETURN VARCHAR2 IS
v_return varchar2(4000);
begin
return null;
-- try to use pk column list ...
for i in (
select
( select
listagg(ucc.column_name, ', ') within group(order by position)
from
user_cons_columns ucc
where
ucc.constraint_name = uc.constraint_name
) as order_by_list
from
user_constraints uc
where
table_name = p_table_name
and constraint_type = 'P'
) loop
v_return := i.order_by_list;
end loop;
-- ... or fetch first three table columns as order by list
if v_return is null then
for i in (
with t as (
select
column_name,
column_id
from
user_tab_cols
where
table_name = 'DRS_FILM'
and data_type in ('NUMBER','VARCHAR2','DATE')
order by
column_id
fetch first 3 rows only
)
select
listagg(column_name, ', ') within group (order by column_id) as order_by_list
from t
) loop
v_return := i.order_by_list;
end loop;
end if;
return v_return;
end;
BEGIN
@ -1003,7 +1043,7 @@ BEGIN
v_cursor := dbms_sql.open_cursor;
dbms_sql.parse(
v_cursor,
'select * from ' || p_table_name || ' order by ' || get_table_pk_list,
'select * from ' || p_table_name || ' order by ' || get_order_by_list,
dbms_sql.native);
-- https://support.esri.com/en/technical-article/000010110
-- http://bluefrog-oracle.blogspot.com/2011/11/describing-ref-cursor-using-dbmssql-api.html
@ -1022,21 +1062,36 @@ BEGIN
END IF;
END LOOP;
v_ignore_me := dbms_sql.execute(v_cursor);
FOR i IN 1..v_col_cnt LOOP
v_buffer_varchar2 := v_desc_tab(i).col_name;
escape_varchar2_buffer_for_csv;
util_clob_append(v_buffer_varchar2);
END LOOP;
util_clob_append(v_line_terminator);
-- create header
util_clob_append('-- Script generated by PLEX version ' || c_plex_version || c_crlf);
util_clob_append('-- More info here: ' || c_plex_url || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('prompt - insert into ' || p_table_name || c_crlf);
util_clob_append('set define off feedback off' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('declare' || c_crlf);
util_clob_append('type r_t is table of drs_film%rowtype index by pls_integer;' || c_crlf);
util_clob_append('r r_t;' || c_crlf);
util_clob_append('start pls_integer := dbms_utility.get_time;' || c_crlf);
util_clob_append('begin' || c_crlf);
-- create data
LOOP
EXIT WHEN dbms_sql.fetch_rows(v_cursor) = 0 OR v_data_count = p_max_rows;
v_data_count := v_data_count + 1;
-- start new table row
util_clob_append('--' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
-- start column
util_clob_append('r(' || v_data_count || ').' || v_desc_tab(i).col_name || ' := ');
IF v_desc_tab(i).col_type = c_clob THEN
dbms_sql.column_value(v_cursor, i, v_buffer_clob);
IF length(v_buffer_clob) <= c_vc2_max_size THEN
v_buffer_varchar2 := substr(v_buffer_clob, 1, c_vc2_max_size);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
v_buffer_varchar2 := 'CLOB value skipped - larger than ' || c_vc2_max_size || ' characters';
@ -1047,7 +1102,7 @@ BEGIN
v_buffer_clob := v_buffer_xmltype.getclobval();
IF length(v_buffer_clob) <= c_vc2_max_size THEN
v_buffer_varchar2 := substr(v_buffer_clob, 1, c_vc2_max_size);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
v_buffer_varchar2 := 'XML value skipped - larger than ' || c_vc2_max_size || ' characters';
@ -1056,25 +1111,58 @@ BEGIN
ELSIF v_desc_tab(i).col_type = c_long THEN
dbms_sql.column_value_long(v_cursor, i, c_vc2_max_size, 0, v_buffer_varchar2, v_buffer_long_length);
IF v_buffer_long_length <= c_vc2_max_size THEN
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
ELSE
util_clob_append('LONG value skipped - larger than ' || c_vc2_max_size || ' characters');
END IF;
ELSIF v_desc_tab(i).col_type IN (c_raw, c_long_raw, c_blob, c_bfile) THEN
util_clob_append('Binary data type skipped - not supported for CSV');
util_clob_append('Binary data type skipped - currently not supported');
ELSE
dbms_sql.column_value(v_cursor, i, v_buffer_varchar2);
escape_varchar2_buffer_for_csv;
prepare_varchar2_buffer_for_scripting;
util_clob_append(v_buffer_varchar2);
END IF;
-- end column
util_clob_append(';' || c_crlf);
END LOOP;
util_clob_append(v_line_terminator);
v_data_count := v_data_count + 1;
END LOOP;
dbms_sql.close_cursor(v_cursor);
-- create forall insert
if v_data_count = 0 then
util_clob_append('-- No data found in table :-(' || c_crlf);
util_clob_append('end;' || c_crlf);
util_clob_append('/' || c_crlf);
else
util_clob_append('--' || c_crlf);
util_clob_append('forall i in 1..r.count' || c_crlf);
util_clob_append(' insert into "' || p_table_name || '" (' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
util_clob_append(' "' || v_desc_tab(i).col_name || '"'
|| case when i != v_col_cnt then ',' end
|| c_crlf);
end loop;
util_clob_append(' ) values (' || c_crlf);
FOR i IN 1..v_col_cnt LOOP
util_clob_append(' r(i)."' || v_desc_tab(i).col_name || '"'
|| case when i != v_col_cnt then ',' end
|| c_crlf);
end loop;
util_clob_append(' );' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append(q'[dbms_output.put_line('- ']' || c_crlf);
util_clob_append(q'[ ||(dbms_utility.get_time - start) / 100]' || c_crlf);
util_clob_append(q'[ || ' seconds');]' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('end;' || c_crlf);
util_clob_append('/' || c_crlf);
util_clob_append('' || c_crlf);
util_clob_append('commit;' || c_crlf);
util_clob_append('' || c_crlf);
end if;
END IF;
END util_clob_table_to_forallinsert;
END util_clob_table_to_insert;
--------------------------------------------------------------------------------------------------------------------------------
@ -1962,6 +2050,8 @@ SELECT table_name,
LOOP
FETCH v_cur INTO v_rec;
EXIT WHEN v_cur%notfound;
-- csv file
BEGIN
v_file_path := p_base_path_data || '/' || v_rec.table_name || '.csv';
util_log_start(v_file_path);
@ -1981,8 +2071,23 @@ SELECT table_name,
WHEN OTHERS THEN
util_log_error(v_file_path);
END;
-- forall insert script
BEGIN
v_file_path := p_base_path_data || '/' || v_rec.table_name || '.sql';
util_log_start(v_file_path);
util_clob_table_to_insert(p_table_name => v_rec.table_name);
util_clob_add_to_export_files(
p_export_files => v_export_files,
p_name => v_file_path);
util_log_stop;
EXCEPTION
WHEN OTHERS THEN
util_log_error(v_file_path);
END;
END LOOP;
CLOSE v_cur;
END process_data;
PROCEDURE create_template_files IS

View File

@ -224,11 +224,11 @@ EXAMPLE ZIP FILE SQL*Plus
-- SQL*Plus can only handle CLOBs, no BLOBs - so we are forced to create a CLOB
-- for spooling the content to the client disk. You need to decode the base64
-- encoded file before you are able to unzip the content. Also see this blog
-- post how to do this on the different operating systems:
-- post how to do this on different operating systems:
-- https://www.igorkromin.net/index.php/2017/04/26/base64-encode-or-decode-on-the-command-line-without-installing-extra-tools-on-linux-windows-or-macos/
-- Example Windows: certutil -decode app_100.zip.base64 app_100.zip
-- Example Mac: base64 -D -i app_100.zip.base64 -o app_100.zip
-- Example Linux: base64 -d app_100.zip.base64 > app_100.zip
set verify off feedback off heading off termout off
set trimout on trimspool on pagesize 0 linesize 5000 long 100000000 longchunksize 32767
whenever sqlerror exit sql.sqlcode rollback
@ -276,7 +276,7 @@ FUNCTION queries_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"', -- Used when the data contains the delimiter character.
p_header_prefix IN VARCHAR2 DEFAULT NULL, -- Prefix the header line with this text.
p_include_runtime_log IN BOOLEAN DEFAULT true, -- If true, generate file plex_runtime_log.md with runtime statistics.
p_include_error_log IN BOOLEAN DEFAULT true) -- If true, generate file plex_error_log.md with detailed error messages.
RETURN tab_export_files;
/**
Export one or more queries as CSV data within a file collection.
@ -531,7 +531,7 @@ PROCEDURE util_ensure_unique_file_names (p_export_files IN OUT tab_export_files)
FUNCTION util_to_xlsx_datetime (
p_date IN DATE)
RETURN;
RETURN NUMBER;
--------------------------------------------------------------------------------------------------------------------------------
-- The following tools are working on global private package variables
@ -569,7 +569,7 @@ PROCEDURE util_clob_query_to_csv (
p_quote_mark IN VARCHAR2 DEFAULT '"',
p_header_prefix IN VARCHAR2 DEFAULT NULL);
PROCEDURE util_clob_table_to_forallinsert (
PROCEDURE util_clob_table_to_insert (
p_table_name IN VARCHAR2,
p_max_rows IN NUMBER DEFAULT 1000);