Files
oracle/vg/datapump_package.sql
2026-03-12 21:23:47 +01:00

625 lines
36 KiB
Oracle PL/SQL

-- Compile the DATAPUMP package into the DBA_OWN schema without reconnecting as that user.
alter session set current_schema = DBA_OWN;
CREATE OR REPLACE PACKAGE "DATAPUMP"
AS
/************************************************************************************************************************************
* Purpose : PL/SQL Wrapper for Datapump Export/Import Utility (DBMS_DATAPUMP API).
*
* Two overloads of EXPORT_IMPORT are exposed:
*   1) Single-object form : takes scalar SCHEMA_NAME / TABLE_NAME / PARTITION_NAME values.
*   2) List form          : takes CLOB comma-separated SCHEMA_LIST / TABLE_LIST / PARTITION_LIST values.
* The single-object form delegates to the list form (see package body).
*
* NOTE(review): every parameter of both overloads has a default, so a call that supplies only
*               parameters common to both overloads (e.g. only DIRECTORY/DUMPFILE) is ambiguous
*               (PLS-00307) — callers should name at least one overload-specific parameter.
*
* Modification History
* Date Ver Author Remarks
* --------- --- -------------- -----------------------------------------------
* 23-Dec-15 0.3 Vishal Gupta Following enhancements
* - Added CLOB support for schema/table/partition list parameters
* - Added METRIC input parameter
* 10-Dec-15 0.2 Vishal Gupta Added following input parameters - include_rows, remap_tablespace
* , partition_name, schema_list, table_list , partition_list
* 12-Jun-14 0.1 Vishal Gupta Created (first version)
*
************************************************************************************************************************************/
-- Overload 1: single schema/table/partition (scalar VARCHAR2 names).
PROCEDURE EXPORT_IMPORT(
operation IN VARCHAR2 DEFAULT 'EXPORT' , /* Valid values EXPORT, IMPORT */
job_mode IN VARCHAR2 DEFAULT 'TABLE' , /* Valid values TABLE, SCHEMA */
schema_name IN VARCHAR2 DEFAULT NULL , /* Schema Name */
table_name IN VARCHAR2 DEFAULT NULL , /* Table Name */
partition_name IN VARCHAR2 DEFAULT NULL , /* Partition Name */
include_metadata IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
include_rows IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
directory IN VARCHAR2 DEFAULT NULL , /* Oracle DIRECTORY object for dump/log files */
dumpfile IN VARCHAR2 DEFAULT 'datapump.dmp' , /* If parallelism is used, then include %U to generation multiple files */
logfile IN VARCHAR2 DEFAULT 'datapump.log' ,
parallelism IN NUMBER DEFAULT NULL ,
job_name IN VARCHAR2 DEFAULT NULL ,
cluster_ok IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
service_name IN VARCHAR2 DEFAULT NULL ,
remap_table IN VARCHAR2 DEFAULT NULL , /* Comma separated list of OLD_TABLENAME:NEW_TABLENAME format mappings */
remap_schema IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_SCHEMA:TARGET_SCHEMA format mappings */
remap_tablespace IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_TABLESPACE:TARGET_TABLESPACE format mappings */
filesize IN VARCHAR2 DEFAULT NULL , /* e.g 10G */
reusefile IN NUMBER DEFAULT 0 , /* Valid values are 0 , 1 */
skip_unusable_indexes IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
compression IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE, DATA_ONLY, METADATA_ONLY, ALL */
table_exists_action IN VARCHAR2 DEFAULT 'SKIP' , /* Valid values are TRUNCATE , SKIP , APPEND , REPLACE */
estimate_only IN NUMBER DEFAULT 0 , /* Valid values are 0 or 1 */
estimate_method IN VARCHAR2 DEFAULT NULL , /* Valid values are BLOCKS , STATISTICS */
partition_options IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE , DEPARTITION, MERGE */
subquery IN VARCHAR2 DEFAULT NULL , /* Specifies a subquery that is added to the end of the SELECT statement for the table. */
exclude IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to EXCLUDE */
include IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to INCLUDE */
metrics IN VARCHAR2 DEFAULT 0 , /* Valid values are 0,1. With value=1, number of objects and the elapsed time are recorded in the Data Pump log file. NOTE(review): declared VARCHAR2 but carries a numeric flag — relies on implicit conversion */
version IN VARCHAR2 DEFAULT 'COMPATIBLE' /* Valid values are COMPATIBLE, LATEST */
);
-- Overload 2: comma-separated CLOB lists of schemas/tables/partitions.
PROCEDURE EXPORT_IMPORT(
operation IN VARCHAR2 DEFAULT 'EXPORT' , /* Valid values EXPORT, IMPORT */
job_mode IN VARCHAR2 DEFAULT 'TABLE' , /* Valid values TABLE, SCHEMA */
schema_list IN CLOB DEFAULT NULL , /* Comma separated list of Schemas */
table_list IN CLOB DEFAULT NULL , /* Comma separated list of Tables */
partition_list IN CLOB DEFAULT NULL , /* Comma separated list of Partitions */
include_metadata IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
include_rows IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
directory IN VARCHAR2 DEFAULT NULL , /* Oracle DIRECTORY object for dump/log files */
dumpfile IN VARCHAR2 DEFAULT 'datapump.dmp' , /* If parallelism is used, then include %U to generation multiple files */
logfile IN VARCHAR2 DEFAULT 'datapump.log' ,
parallelism IN NUMBER DEFAULT NULL ,
job_name IN VARCHAR2 DEFAULT NULL ,
cluster_ok IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
service_name IN VARCHAR2 DEFAULT NULL ,
remap_table IN VARCHAR2 DEFAULT NULL , /* Comma separated list of OLD_TABLENAME:NEW_TABLENAME format mappings */
remap_schema IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_SCHEMA:TARGET_SCHEMA format mappings */
remap_tablespace IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_TABLESPACE:TARGET_TABLESPACE format mappings */
filesize IN VARCHAR2 DEFAULT NULL , /* e.g 10G */
reusefile IN NUMBER DEFAULT 0 , /* Valid values are 0 , 1 */
skip_unusable_indexes IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
compression IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE, DATA_ONLY, METADATA_ONLY, ALL */
table_exists_action IN VARCHAR2 DEFAULT 'SKIP' , /* Valid values are TRUNCATE , SKIP , APPEND , REPLACE */
estimate_only IN NUMBER DEFAULT 0 , /* Valid values are 0 or 1 */
estimate_method IN VARCHAR2 DEFAULT NULL , /* Valid values are BLOCKS , STATISTICS */
partition_options IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE , DEPARTITION, MERGE */
subquery IN VARCHAR2 DEFAULT NULL , /* Specifies a subquery that is added to the end of the SELECT statement for the table. */
exclude IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to EXCLUDE */
include IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to INCLUDE */
metrics IN VARCHAR2 DEFAULT 0 , /* Valid values are 0,1. With value=1, number of objects and the elapsed time are recorded in the Data Pump log file. NOTE(review): declared VARCHAR2 but carries a numeric flag — relies on implicit conversion */
version IN VARCHAR2 DEFAULT 'COMPATIBLE' /* Valid values are COMPATIBLE, LATEST */
);
END;
/
CREATE OR REPLACE PACKAGE BODY "DATAPUMP"
AS
/************************************************************************************************************************************
* Purpose : PL/SQL Wrapper for Datapump Export/Import Utility (DBMS_DATAPUMP API).
*
*
* Modification History
* Date Ver Author Remarks
* --------- --- -------------- -----------------------------------------------
*           0.4 (review)       Fixed METRICS handling: the supplied METRICS value is now passed to
*                              DBMS_DATAPUMP.SET_PARAMETER instead of a hard-coded 1, so METRICS=>0
*                              (the default) no longer silently enables metrics logging.
* 10-Dec-15 0.2 Vishal Gupta Added following input parameters - include_rows, remap_tablespace
* , partition_name, schema_list, table_list , partition_list
* 12-Jun-14 0.1 Vishal Gupta Created (first version)
*
************************************************************************************************************************************/
-- Overload 1 (scalar form): thin wrapper that converts the single schema/table/partition
-- names into one-element CLOB lists and delegates to the list-based overload below.
PROCEDURE EXPORT_IMPORT(
operation IN VARCHAR2 DEFAULT 'EXPORT' , /* Valid values EXPORT, IMPORT */
job_mode IN VARCHAR2 DEFAULT 'TABLE' , /* Valid values TABLE, SCHEMA */
schema_name IN VARCHAR2 DEFAULT NULL , /* Schema Name */
table_name IN VARCHAR2 DEFAULT NULL , /* Table Name */
partition_name IN VARCHAR2 DEFAULT NULL , /* Partition Name */
include_metadata IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
include_rows IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
directory IN VARCHAR2 DEFAULT NULL ,
dumpfile IN VARCHAR2 DEFAULT 'datapump.dmp' , /* If parallelism is used, then include %U to generation multiple files */
logfile IN VARCHAR2 DEFAULT 'datapump.log' ,
parallelism IN NUMBER DEFAULT NULL ,
job_name IN VARCHAR2 DEFAULT NULL ,
cluster_ok IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
service_name IN VARCHAR2 DEFAULT NULL ,
remap_table IN VARCHAR2 DEFAULT NULL , /* Comma separated list of OLD_TABLENAME:NEW_TABLENAME format mappings */
remap_schema IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_SCHEMA:TARGET_SCHEMA format mappings */
remap_tablespace IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_TABLESPACE:TARGET_TABLESPACE format mappings */
filesize IN VARCHAR2 DEFAULT NULL , /* e.g 10G */
reusefile IN NUMBER DEFAULT 0 , /* Valid values are 0 , 1 */
skip_unusable_indexes IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
compression IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE, DATA_ONLY, METADATA_ONLY, ALL */
table_exists_action IN VARCHAR2 DEFAULT 'SKIP' , /* Valid values are TRUNCATE , SKIP , APPEND , REPLACE */
estimate_only IN NUMBER DEFAULT 0 , /* Valid values are 0 or 1 */
estimate_method IN VARCHAR2 DEFAULT NULL , /* Valid values are BLOCKS , STATISTICS */
partition_options IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE , DEPARTITION, MERGE */
subquery IN VARCHAR2 DEFAULT NULL , /* Specifies a subquery that is added to the end of the SELECT statement for the table. */
exclude IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to EXCLUDE */
include IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to INCLUDE */
metrics IN VARCHAR2 DEFAULT 0 , /* Valid values are 0,1. With value=1, number of objects and the elapsed time are recorded in the Data Pump log file */
version IN VARCHAR2 DEFAULT 'COMPATIBLE' /* Valid values are COMPATIBLE, LATEST */
)
AS
    -- Single names become one-element "lists" for the CLOB-based overload.
    l_schema_list CLOB := schema_name ;
    l_table_list CLOB := table_name ;
    l_partition_list CLOB := partition_name ;
BEGIN
    -- Delegate everything to the list-based overload; all other parameters pass through unchanged.
    EXPORT_IMPORT (operation => operation ,
                   job_mode => job_mode ,
                   schema_list => l_schema_list ,
                   table_list => l_table_list ,
                   partition_list => l_partition_list ,
                   include_metadata => include_metadata ,
                   include_rows => include_rows ,
                   directory => directory ,
                   dumpfile => dumpfile ,
                   logfile => logfile ,
                   parallelism => parallelism ,
                   job_name => job_name ,
                   cluster_ok => cluster_ok ,
                   service_name => service_name ,
                   remap_table => remap_table ,
                   remap_schema => remap_schema ,
                   remap_tablespace => remap_tablespace ,
                   filesize => filesize ,
                   reusefile => reusefile ,
                   skip_unusable_indexes => skip_unusable_indexes ,
                   compression => compression ,
                   table_exists_action => table_exists_action ,
                   estimate_only => estimate_only ,
                   estimate_method => estimate_method ,
                   partition_options => partition_options ,
                   subquery => subquery ,
                   exclude => exclude ,
                   include => include ,
                   metrics => metrics ,
                   version => version );
END;
-- Overload 2 (list form): does the real work. Opens a Data Pump job, applies the requested
-- parameters/filters/remaps, starts the job, then monitors it to completion, echoing
-- work-in-progress and error messages via DBMS_OUTPUT.
PROCEDURE EXPORT_IMPORT(
operation IN VARCHAR2 DEFAULT 'EXPORT' , /* Valid values EXPORT, IMPORT */
job_mode IN VARCHAR2 DEFAULT 'TABLE' , /* Valid values TABLE, SCHEMA */
schema_list IN CLOB DEFAULT NULL , /* Comma separated list of Schemas */
table_list IN CLOB DEFAULT NULL , /* Comma separated list of Tables */
partition_list IN CLOB DEFAULT NULL , /* Comma separated list of Partitions */
include_metadata IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
include_rows IN NUMBER DEFAULT 1 , /* Valid values are 0 or 1 */
directory IN VARCHAR2 DEFAULT NULL ,
dumpfile IN VARCHAR2 DEFAULT 'datapump.dmp' , /* If parallelism is used, then include %U to generation multiple files */
logfile IN VARCHAR2 DEFAULT 'datapump.log' ,
parallelism IN NUMBER DEFAULT NULL ,
job_name IN VARCHAR2 DEFAULT NULL ,
cluster_ok IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
service_name IN VARCHAR2 DEFAULT NULL ,
remap_table IN VARCHAR2 DEFAULT NULL , /* Comma separated list of OLD_TABLENAME:NEW_TABLENAME format mappings */
remap_schema IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_SCHEMA:TARGET_SCHEMA format mappings */
remap_tablespace IN VARCHAR2 DEFAULT NULL , /* Comma separated list of SOURCE_TABLESPACE:TARGET_TABLESPACE format mappings */
filesize IN VARCHAR2 DEFAULT NULL , /* e.g 10G */
reusefile IN NUMBER DEFAULT 0 , /* Valid values are 0 , 1 */
skip_unusable_indexes IN NUMBER DEFAULT 1 , /* Valid values are 0 , 1 */
compression IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE, DATA_ONLY, METADATA_ONLY, ALL */
table_exists_action IN VARCHAR2 DEFAULT 'SKIP' , /* Valid values are TRUNCATE , SKIP , APPEND , REPLACE */
estimate_only IN NUMBER DEFAULT 0 , /* Valid values are 0 or 1 */
estimate_method IN VARCHAR2 DEFAULT NULL , /* Valid values are BLOCKS , STATISTICS */
partition_options IN VARCHAR2 DEFAULT 'NONE' , /* Valid values are NONE , DEPARTITION, MERGE */
subquery IN VARCHAR2 DEFAULT NULL , /* Specifies a subquery that is added to the end of the SELECT statement for the table. */
exclude IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to EXCLUDE */
include IN VARCHAR2 DEFAULT NULL , /* Comma separated list of object types to INCLUDE */
metrics IN VARCHAR2 DEFAULT 0 , /* Valid values are 0,1. With value=1, number of objects and the elapsed time are recorded in the Data Pump log file */
version IN VARCHAR2 DEFAULT 'COMPATIBLE' /* Valid values are COMPATIBLE, LATEST */
)
AS
    ind NUMBER; -- Loop index
    h1 NUMBER; -- Data Pump job handle
    percent_done NUMBER; -- Percentage of job complete
    job_state VARCHAR2(30); -- To keep track of job state
    le ku$_LogEntry; -- For WIP and error messages
    js ku$_JobStatus; -- The job status from get_status
    jd ku$_JobDesc; -- The job description from get_status
    sts ku$_Status; -- The status object returned by get_status
    l_remap_table_list VARCHAR2(4000);
    l_remap_schema_list VARCHAR2(4000);
    l_remap_tablespace_list VARCHAR2(4000);
    l_table_list CLOB; -- quoted, upper-cased rewrite of table_list for METADATA_FILTER
    l_schema_list CLOB; -- quoted, upper-cased rewrite of schema_list for METADATA_FILTER
    l_partition_list CLOB; -- quoted, upper-cased rewrite of partition_list for DATA_FILTER
BEGIN
    dbms_output.enable(1000000);
    -- Create a (user-named) Data Pump job .
    dbms_output.put_line('Creating datapump ' || operation || ' job ...');
    h1 := DBMS_DATAPUMP.OPEN(operation => operation
                           , job_mode => UPPER(job_mode)
                           , remote_link => NULL
                           , job_name => job_name
                           , version => version
                           );
    -- Add logfile first so that all subsequent log_entry calls land in it.
    dbms_output.put_line('Setting logfile as ' || logfile );
    DBMS_DATAPUMP.ADD_FILE(handle => h1
                          , filename => logfile
                          , directory => directory
                          , filesize => NULL
                          , filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE
                          , reusefile => NULL
                          );
    -- Record all effective input parameter values in the job log for post-mortem diagnosis.
    dbms_datapump.log_entry(h1,'Datapump Job started using PL/SQL Wrapper for datapump api.');
    dbms_datapump.log_entry(h1,'Passed/Defaulted input parameter values ...');
    dbms_datapump.log_entry(h1,' OPERATION = ' || NVL(operation,'NULL') );
    dbms_datapump.log_entry(h1,' JOB_MODE = ' || NVL(job_mode,'NULL') );
    --dbms_datapump.log_entry(h1,' SCHEMA_LIST = ' || NVL(schema_list,'NULL') );
    --dbms_datapump.log_entry(h1,' TABLE_LIST = ' || NVL(table_list,'NULL') );
    --dbms_datapump.log_entry(h1,' PARTITION_LIST = ' || NVL(partition_list,'NULL') );
    dbms_datapump.log_entry(h1,' INCLUDE_METADATA = ' || NVL(TO_CHAR(include_metadata),'NULL') );
    dbms_datapump.log_entry(h1,' INCLUDE_ROWS = ' || NVL(TO_CHAR(include_rows),'NULL') );
    dbms_datapump.log_entry(h1,' DIRECTORY = ' || NVL(directory,'NULL') );
    dbms_datapump.log_entry(h1,' DUMPFILE = ' || NVL(dumpfile,'NULL') );
    dbms_datapump.log_entry(h1,' LOGFILE = ' || NVL(logfile,'NULL') );
    dbms_datapump.log_entry(h1,' PARALLELISM = ' || NVL(TO_CHAR(parallelism),'NULL') );
    dbms_datapump.log_entry(h1,' JOB_NAME = ' || NVL(job_name,'NULL') );
    dbms_datapump.log_entry(h1,' CLUSTER_OK = ' || NVL(TO_CHAR(cluster_ok),'NULL') );
    dbms_datapump.log_entry(h1,' SERVICE_NAME = ' || NVL(service_name,'NULL') );
    dbms_datapump.log_entry(h1,' REMAP_TABLE = ' || NVL(remap_table,'NULL') );
    dbms_datapump.log_entry(h1,' REMAP_SCHEMA = ' || NVL(remap_schema,'NULL') );
    dbms_datapump.log_entry(h1,' REMAP_TABLESPACE = ' || NVL(remap_tablespace,'NULL') );
    dbms_datapump.log_entry(h1,' FILESIZE = ' || NVL(filesize,'NULL') );
    dbms_datapump.log_entry(h1,' REUSEFILE = ' || NVL(TO_CHAR(reusefile),'NULL') );
    dbms_datapump.log_entry(h1,' SKIP_UNUSABLE_INDEXES = ' || NVL(TO_CHAR(skip_unusable_indexes),'NULL') );
    dbms_datapump.log_entry(h1,' COMPRESSION = ' || NVL(compression,'NULL') );
    dbms_datapump.log_entry(h1,' TABLE_EXISTS_ACTION = ' || NVL(table_exists_action,'NULL') );
    dbms_datapump.log_entry(h1,' ESTIMATE_ONLY = ' || NVL(TO_CHAR(estimate_only),'NULL') );
    dbms_datapump.log_entry(h1,' ESTIMATE_METHOD = ' || NVL(estimate_method,'NULL') );
    dbms_datapump.log_entry(h1,' PARTITION_OPTIONS = ' || NVL(partition_options,'NULL') );
    dbms_datapump.log_entry(h1,' SUBQUERY = ' || NVL(subquery,'NULL') );
    dbms_datapump.log_entry(h1,' EXCLUDE = ' || NVL(exclude,'NULL') );
    dbms_datapump.log_entry(h1,' INCLUDE = ' || NVL(include,'NULL') );
    dbms_datapump.log_entry(h1,' METRICS = ' || NVL(TO_CHAR(metrics),'NULL') );
    dbms_datapump.log_entry(h1,' VERSION = ' || NVL(version,'NULL') );
    -- Export-only parameters.
    IF operation = 'EXPORT' THEN
        dbms_datapump.log_entry(h1,'Setting export options...');
        dbms_datapump.log_entry(h1,'Setting ESTIMATE_ONLY = ' || NVL(TO_CHAR(estimate_only),'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'ESTIMATE_ONLY' , value => estimate_only );
        IF estimate_method IS NOT NULL
        THEN
            dbms_datapump.log_entry(h1,'Settting ESTIMATE_METHOD = ' || NVL(estimate_method,'NULL') );
            DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'ESTIMATE' , value => estimate_method );
        END IF;
        dbms_datapump.log_entry(h1,'Setting COMPRESSION as ' || NVL(compression,'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'COMPRESSION' , value => compression );
    END IF;
    -- Import-only parameters.
    IF operation = 'IMPORT' THEN
        dbms_datapump.log_entry(h1,'Setting import options ');
        dbms_datapump.log_entry(h1,' TABLE_EXISTS_ACTION = ' || NVL(table_exists_action,'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'TABLE_EXISTS_ACTION' , value => table_exists_action );
        dbms_datapump.log_entry(h1,' PARTITION_OPTIONS = ' || NVL(partition_options,'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'PARTITION_OPTIONS' , value => partition_options );
        dbms_datapump.log_entry(h1,' SKIP_UNUSABLE_INDEXES = ' || NVL(TO_CHAR(skip_unusable_indexes),'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'SKIP_UNUSABLE_INDEXES' , value => skip_unusable_indexes );
    END IF;
    -- Enable/Disable Metrics.
    -- BUGFIX: previously this passed a hard-coded 1 to SET_PARAMETER, so any non-NULL
    -- metrics value (including the default '0') enabled metrics logging. Pass the
    -- caller's value, converted to NUMBER to select the numeric SET_PARAMETER overload.
    IF metrics IS NOT NULL THEN
        dbms_datapump.log_entry(h1,' METRICS = ' || NVL(TO_CHAR(metrics),'NULL') );
        dbms_output.put_line('metric=' || metrics);
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1 , name => 'METRICS' , value => TO_NUMBER(metrics));
    END IF;
    -- Include metadata (0 = data only, 1 = metadata plus data).
    IF include_metadata IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting INCLUDE_METADATA = ' || NVL(TO_CHAR(include_metadata),'NULL') );
        DBMS_DATAPUMP.SET_PARAMETER(handle => h1
                                   , name => 'INCLUDE_METADATA'
                                   , value => include_metadata
                                   );
    END IF;
    -- Include Rows (0 = metadata only, 1 = include table data).
    IF include_rows IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting INCLUDE_ROWS = ' || TO_CHAR(include_rows) );
        DBMS_DATAPUMP.DATA_FILTER( handle => h1
                                 , name => 'INCLUDE_ROWS'
                                 , value => include_rows
                                 );
    END IF;
    -- Specify parallelism.
    dbms_datapump.log_entry(h1,'Setting parallelism to ' || NVL(TO_CHAR(parallelism),'NULL') );
    DBMS_DATAPUMP.SET_PARALLEL(handle => h1 , degree => parallelism);
    -- Specify SCHEMA_LIST: rewrite the comma-separated input as a quoted, upper-cased
    -- list ('A','B',...) as required by the SCHEMA_LIST metadata filter.
    IF schema_list IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting SCHEMA_LIST = ' || schema_list);
        l_schema_list := '';
        FOR i IN (SELECT regexp_substr(schema_list, '[^,]+', 1, LEVEL) token
                  FROM dual
                  CONNECT BY instr(schema_list, ',', 1, LEVEL - 1) > 0
                 )
        LOOP
            l_schema_list := l_schema_list || '''' || i.token || ''',';
        END LOOP;
        l_schema_list := UPPER(RTRIM(l_schema_list,','));
        DBMS_DATAPUMP.METADATA_FILTER(handle => h1
                                     , name => 'SCHEMA_LIST'
                                     , value => l_schema_list
                                     );
    END IF;
    -- Specify TABLE_LIST: same quoting/upper-casing, applied via the NAME_LIST metadata filter.
    IF table_list IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting TABLE_LIST ' );
        l_table_list := '';
        FOR i IN (SELECT regexp_substr(table_list, '[^,]+', 1, LEVEL) token
                  FROM dual
                  CONNECT BY instr(table_list, ',', 1, LEVEL - 1) > 0
                 )
        LOOP
            dbms_datapump.log_entry(h1,' ' || i.token);
            l_table_list := l_table_list || '''' || i.token || ''',';
        END LOOP;
        l_table_list := UPPER(RTRIM(l_table_list,','));
        DBMS_DATAPUMP.METADATA_FILTER(handle => h1
                                     , name => 'NAME_LIST'
                                     , value => l_table_list
                                     );
    END IF;
    -- Specify Partition List via the PARTITION_LIST data filter.
    IF partition_list IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting PARTITION_LIST ' );
        l_partition_list := '';
        FOR i IN (SELECT regexp_substr(partition_list, '[^,]+', 1, LEVEL) token
                  FROM dual
                  CONNECT BY instr(partition_list, ',', 1, LEVEL - 1) > 0
                 )
        LOOP
            dbms_datapump.log_entry(h1,' ' || i.token);
            l_partition_list := l_partition_list || '''' || i.token || ''',';
        END LOOP;
        l_partition_list := UPPER(RTRIM(l_partition_list,','));
        DBMS_DATAPUMP.DATA_FILTER(handle => h1
                                 , name => 'PARTITION_LIST'
                                 , value => l_partition_list
                                 );
    END IF;
    -- Specify SUBQuery row filter (appended to the SELECT against each table).
    IF subquery IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting SUBQUERY = ' || subquery );
        DBMS_DATAPUMP.DATA_FILTER( handle => h1
                                 , name => 'SUBQUERY'
                                 , value => subquery
                                 );
    END IF;
    -- Specify EXCLUDE: each token becomes an EXCLUDE_PATH_EXPR substring match.
    IF exclude IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting EXCLUDE = ' || exclude );
        FOR i IN (SELECT regexp_substr(exclude, '[^,]+', 1, LEVEL) token
                  FROM dual
                  CONNECT BY instr(exclude, ',', 1, LEVEL - 1) > 0
                 )
        LOOP
            DBMS_DATAPUMP.METADATA_FILTER(handle => h1
                                         , name => 'EXCLUDE_PATH_EXPR'
                                         , value => 'LIKE UPPER(''%' || i.token || '%'')'
                                         );
        END LOOP;
    END IF;
    -- Specify INCLUDE: each token becomes an INCLUDE_PATH_EXPR substring match.
    IF include IS NOT NULL THEN
        dbms_datapump.log_entry(h1,'Setting INCLUDE = ' || include );
        FOR i IN (SELECT regexp_substr(include, '[^,]+', 1, LEVEL) token
                  FROM dual
                  CONNECT BY instr(include, ',', 1, LEVEL - 1) > 0
                 )
        LOOP
            DBMS_DATAPUMP.METADATA_FILTER(handle => h1
                                         , name => 'INCLUDE_PATH_EXPR'
                                         , value => 'LIKE UPPER(''%' || i.token || '%'')'
                                         );
        END LOOP;
    END IF;
    -- Remap schema (IMPORT only): tokens are OLD:NEW pairs split on ':'.
    IF operation = 'IMPORT' THEN
        IF remap_schema IS NOT NULL THEN
            dbms_datapump.log_entry(h1,'Setting REMAP_SCHEMA as ' || remap_schema );
            FOR i IN (SELECT regexp_substr(remap_schema, '[^,]+', 1, LEVEL) token
                      FROM dual
                      CONNECT BY instr(remap_schema, ',', 1, LEVEL - 1) > 0
                     )
            LOOP
                DBMS_DATAPUMP.METADATA_REMAP( handle => h1
                                            , name => 'REMAP_SCHEMA'
                                            , old_value => SUBSTR(i.token,1,INSTR(i.token,':')-1)
                                            , value => SUBSTR(i.token,INSTR(i.token,':')+1)
                                            );
            END LOOP;
        END IF;
    END IF;
    -- Remap table (IMPORT only): tokens are OLD:NEW pairs split on ':'.
    IF operation = 'IMPORT' THEN
        IF remap_table IS NOT NULL THEN
            dbms_datapump.log_entry(h1,'Setting REMAP_TABLE as ' || remap_table );
            FOR i IN (SELECT regexp_substr(remap_table, '[^,]+', 1, LEVEL) token
                      FROM dual
                      CONNECT BY instr(remap_table, ',', 1, LEVEL - 1) > 0
                     )
            LOOP
                DBMS_DATAPUMP.METADATA_REMAP( handle => h1
                                            , name => 'REMAP_TABLE'
                                            , old_value => SUBSTR(i.token,1,INSTR(i.token,':')-1)
                                            , value => SUBSTR(i.token,INSTR(i.token,':')+1)
                                            );
            END LOOP;
        END IF;
    END IF;
    -- Remap Tablespace (IMPORT only): tokens are OLD:NEW pairs split on ':'.
    IF operation = 'IMPORT' THEN
        IF remap_tablespace IS NOT NULL THEN
            dbms_datapump.log_entry(h1,'Setting REMAP_TABLESPACE as ' || remap_tablespace );
            FOR i IN (SELECT regexp_substr(remap_tablespace, '[^,]+', 1, LEVEL) token
                      FROM dual
                      CONNECT BY instr(remap_tablespace, ',', 1, LEVEL - 1) > 0
                     )
            LOOP
                DBMS_DATAPUMP.METADATA_REMAP( handle => h1
                                            , name => 'REMAP_TABLESPACE'
                                            , old_value => SUBSTR(i.token,1,INSTR(i.token,':')-1)
                                            , value => SUBSTR(i.token,INSTR(i.token,':')+1)
                                            );
            END LOOP;
        END IF;
    END IF;
    -- Specify a single dump file for the job (using the handle just returned)
    -- and a directory object, which must already be defined and accessible
    -- to the user running this procedure. For EXPORT, filesize/reusefile are honored.
    IF operation = 'EXPORT' THEN
        dbms_datapump.log_entry(h1,'Setting dumpfile as ' || dumpfile );
        dbms_datapump.log_entry(h1,'Setting maximum filesize as ' || filesize );
        dbms_datapump.log_entry(h1,'Setting Reuse dumpfile as ' || NVL(TO_CHAR(reusefile),'NULL') );
        DBMS_DATAPUMP.ADD_FILE(handle => h1
                              , filename => dumpfile
                              , directory => directory
                              , filesize => filesize
                              , filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
                              , reusefile => reusefile
                              );
    END IF;
    -- Add dumpfile to read from for IMPORT (filesize/reusefile do not apply).
    IF operation = 'IMPORT' THEN
        dbms_datapump.log_entry(h1,'Setting dumpfile as ' || dumpfile );
        DBMS_DATAPUMP.ADD_FILE(handle => h1
                              , filename => dumpfile
                              , directory => directory
                              , filesize => NULL
                              , filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE
                              , reusefile => NULL
                              );
    END IF;
    -- Start the job. An exception will be generated if something is not set up
    -- properly.
    dbms_datapump.log_entry(h1,'Starting ' || operation || ' job at ' || TO_CHAR(sysdate,'DD-MON-YYYY HH24:MI:SS') );
    DBMS_DATAPUMP.START_JOB(handle => h1
                           , cluster_ok => cluster_ok
                           , service_name => service_name
                           );
    -- The export job should now be running. In the following loop, the job
    -- is monitored until it completes. In the meantime, progress information is
    -- displayed. timeout => -1 blocks until new status is available.
    percent_done := 0;
    job_state := 'UNDEFINED';
    WHILE (job_state != 'COMPLETED') and (job_state != 'STOPPED')
    LOOP
        dbms_datapump.get_status( handle => h1
                                , mask => dbms_datapump.ku$_status_job_error +
                                          dbms_datapump.ku$_status_job_status +
                                          dbms_datapump.ku$_status_wip
                                , timeout => -1
                                , job_state => job_state
                                , status => sts
                                );
        js := sts.job_status;
        -- If the percentage done changed, display the new value.
        --if js.percent_done != percent_done
        --then
        --  dbms_output.put_line('*** Job percent done = ' ||to_char(js.percent_done));
        --  percent_done := js.percent_done;
        --end if;
        -- If any work-in-progress (WIP) or error messages were received for the job,
        -- display them. WIP messages take precedence over error messages.
        if (bitand(sts.mask,dbms_datapump.ku$_status_wip) != 0)
        then
            le := sts.wip;
        else
            if (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
            then
                le := sts.error;
            else
                le := null;
            end if;
        end if;
        if le is not null
        then
            ind := le.FIRST;
            while ind is not null
            loop
                dbms_output.put_line(le(ind).LogText);
                ind := le.NEXT(ind);
            end loop;
        end if;
    END LOOP;
    -- Indicate that the job finished and detach from it.
    dbms_output.put_line('Job has completed. Final job state = ' || job_state);
    dbms_datapump.detach(h1);
EXCEPTION
    WHEN others THEN
        -- On any failure: dump pending Data Pump error messages, stop the job
        -- immediately without keeping the master table, then re-raise.
        -- dbms_output.put_line(DBMS_UTILITY.FORMAT_ERROR_STACK() );
        -- dbms_output.put_line(DBMS_UTILITY.FORMAT_ERROR_BACKTRACE);
        -- dbms_output.put_line(DBMS_UTILITY.FORMAT_CALL_STACK () );
        dbms_datapump.get_status( handle => h1
                                , mask => dbms_datapump.ku$_status_job_error +
                                          dbms_datapump.ku$_status_job_status +
                                          dbms_datapump.ku$_status_wip
                                , timeout => -1
                                , job_state => job_state
                                , status => sts
                                );
        IF (bitand(sts.mask,dbms_datapump.ku$_status_job_error) != 0)
        THEN
            le := sts.error;
            ind := le.FIRST;
            while ind is not null
            loop
                dbms_output.put_line(le(ind).LogText);
                ind := le.NEXT(ind);
            end loop;
        END IF;
        DBMS_DATAPUMP.STOP_JOB( handle=> h1, immediate => 1 , keep_master => 0 , delay => 0 );
        RAISE;
END;
END;
/