Cloning a schema with DBMS_DATAPUMP
The following scripts demonstrate how a schema can be cloned using dbms_datapump.
run.sql
run.sql
is the driving script.
-- run.sql
--
-- Driving script: cleans up, creates the schemas, exports
-- FROM_SCHEMA_A and imports it as TO_SCHEMA_A, then verifies
-- the cloned schema.
connect rene/rene
@cleanup
create directory DATAPUMP_DIR as 'C:\temp';
@create_schemas.sql
connect exp_imp_admin/p
-- export_import_plsql presumably runs export.plsql followed by
-- import.plsql (the wrapper script itself is not shown here) -- TODO confirm
@@export_import_plsql
--@@export_import_dp
-- Since to_schema_A will also access the same tables
-- in schema_b that from_schema_A accesses, we also
-- need to grant the relevant grants for to_schema_A:
connect schema_B/p
grant insert, select on table_b_1 to to_schema_A;
connect to_schema_a/p
-- The trigger cloned into TO_SCHEMA_A should propagate this row
-- into schema_b.table_b_1 as well:
insert into table_a_1 values (999, 'foo bar');
-- NOTE(review): TO_SCHEMA_A is granted no access to DBA_OBJECTS
-- anywhere in these scripts -- confirm this query does not fail
-- with ORA-00942 (ALL_OBJECTS would be the safe choice).
select object_type, object_name from dba_objects where owner = 'TO_SCHEMA_A';
-- Show compilation errors (if any) of the cloned PL/SQL objects:
select name, line, substrb(text, 1, 50) from all_errors where owner = 'TO_SCHEMA_A';
@@call_count_procs
create_schemas.sql
-- vim: foldmethod=marker foldmarker={,}
--
-- create_schemas.sql
--
-- Creates the three users involved in the demonstration. Note that
-- TO_SCHEMA_A is *not* created here: the Data Pump import creates
-- it through the REMAP_SCHEMA remapping.
create user from_schema_A -- {
--
-- This is the schema that is supposed to be
-- cloned.
--
identified by p
quota unlimited on users;
grant create procedure,
create session,
create table,
create trigger,
create view
to from_schema_A;
-- The following grants only used if from_schema_A performs
-- the export:
-- grant read, write on directory DATAPUMP_DIR to from_schema_A;
----- }
create user schema_B -- {
--
-- This is a schema that contains objects that
-- are referenced by from_schema_A
--
identified by p
quota unlimited on users;
grant create session,
create table
to schema_B;
----- }
create user exp_imp_admin -- {
--
-- this is the user that will perform the export and the import
--
identified by p;
grant exp_full_database,
imp_full_database
to exp_imp_admin;
-- exp_imp_admin needs a quota because the Data Pump master table
-- is created in this user's default tablespace:
alter user exp_imp_admin quota unlimited on users;
----- }
connect schema_b/p
----------------------- {
-- table_b_1 is the object in schema_B that from_schema_A's view
-- and trigger will reference:
create table table_b_1 (
a number,
b varchar2(10)
);
-- from_schema_A will create a view view_a_1 that
-- selects from table_b_1. from_schema_A will also
-- create a trigger that inserts into table_b_1.
--
-- So, we have to grant insert and select on this
-- table to from_schema_A:
grant insert, select on table_b_1 to from_schema_A;
----- }
connect from_schema_a/p
----------------------- {
-- The objects of the schema to be cloned: a table with data, a
-- view and a trigger that both reference schema_b.table_b_1, and
-- a package that counts rows in the table and the view.
create table table_a_1 (
c number,
d varchar2(20)
);
insert into table_a_1 values (10, 'ten' );
insert into table_a_1 values (11, 'eleven');
-- view_a_1 depends on schema_b.table_b_1 (select was granted above):
create view view_a_1 as
select * from schema_b.table_b_1;
create package package_a_1 as
function count_a return number;
function count_b return number;
end package_a_1;
/
create package body package_a_1 as -- {
-- count_a: number of rows in table_a_1
function count_a return number is -- {
ret number;
begin
select count(*) into ret
from table_a_1;
return ret;
end count_a; -- }
-- count_b: number of rows visible through view_a_1
function count_b return number is -- {
ret number;
begin
select count(*) into ret
from view_a_1;
return ret;
end count_b; -- }
end package_a_1; -- }
/
-- trigger_a copies every row inserted into table_a_1 into
-- schema_b.table_b_1 (insert was granted above):
create trigger trigger_a
before insert on table_a_1
for each row
begin
insert into schema_b.table_b_1 values (:new.c, :new.d);
end trigger_a;
/
-- There's a trigger on the table, so the
-- following inserts should fill table_b_1
-- (in schema_b):
insert into table_a_1 values (1, 'one');
insert into table_a_1 values (2, 'two');
@@call_count_procs
----- }
export.plsql
-- export.plsql
--
-- Exports FROM_SCHEMA_A (with compressed metadata) into
-- DATAPUMP_DIR/clone_schema_a.dmp using dbms_datapump.
declare
datapump_job number;
job_state varchar2(20);
begin
datapump_job := dbms_datapump.open(
operation => 'EXPORT',
job_mode => 'SCHEMA',
remote_link => null,
job_name => 'Clone schema A, export',
version => 'LATEST',
compression => dbms_datapump.ku$_compress_metadata
);
dbms_output.put_line('datapump_job: ' || datapump_job);
-- Restrict the export to FROM_SCHEMA_A. The value of a
-- SCHEMA_LIST filter is a comma separated list of *quoted*
-- schema names, hence the doubled single quotes:
dbms_datapump.metadata_filter(
handle => datapump_job,
name =>'SCHEMA_LIST',
value =>'''FROM_SCHEMA_A'''
);
dbms_datapump.add_file(
handle => datapump_job,
filename => 'clone_schema_a_export.log',
directory => 'DATAPUMP_DIR',
filetype => dbms_datapump.ku$_file_type_log_file);
dbms_datapump.add_file(
handle => datapump_job,
filename => 'clone_schema_a.dmp', -- Note: the file will be created with an UPPERCASE name!
directory => 'DATAPUMP_DIR',
filetype => dbms_datapump.ku$_file_type_dump_file);
dbms_datapump.start_job (datapump_job);
-- Block until the job finishes, then report its final state:
dbms_datapump.wait_for_job(datapump_job, job_state);
dbms_output.put_line('Job state: ' || job_state);
dbms_datapump.detach(datapump_job);
exception when others then
dbms_output.put_line(sqlcode);
dbms_output.put_line(sqlerrm);
-- datapump_job is null if dbms_datapump.open itself failed:
if datapump_job is not null then
dbms_datapump.detach(datapump_job);
end if;
end;
/
-- Show the files the job created:
$dir c:\temp\clone*
Parameter file for export (export.par)
# export.par -- parameter file for the expdp command line variant
# content=metadata_only: export object definitions, no table rows
content=metadata_only
directory=DATAPUMP_DIR
dumpfile=clone_schema_a.dmp
schemas=from_schema_a
# exclude=trigger
import.plsql
-- import.plsql
--
-- Imports the dump file written by export.plsql, remapping
-- FROM_SCHEMA_A to TO_SCHEMA_A so that the schema is cloned
-- under the new name.
declare
   datapump_job number;
   job_state    varchar2(20);
begin
   datapump_job := dbms_datapump.open(
      operation   => 'IMPORT',
      job_mode    => 'SCHEMA',
      remote_link => null,
      job_name    => 'Clone schema A, import',
      version     => 'LATEST',
      -- NOTE(review): compression is an export-time setting;
      -- presumably ignored for IMPORT -- confirm it can be dropped.
      compression => dbms_datapump.ku$_compress_metadata
   );
   dbms_output.put_line('datapump_job: ' || datapump_job);
   -- The actual cloning step: everything exported from
   -- FROM_SCHEMA_A is created in TO_SCHEMA_A instead:
   dbms_datapump.metadata_remap(datapump_job,'REMAP_SCHEMA','FROM_SCHEMA_A','TO_SCHEMA_A');
   dbms_datapump.add_file(
      handle    => datapump_job,
      filename  => 'clone_schema_a_import.log',
      directory => 'DATAPUMP_DIR',
      filetype  => dbms_datapump.ku$_file_type_log_file);
   dbms_datapump.add_file(
      handle    => datapump_job,
      filename  => 'clone_schema_a.dmp', -- Note: export has created the file with UPPERCASE letters
      directory => 'DATAPUMP_DIR',
      filetype  => dbms_datapump.ku$_file_type_dump_file);
   dbms_datapump.start_job (datapump_job);
   dbms_datapump.wait_for_job(datapump_job, job_state);
   dbms_output.put_line('Job state: ' || job_state);
   dbms_datapump.detach(datapump_job);
exception when others then
   -- Fix: this handler was commented out, so any error left the
   -- session attached to a failed job. Restored to mirror
   -- export.plsql: report the error, then detach (datapump_job is
   -- null if dbms_datapump.open itself failed).
   dbms_output.put_line(sqlcode);
   dbms_output.put_line(sqlerrm);
   if datapump_job is not null then
      dbms_datapump.detach(datapump_job);
   end if;
end;
/
$dir c:\temp\clone*
import.par
# import.par -- parameter file for the impdp command line variant
directory=DATAPUMP_DIR
dumpfile=clone_schema_a.dmp
remap_schema=from_schema_a:to_schema_a
# OID:n: assign new OIDs to imported object types instead of
# reusing the exported ones (avoids clashes when the source
# schema lives in the same database)
transform=OID:n
Some grants
Since to_schema_A will also access the same tables in schema_b that from_schema_A accesses, the relevant privileges must also be granted to to_schema_A:
-- Object privileges granted *to* from_schema_A are not carried over
-- to the clone, so to_schema_A needs its own grants on
-- schema_b.table_b_1 (used by the cloned view and trigger):
connect schema_B/p
grant insert, select on table_b_1 to to_schema_A;
Misc
connect to_schema_a/p
-- Fire the cloned trigger (should also insert into schema_b.table_b_1):
insert into table_a_1 values (999, 'foo bar');
-- List the objects that were created in the cloned schema:
select object_type, object_name from dba_objects where owner = 'TO_SCHEMA_A';
-- Check for compilation errors in the cloned PL/SQL objects:
select name, line, substrb(text, 1, 50) from all_errors where owner = 'TO_SCHEMA_A';
export_import_dp.sql (TODO)
-- The same export/import done with the expdp/impdp command line
-- utilities (invoked from SQL*Plus via the '$' host command),
-- driven by the parameter files shown above:
$expdp exp_imp_admin/p parfile=export.par
$impdp exp_imp_admin/p parfile=import.par
call_count_procs.sql
-- call_count_procs.sql
--
-- Prints the row counts computed by package_a_1: count_a for
-- table_a_1 and count_b for view_a_1. One anonymous block instead
-- of two SQL*Plus "exec" shortcuts -- same calls, same output.
begin
   dbms_output.put_line('package_a_1.count_a: ' || package_a_1.count_a);
   dbms_output.put_line('package_a_1.count_b: ' || package_a_1.count_b);
end;
/
cleanup.sql
Cleaning up the whole thing…
-- Drops all users and the directory object, and removes the files
-- the Data Pump jobs have written.
connect rene/rene
-- https://github.com/ReneNyffenegger/oracle_scriptlets/blob/master/sqlpath/drop_schema_if_exists.sql
@drop_schema_if_exists to_schema_A
@drop_schema_if_exists from_schema_A
@drop_schema_if_exists schema_B
@drop_schema_if_exists exp_imp_admin
-- Remove the dump file through UTL_FILE while the directory
-- object still exists:
-- https://github.com/ReneNyffenegger/oracle_scriptlets/blob/master/sqlpath/utl_file_fremove.sql
@utl_file_fremove DATAPUMP_DIR CLONE_SCHEMA_A.DMP
-- https://github.com/ReneNyffenegger/oracle_scriptlets/blob/master/sqlpath/drop_if_exists.sql
@drop_if_exists DATAPUMP_DIR
set define off
--Setting define off so that 2>&1 doesn't do stupid things...
$del c:\temp\CLONE_SCHEMA_A.DMP >nul 2>&1
$del c:\temp\clone_schema_a_export.log >nul 2>&1
$del c:\temp\clone_schema_a_import.log >nul 2>&1
set define on
Extracting a schema
run.sql
connect / as sysdba
@@drop_if_exists DATAPUMP_DIR
create directory DATAPUMP_DIR as 'c:\temp';
@@create_schema
@@export_dump.plsql
@@export_sql.plsql
@@import_sql_file
$@del c:\temp\EXPORT.DMP
$@del c:\temp\schema.sql
create_schema.sql
-- https://github.com/ReneNyffenegger/oracle_scriptlets/blob/master/sqlpath/drop_schema_if_exists.sql
-- create_schema.sql
--
-- Creates the user tq84_user with two tables (parent/child), a
-- package, a view and some data. This is the schema that will be
-- extracted into a SQL script.
-- https://github.com/ReneNyffenegger/oracle_scriptlets/blob/master/sqlpath/drop_schema_if_exists.sql
@drop_schema_if_exists tq84_user
create user tq84_user
identified by p
quota unlimited on users;
grant create procedure,
create table,
create session,
create view
to tq84_user;
-- tq84_user performs the export itself, so it needs access to the
-- directory object:
grant read,write on directory DATAPUMP_DIR to tq84_user;
connect tq84_user/p
create table table_parent (
id number primary key,
col_1 number not null,
col_2 varchar2(20),
col_3 date
);
create table table_child (
id number primary key,
-- Fix: a column definition requires a datatype; "references"
-- alone raises ORA-02263. The inserted values are integers, so
-- number matches table_parent.id:
id_parent number references table_parent,
col_1 number not null,
col_2 varchar2(20)
);
create package package_1 as
function f return number;
end package_1;
/
create package body package_1 as
function f return number is begin
return 42;
end f;
end package_1;
/
-- view_01 joins parent and child; left join keeps parents that
-- have no children:
create view view_01 as select
p.col_1 p_col_1,
p.col_2 p_col_2,
p.col_3 p_col_3,
c.col_1 c_col_1,
c.col_2 c_col_2
from
table_parent p left join
table_child c on p.id = c.id_parent;
insert into table_parent values (1, 20, 'foo', null );
insert into table_parent values (2, 40, 'bar', sysdate );
insert into table_parent values (3, 60, 'baz', sysdate-20/3);
insert into table_child values (1, 1, 0.2, 'apple');
insert into table_child values (2, 1, 0.3, 'pear' );
insert into table_child values (3, 1, 0.5, 'lemon');
insert into table_child values (4, 3, 1 , 'New York');
dbms_datapump_exception.plsql
-- dbms_datapump_exception.plsql
--
-- Included verbatim (with @@) into the WHEN OTHERS handlers of
-- export_dump.plsql and export_sql.plsql. The identifiers
-- datapump_job and job_state therefore come from the *including*
-- block's declare section.
dbms_output.put_line(sqlcode);
dbms_output.put_line(sqlerrm);
declare
-- NOTE(review): ku$_status1010 is tied to one release (10.1.0);
-- other releases need the matching ku$_statusXXXX type --
-- confirm against the installed version.
status_ ku$_status1010;
begin
-- Fetch the errors the failed job has accumulated:
dbms_datapump.get_status(
handle => datapump_job,
mask => dbms_datapump.ku$_status_job_error,
timeout => 0,
job_state => job_state,
status => status_
);
for i in 1 .. status_.error.count loop
dbms_output.put_line(status_.error(i).logtext);
end loop;
-- Don't stay attached to the failed job:
if datapump_job is not null then
dbms_datapump.detach(datapump_job);
end if;
end;
export_dump.plsql
-- export_dump.plsql
--
-- Exports the current schema (the script runs while connected as
-- tq84_user; SCHEMA mode without a filter exports the connected
-- schema) into DATAPUMP_DIR/export.dmp.
declare
datapump_job number;
job_state varchar2(20);
begin
datapump_job := dbms_datapump.open(
operation => 'EXPORT',
job_mode => 'SCHEMA',
remote_link => null,
job_name => 'Export dump file',
version => 'LATEST'
);
dbms_output.put_line('datapump_job: ' || datapump_job);
--
-- Since this script is *only* used to create
-- an SQL script, we don't need to export
-- the data as well:
dbms_datapump.data_filter(
handle => datapump_job,
name =>'INCLUDE_ROWS',
value => 0);
dbms_datapump.add_file(
handle => datapump_job,
filename => 'export.dmp',
directory => 'DATAPUMP_DIR',
filetype => dbms_datapump.ku$_file_type_dump_file);
dbms_datapump.start_job(
handle => datapump_job,
skip_current => 0,
abort_step => 0);
-- Block until the job finishes, then report its final state:
dbms_datapump.wait_for_job(datapump_job, job_state);
dbms_output.put_line('Job state: ' || job_state);
dbms_datapump.detach(datapump_job);
exception when others then
-- Shared handler: prints the job's errors and detaches.
@@dbms_datapump_exception.plsql
end;
/
export_sql.plsql
-- export_sql.plsql
--
-- Turns the previously created dump file (export.dmp) into a SQL
-- script: with operation SQL_FILE nothing is written to the
-- database -- the DDL contained in the dump file is written to
-- schema.sql instead.
declare
datapump_job number;
job_state varchar2(20);
begin
datapump_job := dbms_datapump.open(
operation => 'SQL_FILE',
job_mode => 'SCHEMA',
remote_link => null,
job_name => 'Export SQL file',
version => 'LATEST'
);
dbms_output.put_line('datapump_job: ' || datapump_job);
-- Input: the dump file created by export_dump.plsql:
dbms_datapump.add_file(
handle => datapump_job,
filename => 'export.dmp',
directory => 'DATAPUMP_DIR',
filetype => dbms_datapump.ku$_file_type_dump_file);
-- Output: the generated SQL script:
dbms_datapump.add_file(
handle => datapump_job,
filename => 'schema.sql',
directory => 'DATAPUMP_DIR',
filetype => dbms_datapump.ku$_file_type_sql_file);
dbms_datapump.start_job(
handle => datapump_job,
skip_current => 0,
abort_step => 0);
-- Block until the job finishes, then report its final state:
dbms_datapump.wait_for_job(datapump_job, job_state);
dbms_output.put_line('Job state: ' || job_state);
dbms_datapump.detach(datapump_job);
exception when others then
-- Shared handler: prints the job's errors and detaches.
@@dbms_datapump_exception.plsql
end;
/
import_sql_file.sql
-- import_sql_file.sql
--
-- Recreates tq84_user from scratch and rebuilds its objects by
-- simply running the generated schema.sql script.
connect / as sysdba
@drop_schema_if_exists tq84_user
create user tq84_user
identified by p
quota unlimited on users;
grant create procedure,
create table,
create session,
create view
to tq84_user;
-- Note: unlike create_schema.sql, no directory grant here -- the
-- generated script is plain SQL, no Data Pump involved.
connect tq84_user/p
@c:\temp\schema.sql