Below are the steps to export and import a schema in the AWS cloud environment (Oracle on Amazon RDS).
1. CONNECT TO PROD AWS CLOUD FROM LOCAL PROD DB
sqlplus root/<root_password>@awscld
2. EXPORT
DECLARE
  hdnl NUMBER;
BEGIN
  -- Open a schema-mode export job
  hdnl := DBMS_DATAPUMP.OPEN(operation => 'EXPORT', job_mode => 'SCHEMA', job_name => null);
  -- Dump file and log file are written to DATA_PUMP_DIR on the RDS instance
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<DUMP_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<EXPDUMP_LOG_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
  -- Export only the listed schema
  DBMS_DATAPUMP.METADATA_FILTER(hdnl, 'SCHEMA_EXPR', 'IN (''<SCHEMA_NAME>'')');
  DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
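Optionally, while the export is running, the job state can also be monitored from the data dictionary; a minimal check, assuming the connected user can read DBA_DATAPUMP_JOBS:
-- Shows each Data Pump job and its current state (e.g. EXECUTING, COMPLETED)
SELECT owner_name, job_name, operation, job_mode, state FROM dba_datapump_jobs;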
3. CHECK THE STATUS OF THE EXPORT USING THE QUERY BELOW
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','<EXPDUMP_LOG_NAME>'));
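To confirm that the dump file itself was written, the contents of DATA_PUMP_DIR can also be listed; this sketch assumes the rdsadmin.rds_file_util.listdir utility is available on the RDS instance:
-- List files in DATA_PUMP_DIR, most recently modified last
SELECT * FROM table(rdsadmin.rds_file_util.listdir('DATA_PUMP_DIR')) ORDER BY mtime;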
4. MOVE THE DUMP FILE TO AWS DEV CLOUD
Copy the dump file from the prod instance's DATA_PUMP_DIR to the dev instance's DATA_PUMP_DIR (see the sketch below).
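One common way to do this between two RDS for Oracle instances is DBMS_FILE_TRANSFER over a database link; the sketch below is run on prod and assumes a database link to dev named TO_AWSCLD_DEV (a hypothetical name) and DATA_PUMP_DIR on both sides:
BEGIN
  -- Push <DUMP_NAME> from prod's DATA_PUMP_DIR to dev's DATA_PUMP_DIR
  -- over the (hypothetical) database link TO_AWSCLD_DEV
  DBMS_FILE_TRANSFER.PUT_FILE(
    source_directory_object      => 'DATA_PUMP_DIR',
    source_file_name             => '<DUMP_NAME>',
    destination_directory_object => 'DATA_PUMP_DIR',
    destination_file_name        => '<DUMP_NAME>',
    destination_database         => 'TO_AWSCLD_DEV');
END;
/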
5. CONNECT TO AWS DEV CLOUD FROM LOCAL PROD DB
sqlplus root/<root_password>@AWSCLD_DEV
6. BACK UP THE OLD DATA FOR SAFETY
DECLARE
  hdnl NUMBER;
BEGIN
  -- Schema-mode export of the existing DEV schema as a backup.
  -- Use dump/log file names different from the prod dump copied in step 4,
  -- otherwise the export may fail or overwrite that file.
  hdnl := DBMS_DATAPUMP.OPEN(operation => 'EXPORT', job_mode => 'SCHEMA', job_name => null);
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<DUMP_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<EXPDUMP_LOG_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
  DBMS_DATAPUMP.METADATA_FILTER(hdnl, 'SCHEMA_EXPR', 'IN (''<SCHEMA_NAME>'')');
  DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
7. CREATING USER AND ASSIGNING ROLES
After taking the backup, drop the user with the command below:
DROP USER <username> CASCADE;
After dropping it, recreate the same user and re-grant its roles and privileges (a sketch follows).
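A minimal sketch of recreating the user; the password, tablespaces, and grants are placeholders and should match whatever the dropped user originally had:
CREATE USER <username> IDENTIFIED BY <password>
  DEFAULT TABLESPACE <tablespace_name>
  TEMPORARY TABLESPACE <temp_tablespace>
  QUOTA UNLIMITED ON <tablespace_name>;
-- Example grants only; re-grant the roles/privileges the original user had
GRANT CONNECT, RESOURCE TO <username>;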
8. IMPORT
DECLARE
  hdnl NUMBER;
BEGIN
  -- Open a schema-mode import job
  hdnl := DBMS_DATAPUMP.OPEN(operation => 'IMPORT', job_mode => 'SCHEMA', job_name => null);
  -- Read the dump file copied from prod; write the import log alongside it
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<DUMP_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
  DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => '<IMPDUMP_LOG_NAME>', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
  -- Import only the listed schema
  DBMS_DATAPUMP.METADATA_FILTER(hdnl, 'SCHEMA_EXPR', 'IN (''<SCHEMA_NAME>'')');
  DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
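The import log can be checked the same way as the export log in step 3:
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','<IMPDUMP_LOG_NAME>'));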
9. CHECK OBJECT COUNTS IN BOTH AWS CLOUD PROD AND AWS CLOUD DEV
select OBJECT_TYPE, count(*) from dba_objects where OWNER='<OWNER_NAME>' group by OBJECT_TYPE order by 2 desc;
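As an optional extra check (not part of the original checklist), invalid objects in the imported schema can also be listed via the STATUS column of DBA_OBJECTS:
-- Objects that failed to compile after the import
select OBJECT_NAME, OBJECT_TYPE from dba_objects where OWNER='<OWNER_NAME>' and STATUS='INVALID';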