Import data into AWS Oracle RDS
copy the dump file to S3
[oracle@anurag dpdump]$
[oracle@anurag dpdump]$ pwd
/u01/oracle/admin/india/dpdump
[oracle@anurag dpdump]$
[oracle@anurag dpdump]$ aws s3 cp /u01/oracle/admin/india/dpdump/EXPDAT01-13_50_36.DMP s3://demodatapump
upload: ./EXPDAT01-13_50_36.DMP to s3://demodatapump/EXPDAT01-13_50_36.DMP
[oracle@anurag dpdump]$
download the dump file from S3 to the RDS instance
SELECT rdsadmin.rdsadmin_s3_tasks.download_from_s3(
p_bucket_name => 'demodatapump',
p_directory_name => 'DATA_PUMP_DIR')
AS TASK_ID FROM DUAL;
1591366708566-339
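The call above pulls every object in the bucket into DATA_PUMP_DIR. If the bucket holds more than just this dump file, the download can be narrowed with the optional p_s3_prefix parameter; a minimal sketch, assuming the same bucket and file name as above:
SELECT rdsadmin.rdsadmin_s3_tasks.download_from_s3(
p_bucket_name => 'demodatapump',
p_s3_prefix => 'EXPDAT01-13_50_36.DMP',
p_directory_name => 'DATA_PUMP_DIR')
AS TASK_ID FROM DUAL;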
check progress using the task log
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('BDUMP','dbtask-1591366708566-339.log'));
SQL> SELECT rdsadmin.rdsadmin_s3_tasks.download_from_s3(
p_bucket_name => 'demodatapump',
p_directory_name => 'DATA_PUMP_DIR')
AS TASK_ID FROM DUAL;
TASK_ID
--------------------------------------------------------------------------------
1591366708566-339
SQL>
SQL> SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('BDUMP','dbtask-1591366708566-339.log'));
TEXT
--------------------------------------------------------------------------------
2020-06-05 14:18:30.436 UTC [INFO ] This task is about to list the Amazon S3 objects for AWS Region us-west-1, bucket name demodatapump, and prefix .
2020-06-05 14:18:30.519 UTC [INFO ] The task successfully listed the Amazon S3 objects for AWS Region us-west-1, bucket name demodatapump, and prefix .
2020-06-05 14:18:30.536 UTC [INFO ] This task is about to download the Amazon S3 object or objects in /rdsdbdata/datapump from bucket name demodatapump and key EXPDAT01-13_50_36.DMP.
2020-06-05 14:18:30.745 UTC [INFO ] The task successfully downloaded the Amazon S3 object or objects from bucket name demodatapump with key EXPDAT01-13_50_36.DMP to the location /rdsdbdata/datapump.
2020-06-05 14:18:30.745 UTC [INFO ] The task finished successfully.
SQL>
check the contents of DATA_PUMP_DIR
SQL> select * from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) order by mtime;
FILENAME TYPE FILESIZE MTIME
------------------------------ ---------- ---------- ---------
sample_copied.dmp file 352256 29-MAY-20
sample_imp.log file 835 29-MAY-20
test_schema.dmp file 352256 29-MAY-20
datapump/ directory 4096 05-JUN-20
EXPDAT01-13_50_36.DMP file 724992 05-JUN-20
SQL>
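The listing also shows dump files left over from earlier runs; these count against instance storage, so once they are no longer needed they can be removed with UTL_FILE.FREMOVE. A minimal sketch, using one of the older files from the listing above:
EXEC UTL_FILE.FREMOVE('DATA_PUMP_DIR','sample_copied.dmp');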
import the HR schema
DECLARE
hdnl NUMBER;
BEGIN
-- open a schema-mode import job
hdnl := DBMS_DATAPUMP.OPEN( operation => 'IMPORT', job_mode => 'SCHEMA', job_name => null);
-- dump file downloaded from S3 into DATA_PUMP_DIR
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'EXPDAT01-13_50_36.DMP', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
-- log file for the import, written to the same directory
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'impdp_hr_schema.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
-- restrict the import to the HR schema
DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''HR'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
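The block above imports HR under its original schema name. As a variant, to load the data into a different schema, a DBMS_DATAPUMP.METADATA_REMAP call can be added just before START_JOB; HR_COPY here is only a hypothetical target schema name:
-- hypothetical: remap objects from HR into HR_COPY
DBMS_DATAPUMP.METADATA_REMAP(hdnl, 'REMAP_SCHEMA', 'HR', 'HR_COPY');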
SQL> DECLARE
hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'IMPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'EXPDAT01-13_50_36.DMP', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'impdp_hr_schema.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''HR'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/

PL/SQL procedure successfully completed.

SQL>
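Note that DBMS_DATAPUMP.START_JOB returns as soon as the job is submitted, so "PL/SQL procedure successfully completed" only confirms submission; the import itself keeps running in the background. Its state can be watched with a query like:
SELECT owner_name, job_name, operation, job_mode, state
FROM dba_datapump_jobs;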
check the import log
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','impdp_hr_schema.log'));
SQL> SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','impdp_hr_schema.log'));
TEXT
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Master table "ADMIN"."SYS_IMPORT_SCHEMA_01" successfully loaded/unloaded
Starting "ADMIN"."SYS_IMPORT_SCHEMA_01":
Processing object type SCHEMA_EXPORT/USER
Processing object type SCHEMA_EXPORT/SYSTEM_GRANT
Processing object type SCHEMA_EXPORT/ROLE_GRANT
Processing object type SCHEMA_EXPORT/DEFAULT_ROLE
Processing object type SCHEMA_EXPORT/TABLESPACE_QUOTA
Processing object type SCHEMA_EXPORT/PRE_SCHEMA/PROCACT_SCHEMA
Processing object type SCHEMA_EXPORT/SEQUENCE/SEQUENCE
Processing object type SCHEMA_EXPORT/TABLE/TABLE
Processing object type SCHEMA_EXPORT/TABLE/TABLE_DATA
. . imported "HR"."EMPLOYEES" 17.08 KB 107 rows
. . imported "HR"."LOCATIONS" 8.437 KB 23 rows
. . imported "HR"."JOB_HISTORY" 7.195 KB 10 rows
. . imported "HR"."JOBS" 7.109 KB 19 rows
. . imported "HR"."DEPARTMENTS" 7.125 KB 27 rows
. . imported "HR"."COUNTRIES" 6.367 KB 25 rows
. . imported "HR"."REGIONS" 5.546 KB 4 rows
Processing object type SCHEMA_EXPORT/TABLE/COMMENT
Processing object type SCHEMA_EXPORT/PROCEDURE/PROCEDURE
Processing object type SCHEMA_EXPORT/PROCEDURE/ALTER_PROCEDURE
Processing object type SCHEMA_EXPORT/VIEW/VIEW
Processing object type SCHEMA_EXPORT/TABLE/INDEX/INDEX
Processing object type SCHEMA_EXPORT/TABLE/CONSTRAINT/CONSTRAINT
Processing object type SCHEMA_EXPORT/TABLE/INDEX/STATISTICS/INDEX_STATISTICS
Processing object type SCHEMA_EXPORT/TABLE/CONSTRAINT/REF_CONSTRAINT
Processing object type SCHEMA_EXPORT/TABLE/TRIGGER
Processing object type SCHEMA_EXPORT/TABLE/STATISTICS/TABLE_STATISTICS
Processing object type SCHEMA_EXPORT/STATISTICS/MARKER
Job "ADMIN"."SYS_IMPORT_SCHEMA_01" successfully completed at Fri Jun 5 14:22:36 2020 elapsed 0 00:00:19
30 rows selected.
SQL>
validate the schema after the import completes
SQL> select owner,object_name ,object_type ,status from dba_objects where owner='HR';
OWNER OBJECT_NAME OBJECT_TYPE STATUS
-------------------- ---------------------------------------- ----------------------- ----------
HR DEPARTMENTS_SEQ SEQUENCE VALID
HR EMPLOYEES_SEQ SEQUENCE VALID
HR LOCATIONS_SEQ SEQUENCE VALID
HR COUNTRIES TABLE VALID
HR COUNTRY_C_ID_PK INDEX VALID
HR DEPARTMENTS TABLE VALID
HR EMPLOYEES TABLE VALID
HR LOCATIONS TABLE VALID
HR REGIONS TABLE VALID
HR JOBS TABLE VALID
HR JOB_HISTORY TABLE VALID
HR SECURE_DML PROCEDURE VALID
HR ADD_JOB_HISTORY PROCEDURE VALID
HR EMP_DETAILS_VIEW VIEW VALID
HR DEPT_LOCATION_IX INDEX VALID
HR DEPT_ID_PK INDEX VALID
HR EMP_JOB_IX INDEX VALID
HR EMP_DEPARTMENT_IX INDEX VALID
HR EMP_MANAGER_IX INDEX VALID
HR EMP_EMP_ID_PK INDEX VALID
HR EMP_NAME_IX INDEX VALID
HR LOC_ID_PK INDEX VALID
HR LOC_COUNTRY_IX INDEX VALID
HR LOC_STATE_PROVINCE_IX INDEX VALID
HR LOC_CITY_IX INDEX VALID
HR REG_ID_PK INDEX VALID
HR JOB_ID_PK INDEX VALID
HR JHIST_EMP_ID_ST_DATE_PK INDEX VALID
HR JHIST_EMPLOYEE_IX INDEX VALID
HR JHIST_DEPARTMENT_IX INDEX VALID
HR JHIST_JOB_IX INDEX VALID
HR EMP_EMAIL_UK INDEX VALID
HR SECURE_EMPLOYEES TRIGGER VALID
HR UPDATE_JOB_HISTORY TRIGGER VALID
34 rows selected.
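Beyond the object listing, two quick sanity checks are worth running: make sure nothing imported as INVALID, and compare a row count against the import log (which reported 107 rows for HR.EMPLOYEES):
SELECT object_name, object_type FROM dba_objects WHERE owner = 'HR' AND status = 'INVALID';
SELECT COUNT(*) FROM hr.employees;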
upload files from DATA_PUMP_DIR to S3
SELECT rdsadmin.rdsadmin_s3_tasks.upload_to_s3(
p_bucket_name => 'demodatapump',
p_directory_name => 'DATA_PUMP_DIR')
AS TASK_ID FROM DUAL;
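upload_to_s3 also runs asynchronously and returns a task ID; its progress can be read from the matching BDUMP log, the same way as the download task earlier. The file name below uses a placeholder, so substitute the task ID returned by the query above:
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('BDUMP','dbtask-<task_id>.log'));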
check the alert log on the RDS instance
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('BDUMP','alert_ORCL.log'));