Import schema in AWS Oracle RDS
==============================================
Environment Details:
-------------------------------
Schema Name : TEST
Source (Normal)
-----------------
Database Name : india
Database Version : 19c
Platform : oracle linux
Target (RDS)
--------------
Database Name : orcl
Database version : 19c
Platform : AWS Oracle RDS
Step 1 : perform schema export
expdp '/ as sysdba' directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
example
[oracle@anurag ~]$ expdp directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
Export: Release 19.0.0.0.0 - Production on Thu May 28 17:21:14 2020
Version 19.3.0.0.0
Copyright (c) 1982, 2019, Oracle and/or its affiliates. All rights reserved.
Username: / as sysdba
Password:
Connected to: Oracle Database 19c Enterprise Edition Release 19.0.0.0.0 - Production
Starting "SYS"."SYS_EXPORT_SCHEMA_01": /******** AS SYSDBA directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
Processing object type SCHEMA_EXPORT/TABLE/TABLE_DATA
Processing object type SCHEMA_EXPORT/TABLE/STATISTICS/TABLE_STATISTICS
Processing object type SCHEMA_EXPORT/STATISTICS/MARKER
Processing object type SCHEMA_EXPORT/USER
Processing object type SCHEMA_EXPORT/SYSTEM_GRANT
Processing object type SCHEMA_EXPORT/ROLE_GRANT
Processing object type SCHEMA_EXPORT/DEFAULT_ROLE
Processing object type SCHEMA_EXPORT/PRE_SCHEMA/PROCACT_SCHEMA
Processing object type SCHEMA_EXPORT/TABLE/TABLE
. . exported "TEST"."TEST" 5.539 KB 2 rows
Master table "SYS"."SYS_EXPORT_SCHEMA_01" successfully loaded/unloaded
******************************************************************************
Dump file set for SYS.SYS_EXPORT_SCHEMA_01 is:
/u01/oracle/admin/india/dpdump/test_schema.dmp
Job "SYS"."SYS_EXPORT_SCHEMA_01" successfully completed at Thu May 28 17:21:53 2020 elapsed 0 00:00:29
[oracle@anurag ~]$
We can also perform the export using the DBMS_DATAPUMP API:
login as sysdba
DECLARE
hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'EXPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => 'sample.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'exp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file); DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
example :
SQL>
SQL> DECLARE
2 hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'EXPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => 'sample.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file); 3
4 5 6 DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'exp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file); DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/ 7 8 9
PL/SQL procedure successfully completed.
SQL>
SQL>
Step 2: Add the RDS database TNS entry to the TNSNAMES.ora file, and validate it using tnsping
ORCL =
(DESCRIPTION =
(ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521))
(CONNECT_DATA =
(SERVER = DEDICATED)
(SERVICE_NAME = orcl)
(UR = A)
)
)
Step 3: Create a database link using the AWS RDS database endpoint (create it as public or private per your requirement)
create public database link to_rds19c connect to "username" identified by "password"
USING '(DESCRIPTION = (ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521)) (CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = orcl)))';
SQL> create public database link to_rds19c connect to "username" identified by "password"
USING '(DESCRIPTION = (ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521)) (CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = orcl)))';
2
Database link created.
Validate that the created database link is working:
SQL>
SQL>
SQL> select * from dual@to_rds19c;
D
-
X
SQL>
SQL>
Step 4: Transfer the dump file to the AWS RDS orcl database over the database link using the DBMS_FILE_TRANSFER API below
BEGIN
DBMS_FILE_TRANSFER.PUT_FILE(
source_directory_object => 'DATA_PUMP_DIR',
source_file_name => 'sample.dmp',
destination_directory_object => 'DATA_PUMP_DIR',
destination_file_name => 'sample_copied.dmp',
destination_database => 'to_rds19c'
);
END;
/
SQL>
SQL> BEGIN
2 DBMS_FILE_TRANSFER.PUT_FILE(
source_directory_object => 'DATA_PUMP_DIR',
source_file_name => 'sample.dmp',
destination_directory_object => 'DATA_PUMP_DIR',
destination_file_name => 'sample_copied.dmp',
destination_database 3 => 'to_rds19c'
);
END;
4 5 6 7 8 9 10 /
PL/SQL procedure successfully completed.
SQL>
SQL>
Step 5: On the AWS RDS database, validate the dump file you transferred using the command below
select * from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) order by mtime;
Step 6: Create the schema in the target AWS database (orcl) with the required privileges, then perform the import using the DBMS_DATAPUMP API
DECLARE
hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'IMPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'sample_copied.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'sample_imp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
Step 7: Monitor the import log using the command below
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','sample_imp.log'))
* You have to enable the port for access in the AWS security group.
https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-ug.pdf
==============================================
Environment Details:
-------------------------------
Schema Name : TEST
Source (Normal)
-----------------
Database Name : india
Database Version : 19c
Platform : oracle linux
Target (RDS)
--------------
Database Name : orcl
Database version : 19c
Platform : AWS Oracle RDS
Step 1 : perform schema export
expdp '/ as sysdba' directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
example
[oracle@anurag ~]$ expdp directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
Export: Release 19.0.0.0.0 - Production on Thu May 28 17:21:14 2020
Version 19.3.0.0.0
Copyright (c) 1982, 2019, Oracle and/or its affiliates. All rights reserved.
Username: / as sysdba
Password:
Connected to: Oracle Database 19c Enterprise Edition Release 19.0.0.0.0 - Production
Starting "SYS"."SYS_EXPORT_SCHEMA_01": /******** AS SYSDBA directory=DATA_PUMP_DIR dumpfile=test_schema.dmp LOGFILE=test_schema.log schemas=test
Processing object type SCHEMA_EXPORT/TABLE/TABLE_DATA
Processing object type SCHEMA_EXPORT/TABLE/STATISTICS/TABLE_STATISTICS
Processing object type SCHEMA_EXPORT/STATISTICS/MARKER
Processing object type SCHEMA_EXPORT/USER
Processing object type SCHEMA_EXPORT/SYSTEM_GRANT
Processing object type SCHEMA_EXPORT/ROLE_GRANT
Processing object type SCHEMA_EXPORT/DEFAULT_ROLE
Processing object type SCHEMA_EXPORT/PRE_SCHEMA/PROCACT_SCHEMA
Processing object type SCHEMA_EXPORT/TABLE/TABLE
. . exported "TEST"."TEST" 5.539 KB 2 rows
Master table "SYS"."SYS_EXPORT_SCHEMA_01" successfully loaded/unloaded
******************************************************************************
Dump file set for SYS.SYS_EXPORT_SCHEMA_01 is:
/u01/oracle/admin/india/dpdump/test_schema.dmp
Job "SYS"."SYS_EXPORT_SCHEMA_01" successfully completed at Thu May 28 17:21:53 2020 elapsed 0 00:00:29
[oracle@anurag ~]$
We can also perform the export using the DBMS_DATAPUMP API:
login as sysdba
DECLARE
hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'EXPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => 'sample.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'exp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file); DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
example :
SQL>
SQL> DECLARE
2 hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'EXPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE(handle => hdnl, filename => 'sample.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file); 3
4 5 6 DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'exp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file); DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/ 7 8 9
PL/SQL procedure successfully completed.
SQL>
SQL>
Step 2: Add the RDS database TNS entry to the TNSNAMES.ora file, and validate it using tnsping
ORCL =
(DESCRIPTION =
(ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521))
(CONNECT_DATA =
(SERVER = DEDICATED)
(SERVICE_NAME = orcl)
(UR = A)
)
)
Step 3: Create a database link using the AWS RDS database endpoint (create it as public or private per your requirement)
create public database link to_rds19c connect to "username" identified by "password"
USING '(DESCRIPTION = (ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521)) (CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = orcl)))';
SQL> create public database link to_rds19c connect to "username" identified by "password"
USING '(DESCRIPTION = (ADDRESS = (PROTOCOL = TCP)(HOST = aws_rds_db_orcl_endpoint_name)(PORT = 1521)) (CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = orcl)))';
2
Database link created.
Validate that the created database link is working:
SQL>
SQL>
SQL> select * from dual@to_rds19c;
D
-
X
SQL>
SQL>
Step 4: Transfer the dump file to the AWS RDS orcl database over the database link using the DBMS_FILE_TRANSFER API below
BEGIN
DBMS_FILE_TRANSFER.PUT_FILE(
source_directory_object => 'DATA_PUMP_DIR',
source_file_name => 'sample.dmp',
destination_directory_object => 'DATA_PUMP_DIR',
destination_file_name => 'sample_copied.dmp',
destination_database => 'to_rds19c'
);
END;
/
SQL>
SQL> BEGIN
2 DBMS_FILE_TRANSFER.PUT_FILE(
source_directory_object => 'DATA_PUMP_DIR',
source_file_name => 'sample.dmp',
destination_directory_object => 'DATA_PUMP_DIR',
destination_file_name => 'sample_copied.dmp',
destination_database 3 => 'to_rds19c'
);
END;
4 5 6 7 8 9 10 /
PL/SQL procedure successfully completed.
SQL>
SQL>
Step 5: On the AWS RDS database, validate the dump file you transferred using the command below
select * from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) order by mtime;
Step 6: Create the schema in the target AWS database (orcl) with the required privileges, then perform the import using the DBMS_DATAPUMP API
DECLARE
hdnl NUMBER;
BEGIN
hdnl := DBMS_DATAPUMP.OPEN( operation => 'IMPORT', job_mode => 'SCHEMA', job_name=>null);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'sample_copied.dmp', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_dump_file);
DBMS_DATAPUMP.ADD_FILE( handle => hdnl, filename => 'sample_imp.log', directory => 'DATA_PUMP_DIR', filetype => dbms_datapump.ku$_file_type_log_file);
DBMS_DATAPUMP.METADATA_FILTER(hdnl,'SCHEMA_EXPR','IN (''TEST'')');
DBMS_DATAPUMP.START_JOB(hdnl);
END;
/
Step 7: Monitor the import log using the command below
SELECT text FROM table(rdsadmin.rds_file_util.read_text_file('DATA_PUMP_DIR','sample_imp.log'))
* You have to enable the port for access in the AWS security group.
https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-ug.pdf
This was an authentic and useful piece of information. Thank you for giving this useful content.
ReplyDeleteHow To Hack Emails
How To Hack an Email Account
ReplyDeleteThanks for this blog keep sharing your thoughts like this...
Introduction of CC++
What is C++ Programming Language
Great post. keep sharing such a worthy information.
ReplyDeleteDigital Marketing Training in Chennai