Currently I do this manually: run the export, upload the dump to an S3 bucket, and delete the old dumps.
能否有人帮忙将这个流程自动化？
1) 导出架构 ICO_AV_PRD_OWR 的脚本
-- Schema-mode Data Pump export of ICO_AV_PRD_OWR into the DATA_PUMP_DIR
-- directory object. Writes dump.dmp (data) and dump.log (job log).
DECLARE
  hdnl      NUMBER;          -- Data Pump job handle
  job_state VARCHAR2(4000);  -- final job state reported by WAIT_FOR_JOB
BEGIN
  -- version => 12 keeps the dump file importable by 12c-compatible targets.
  hdnl := DBMS_DATAPUMP.OPEN(
            operation => 'EXPORT',
            job_mode  => 'SCHEMA',
            job_name  => NULL,
            version   => 12);

  -- Dump file and log file both go to the DATA_PUMP_DIR directory object.
  DBMS_DATAPUMP.ADD_FILE(
    handle    => hdnl,
    filename  => 'dump.dmp',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);
  DBMS_DATAPUMP.ADD_FILE(
    handle    => hdnl,
    filename  => 'dump.log',
    directory => 'DATA_PUMP_DIR',
    filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);

  -- BUG FIX: the original filtered on the literal placeholder 'schemaname';
  -- filter on the schema this runbook actually exports (see heading above).
  DBMS_DATAPUMP.METADATA_FILTER(hdnl, 'SCHEMA_EXPR', 'IN (''ICO_AV_PRD_OWR'')');

  DBMS_DATAPUMP.START_JOB(hdnl);

  -- Block until the export completes so step 2 (S3 upload) never copies a
  -- partially written dump file.
  DBMS_DATAPUMP.WAIT_FOR_JOB(hdnl, job_state);
EXCEPTION
  WHEN OTHERS THEN
    -- Stop the job on failure so it does not linger, then surface the error.
    DBMS_DATAPUMP.STOP_JOB(hdnl);
    RAISE;
END;
/
2) 将转储文件复制到 S3 存储桶
-- List the files in DATA_PUMP_DIR (newest last) to confirm the dump exists
-- before uploading. NOTE: the original wrapped the trailing comment so that
-- "listing the files" sat on its own uncommented line — a syntax error when
-- the script is executed; the comment now lives on its own line.
set lines 399 pages 999
col filename for a45
select * from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) order by mtime;
-- Upload every file in DATA_PUMP_DIR to the S3 bucket. Returns an RDS task id
-- that can be used to track the transfer.
SELECT rdsadmin.rdsadmin_s3_tasks.upload_to_s3(
           p_bucket_name    => 'bucketname',
           p_directory_name => 'DATA_PUMP_DIR'
       ) AS task_id
FROM dual;
3) 从 RDS 中删除转储文件（注意：第二条命令的关键字 exec 被误译成了“执行”，此处已恢复）：
exec utl_file.fremove('DATA_PUMP_DIR','dump.dmp');
exec utl_file.fremove('DATA_PUMP_DIR','dump.log');