Package to export APEX workspaces, applications and parsing schemas
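The p_credential_name parameter in both procedures refers to a DBMS_CLOUD credential that must already exist in the database. A minimal setup sketch is shown below; the credential name, user name and auth token are placeholders, not values used by the package itself.

begin
    dbms_cloud.create_credential(
        credential_name => 'BACKUP_CREDENTIAL' -- placeholder credential name
        ,username => 'oci_user@example.com'    -- placeholder OCI user name
        ,password => '<auth token>'            -- placeholder OCI auth token
    );
end;
/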
create or replace
package "BACKUP_API"
authid current_user
as
--------------------------------------------------------------------------------
procedure export_apex_workspaces(
p_credential_name in varchar2,
p_region in varchar2,
p_namespace in varchar2,
p_bucket in varchar2,
p_workspace in varchar2 default null
);
--------------------------------------------------------------------------------
procedure import_schema(
p_credential_name in varchar2,
p_region in varchar2,
p_namespace in varchar2,
p_bucket in varchar2,
p_schema in varchar2,
p_new_schema in varchar2 default null
);
--------------------------------------------------------------------------------
end "BACKUP_API";
/
create or replace
package body "BACKUP_API"
as
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- Private constants and variables.
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- OCI object storage URL template.
c_object_uri constant varchar2(256) := 'https://objectstorage.%s.oraclecloud.com/n/%s/b/%s/o/%s';
-- Database directory where export is created.
g_directory constant varchar2(256) := 'DATA_PUMP_DIR';
-- Object storage variables.
g_credential_name varchar2(256);
g_region varchar2(256);
g_namespace varchar2(256);
g_bucket varchar2(256);
-- APEX workspace export variables.
g_workspace_name varchar2(256);
g_workspace_id number;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- Private procedures and functions.
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
function get_object_url(
p_file_name varchar2
) return varchar2
as
begin
-- Construct and return object storage URL.
return
apex_string.format(
p_message => c_object_uri
,p0 => g_region
,p1 => g_namespace
,p2 => g_bucket
,p3 => p_file_name
)
;
end get_object_url;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
function get_apex_object_uri(
p_file_name varchar2
) return varchar2
as
l_file_name varchar2(256);
begin
-- APEX export zip file name with prefix.
l_file_name :=
apex_string.format(
p_message => 'apex/%s/%s'
,p0 => g_workspace_name
,p1 => replace( p_file_name, '.sql', '.zip' )
)
;
-- Return file object storage URL.
return
get_object_url(
p_file_name => l_file_name
)
;
end get_apex_object_uri;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure file_to_bucket(
p_file_name in varchar2,
p_object_uri in varchar2
)
as
begin
-- Copy file from database directory to bucket.
dbms_cloud.put_object(
credential_name => g_credential_name
,file_name => p_file_name
,directory_name => g_directory
,object_uri => p_object_uri
);
-- Delete file from database directory.
dbms_cloud.delete_file(
directory_name => g_directory
,file_name => p_file_name
);
end file_to_bucket;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure export_application(
p_application_id in number
)
as
l_object_uri varchar2(1024);
l_files apex_t_export_files;
l_zip blob;
begin
-- Export application.
l_files :=
apex_export.get_application(
p_application_id => p_application_id
)
;
-- Zip application export.
l_zip :=
apex_export.zip(
p_source_files => l_files
)
;
-- File URL.
l_object_uri := get_apex_object_uri( l_files(1).name );
-- Place application export into object storage bucket.
dbms_cloud.put_object(
credential_name => g_credential_name
,contents => l_zip
,object_uri => l_object_uri
);
end export_application;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure export_workspace
as
l_object_uri varchar2(1024);
l_files apex_t_export_files;
l_zip blob;
begin
-- Export workspace.
l_files :=
apex_export.get_workspace(
p_workspace_id => g_workspace_id
)
;
-- Zip workspace export.
l_zip :=
apex_export.zip(
p_source_files => l_files
)
;
-- File URL.
l_object_uri := get_apex_object_uri( l_files(1).name );
-- Place workspace export into object storage bucket.
dbms_cloud.put_object(
credential_name => g_credential_name
,contents => l_zip
,object_uri => l_object_uri
);
end export_workspace;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure export_workspace_files
as
l_object_uri varchar2(1024);
l_files apex_t_export_files;
l_zip blob;
begin
-- Export workspace files.
l_files :=
apex_export.get_workspace_files(
p_workspace_id => g_workspace_id
)
;
-- Zip workspace files export.
l_zip :=
apex_export.zip(
p_source_files => l_files
)
;
-- File URL.
l_object_uri := get_apex_object_uri( l_files(1).name );
-- Place workspace files export into object storage bucket.
dbms_cloud.put_object(
credential_name => g_credential_name
,contents => l_zip
,object_uri => l_object_uri
);
end export_workspace_files;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure export_schema(
p_schema in varchar2
)
as
l_dp_handle number;
l_job_state varchar2(256);
l_dmp_file_name varchar2(256);
l_log_file_name varchar2(256);
l_object_uri varchar2(256);
begin
-- Object storage URL.
l_object_uri :=
get_object_url(
p_file_name => 'datapump/'
)
;
-- Dump file name.
l_dmp_file_name :=
apex_string.format(
p_message => '%s.dmp'
,p0 => lower( p_schema )
)
;
-- Log file name.
l_log_file_name :=
apex_string.format(
p_message => '%s.log'
,p0 => lower( p_schema )
)
;
-- Define export job.
-- Open a schema export job.
l_dp_handle :=
dbms_datapump.open(
operation => 'EXPORT'
,job_mode => 'SCHEMA'
)
;
-- Set the dump file name and directory.
dbms_datapump.add_file(
handle => l_dp_handle
,reusefile => 1
,directory => g_directory
,filetype => dbms_datapump.ku$_file_type_dump_file
,filename => l_dmp_file_name
);
-- Set the log file name and directory.
dbms_datapump.add_file(
handle => l_dp_handle
,reusefile => 1
,directory => g_directory
,filetype => dbms_datapump.ku$_file_type_log_file
,filename => l_log_file_name
);
-- Set the schema to be exported.
dbms_datapump.metadata_filter(
handle => l_dp_handle
,name => 'SCHEMA_LIST'
,value =>
apex_string.format(
p_message => '''%s'''
,p0 => upper( p_schema )
)
);
-- Exclude statistics from export.
dbms_datapump.metadata_filter(
handle => l_dp_handle
,name => 'EXCLUDE_PATH_LIST'
,value => '''STATISTICS'''
);
-- Start export job.
dbms_datapump.start_job(
handle => l_dp_handle
);
-- Wait for export job to complete.
dbms_datapump.wait_for_job(
handle => l_dp_handle
,job_state => l_job_state
);
-- Detach export job.
dbms_datapump.detach(
handle => l_dp_handle
);
-- Move files to object storage bucket.
-- Export to bucket.
file_to_bucket(
p_file_name => l_dmp_file_name
,p_object_uri => l_object_uri || l_dmp_file_name
);
-- Log to bucket.
file_to_bucket(
p_file_name => l_log_file_name
,p_object_uri => l_object_uri || l_log_file_name
);
exception when others
then
-- If an error occurs, detach the export job.
begin
dbms_datapump.detach( l_dp_handle );
exception when others
then
null;
end;
raise;
end export_schema;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- Global functions and procedures.
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure export_apex_workspaces(
p_credential_name in varchar2,
p_region in varchar2,
p_namespace in varchar2,
p_bucket in varchar2,
p_workspace in varchar2 default null
)
as
begin
-- Set object storage related private variables.
g_credential_name := p_credential_name;
g_region := p_region;
g_namespace := p_namespace;
g_bucket := p_bucket;
-- Loop workspaces and get id and name.
for c1 in(
select
v1.workspace
,v1.workspace_id
from apex_workspaces v1
where 1 = 1
and v1.workspace = coalesce( upper( p_workspace ), v1.workspace )
and exists(
select 1
from apex_applications x1
where 1 = 1
and x1.owner != apex_application.g_flow_schema_owner
and x1.workspace_id = v1.workspace_id
)
) loop
-- Set workspace related private variables.
g_workspace_name := lower( c1.workspace );
g_workspace_id := c1.workspace_id;
-- Set workspace id.
apex_util.set_security_group_id(
p_security_group_id => g_workspace_id
);
-- Export workspace.
export_workspace;
-- Export workspace files.
export_workspace_files;
-- Loop workspace applications.
for c2 in(
select
v1.application_id
from apex_applications v1
where 1 = 1
and v1.workspace_id = g_workspace_id
) loop
-- Export application.
export_application(
p_application_id => c2.application_id
);
end loop;
end loop;
-- Loop all application owners (schemas).
for c1 in(
select distinct
v1.owner
from apex_applications v1
where 1 = 1
and v1.owner != apex_application.g_flow_schema_owner
and v1.workspace = coalesce( upper( p_workspace ), v1.workspace )
) loop
-- Export schema.
export_schema(
p_schema => c1.owner
);
end loop;
end export_apex_workspaces;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
procedure import_schema(
p_credential_name in varchar2,
p_region in varchar2,
p_namespace in varchar2,
p_bucket in varchar2,
p_schema in varchar2,
p_new_schema in varchar2 default null
)
as
l_dp_handle number;
l_job_state varchar2(256);
l_dmp_file_name varchar2(256);
l_log_file_name varchar2(256);
l_object_uri varchar2(256);
begin
-- Set object storage related private variables.
g_credential_name := p_credential_name;
g_region := p_region;
g_namespace := p_namespace;
g_bucket := p_bucket;
-- Object storage URL.
l_object_uri :=
get_object_url(
p_file_name => 'datapump/'
)
;
-- Dump file name.
l_dmp_file_name :=
apex_string.format(
p_message => '%s.dmp'
,p0 => lower( p_schema )
)
;
-- Log file name.
l_log_file_name :=
apex_string.format(
p_message => 'import_%s.log'
,p0 => lower( coalesce( p_new_schema, p_schema ) )
)
;
-- Define import job.
-- Open a schema import job.
l_dp_handle :=
dbms_datapump.open(
operation => 'IMPORT'
,job_mode => 'SCHEMA'
)
;
-- Set the dump file name and credential.
dbms_datapump.add_file(
handle => l_dp_handle
,reusefile => 1
,directory => g_credential_name
,filetype => dbms_datapump.ku$_file_type_uridump_file
,filename => l_object_uri || l_dmp_file_name
);
-- Set the log file name and directory.
dbms_datapump.add_file(
handle => l_dp_handle
,reusefile => 1
,directory => g_directory
,filetype => dbms_datapump.ku$_file_type_log_file
,filename => l_log_file_name
);
-- Set the schema to be imported.
dbms_datapump.metadata_filter(
handle => l_dp_handle
,name => 'SCHEMA_LIST'
,value =>
apex_string.format(
p_message => '''%s'''
,p0 => upper( p_schema )
)
);
if p_new_schema is not null
then
dbms_datapump.metadata_remap(
handle => l_dp_handle
,name => 'REMAP_SCHEMA'
,old_value => upper( p_schema )
,value => upper( p_new_schema )
);
end if;
-- Start import job.
dbms_datapump.start_job(
handle => l_dp_handle
);
-- Wait for import job to complete.
dbms_datapump.wait_for_job(
handle => l_dp_handle
,job_state => l_job_state
);
-- Detach import job.
dbms_datapump.detach(
handle => l_dp_handle
);
-- Log to bucket.
file_to_bucket(
p_file_name => l_log_file_name
,p_object_uri => l_object_uri || l_log_file_name
);
exception when others
then
-- If an error occurs, detach the import job.
begin
dbms_datapump.detach( l_dp_handle );
exception when others
then
null;
end;
raise;
end import_schema;
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
end "BACKUP_API";
/
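Example usage, as a sketch: the credential, region, namespace, bucket and schema names below are placeholders and must be replaced with your own values.

-- Export all APEX workspaces, their applications and parsing schemas.
begin
    backup_api.export_apex_workspaces(
        p_credential_name => 'BACKUP_CREDENTIAL' -- placeholder credential
        ,p_region => 'eu-frankfurt-1'            -- placeholder region
        ,p_namespace => 'mynamespace'            -- placeholder namespace
        ,p_bucket => 'apex-backups'              -- placeholder bucket
    );
end;
/
-- Import a previously exported schema dump from the bucket, optionally remapping it to a new schema.
begin
    backup_api.import_schema(
        p_credential_name => 'BACKUP_CREDENTIAL'
        ,p_region => 'eu-frankfurt-1'
        ,p_namespace => 'mynamespace'
        ,p_bucket => 'apex-backups'
        ,p_schema => 'MY_SCHEMA'                 -- placeholder source schema
        ,p_new_schema => 'MY_SCHEMA_COPY'        -- optional placeholder target schema
    );
end;
/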