Get complete DDL for a schema

There are plenty of tools out there that you can use to get DDL scripts for various objects in the database. However, a lot of them are not ideally suited to automation, because they are driven by wizards or similar developer-driven input. Often we have home-grown scripts that cycle through USER_OBJECTS, but with those it can be challenging to ensure that everything is captured.

(For example, did you remember to collect all the scheduler jobs, programs, arguments, windows, etc etc etc?)
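
For the record, here is a minimal sketch of that home-grown style, walking USER_OBJECTS and asking DBMS_METADATA for each object's DDL. It is illustrative only; note the REPLACE needed because USER_OBJECTS reports types like 'PACKAGE BODY' whereas DBMS_METADATA.GET_DDL expects 'PACKAGE_BODY', and the WHERE clause is exactly the kind of hand-maintained list that silently goes stale:

-- home-grown style (illustrative sketch only)
select dbms_metadata.get_ddl(replace(object_type,' ','_'), object_name) ddl
from   user_objects
where  object_type in ('TABLE','VIEW','SEQUENCE',
                       'PROCEDURE','FUNCTION','PACKAGE','PACKAGE BODY');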

As we know, the mechanism via which we copy an entire schema is DataPump, so if we use that, we can be sure that we are grabbing everything. But of course, that normally means jumping out to the command line, which is not always possible. However, we can use the DBMS_DATAPUMP package to build a routine that gets the DDL for an entire schema from directly inside the database.

Here’s an example of such a routine that I wrote to unload the DDL for a schema. It also allows you to define a subset of object types if you need only some of them, and to remap the schema name to a target schema. Whereas a ‘typical’ DataPump run on the command line would use the SQLFILE parameter to get a script, here we nominate an output file of type DBMS_DATAPUMP.KU$_FILE_TYPE_SQL_FILE, which results in a SQL script being generated rather than an export dump file.
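
(For comparison, the command-line equivalent, assuming the dump file already exists, would be something along the lines of:

impdp scott directory=TEMP dumpfile=scott.dmp sqlfile=scott.dmp.sql

but the whole point of this exercise is that we never have to leave the database.)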

SQL> create or replace
  2  procedure dmpfile_to_script(
  3                      p_dumpfile varchar2,
  4                      p_dumpdir  varchar2,
  5                      p_jobname  varchar2,
  6                      p_schema   varchar2,
  7                      p_new_schema varchar2 default null) is
  8
  9    l_jobid     number;
 10    l_job_state varchar2(30);
 11
 12    --
 13    -- list of object types we'd allow from the dump
 14    --
 15    l_obj_nt    sys.odcivarchar2list :=
 16                   sys.odcivarchar2list(
 17                      'ANALYTIC_VIEW',
 18                      'ATTRIBUTE_DIMENSION',
 19                      'CLUSTER',
 20                      'DIMENSION',
 21                      'FUNCTION',
 22                      'HIERARCHY',
 23                      'MATERIALIZED_VIEW',
 24                      'PACKAGE',
 25                      'PROCEDURE',
 26                      'REFRESH_GROUP',
 27                      'SEQUENCE',
 28                      'TABLE',
 29                      'TYPE',
 30                      'VIEW');
 31
 32    l_obj_types varchar2(4000);
 33  begin
 34    l_jobid := dbms_datapump.open(
 35      operation   => 'SQL_FILE',
 36      job_mode    => 'SCHEMA',
 37      job_name    => upper(p_jobname)
 38      );
 39
 40    dbms_datapump.add_file(
 41      handle    => l_jobid,
 42      filename  => p_dumpfile,
 43      directory => p_dumpdir);
 44
 45    dbms_datapump.add_file(
 46      handle    => l_jobid,
 47      filename  => p_dumpfile||'.log',
 48      directory => p_dumpdir,
 49      filetype  => dbms_datapump.ku$_file_type_log_file);
 50
 51    dbms_datapump.add_file(
 52      handle    => l_jobid,
 53      filename  => p_dumpfile||'.sql',
 54      directory => p_dumpdir,
 55      filetype  => dbms_datapump.ku$_file_type_sql_file);
 56
 57    -- just in case they give us multiple schemas or a full export
 58
 59    dbms_datapump.metadata_filter(
 60      handle => l_jobid,
 61      name   => 'SCHEMA_EXPR',
 62      value  => '= '''||p_schema||'''');
 63
 64    -- remap to the target schema only if one was nominated
 65    if p_new_schema is not null then
 66      dbms_datapump.metadata_remap(l_jobid,
 67                                   'REMAP_SCHEMA',
 68                                   p_schema,
 69                                   p_new_schema);
 70    end if;
 71
 72    -- don't need storage params
 73
 74    dbms_datapump.metadata_transform(
 75      handle      => l_jobid,
 76      name        => 'STORAGE',
 77      value       => 0);
 78
 79    -- don't need tablespace
 80
 81    dbms_datapump.metadata_transform(
 82      handle      => l_jobid,
 83      name        => 'SEGMENT_ATTRIBUTES',
 84      value       => 0);
 85
 86    -- filter the list of valid object types we'll permit
 87
 88    l_obj_types := 'IN ('''||l_obj_nt(1)||'''';
 89    for i in 2 .. l_obj_nt.count
 90    loop
 91      l_obj_types := l_obj_types || ','''||l_obj_nt(i)||'''';
 92    end loop;
 93    l_obj_types := l_obj_types || ')';
 94
 95    dbms_datapump.metadata_filter(
 96      handle => l_jobid,
 97      name => 'INCLUDE_PATH_EXPR',
 98      value => l_obj_types
 99    );
100
101    dbms_datapump.start_job(l_jobid);
102
103    dbms_datapump.wait_for_job(
104       handle => l_jobid,
105       job_state => l_job_state);
106
107    dbms_datapump.detach(l_jobid);
108  end;
109  /

Procedure created.
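
Note that the procedure reads from an existing export dump file. If you do not already have one, a standard DBMS_DATAPUMP schema export along these lines will produce it (just a sketch, using the same TEMP directory and SCOTT schema as the demo below):

declare
  l_jobid     number;
  l_job_state varchar2(30);
begin
  -- conventional schema-mode export to produce scott.dmp
  l_jobid := dbms_datapump.open(
    operation => 'EXPORT',
    job_mode  => 'SCHEMA',
    job_name  => 'SCOTT_EXP');

  dbms_datapump.add_file(
    handle    => l_jobid,
    filename  => 'scott.dmp',
    directory => 'TEMP');

  dbms_datapump.metadata_filter(
    handle => l_jobid,
    name   => 'SCHEMA_EXPR',
    value  => '= ''SCOTT''');

  dbms_datapump.start_job(l_jobid);
  dbms_datapump.wait_for_job(l_jobid, l_job_state);
  dbms_datapump.detach(l_jobid);
end;
/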

SQL>
SQL> begin
  2    dmpfile_to_script(
  3      p_dumpfile   =>'scott.dmp',
  4      p_dumpdir    =>'TEMP',
  5      p_jobname    =>'SCOTT_DP',
  6      p_schema     =>'SCOTT',
  7      p_new_schema =>'SCOTT2'
  8      );
  9  end;
 10  /

PL/SQL procedure successfully completed.

SQL>
SQL> host cat c:\temp\scott.dmp.sql

ALTER SESSION SET EVENTS '10150 TRACE NAME CONTEXT FOREVER, LEVEL 1';
ALTER SESSION SET EVENTS '10904 TRACE NAME CONTEXT FOREVER, LEVEL 1';
ALTER SESSION SET EVENTS '25475 TRACE NAME CONTEXT FOREVER, LEVEL 1';
ALTER SESSION SET EVENTS '10407 TRACE NAME CONTEXT FOREVER, LEVEL 1';
ALTER SESSION SET EVENTS '10851 TRACE NAME CONTEXT FOREVER, LEVEL 1';
ALTER SESSION SET EVENTS '22830 TRACE NAME CONTEXT FOREVER, LEVEL 192 ';
-- new object type path: SCHEMA_EXPORT/TABLE/TABLE
CREATE TABLE "SCOTT2"."SALGRADE"
   (    "GRADE" NUMBER,
        "LOSAL" NUMBER,
        "HISAL" NUMBER
   )  DEFAULT COLLATION "USING_NLS_COMP" ;
CREATE TABLE "SCOTT2"."DEPT"
   (    "DEPTNO" NUMBER(2,0),
        "DNAME" VARCHAR2(14 BYTE) COLLATE "USING_NLS_COMP",
        "LOC" VARCHAR2(13 BYTE) COLLATE "USING_NLS_COMP"
   )  DEFAULT COLLATION "USING_NLS_COMP" ;
CREATE TABLE "SCOTT2"."EMP"
   (    "EMPNO" NUMBER(4,0) NOT NULL ENABLE,
        "ENAME" VARCHAR2(10 BYTE) COLLATE "USING_NLS_COMP",
        "JOB" VARCHAR2(9 BYTE) COLLATE "USING_NLS_COMP",
        "MGR" NUMBER(4,0),
        "HIREDATE" DATE,
        "SAL" NUMBER(7,2),
        "COMM" NUMBER(7,2),
        "DEPTNO" NUMBER(2,0)
   )  DEFAULT COLLATION "USING_NLS_COMP" ;
CREATE TABLE "SCOTT2"."BONUS"
   (    "ENAME" VARCHAR2(10 BYTE) COLLATE "USING_NLS_COMP",
        "JOB" VARCHAR2(9 BYTE) COLLATE "USING_NLS_COMP",
        "SAL" NUMBER,
        "COMM" NUMBER
   )  DEFAULT COLLATION "USING_NLS_COMP" ;
-- new object type path: SCHEMA_EXPORT/TABLE/CONSTRAINT/CONSTRAINT
ALTER TABLE "SCOTT2"."DEPT" ADD CONSTRAINT "DEPT_PK" PRIMARY KEY ("DEPTNO")
  USING INDEX  ENABLE;
ALTER TABLE "SCOTT2"."EMP" ADD CONSTRAINT "EMP_PK" PRIMARY KEY ("EMPNO")
  USING INDEX  ENABLE;
-- new object type path: SCHEMA_EXPORT/TABLE/INDEX/STATISTICS/INDEX_STATISTICS
-- new object type path: SCHEMA_EXPORT/TABLE/CONSTRAINT/REF_CONSTRAINT
ALTER TABLE "SCOTT2"."EMP" ADD CONSTRAINT "EMP_FK" FOREIGN KEY ("DEPTNO")
          REFERENCES "SCOTT2"."DEPT" ("DEPTNO") ENABLE;
-- new object type path: SCHEMA_EXPORT/TABLE/STATISTICS/TABLE_STATISTICS
-- new object type path: SCHEMA_EXPORT/STATISTICS/MARKER
-- fixup virtual columns...
-- done fixup virtual columns
...
...

This is easily customised to whatever your particular requirements are, simply by following the documentation for DBMS_DATAPUMP.
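
For example, if you only wanted the tables whose names match a particular pattern, an extra filter alongside the existing INCLUDE_PATH_EXPR one would do the trick (a sketch; the 'EMP%' pattern is purely for illustration):

  -- only emit DDL for tables whose names start with EMP
  dbms_datapump.metadata_filter(
    handle      => l_jobid,
    name        => 'NAME_EXPR',
    object_path => 'TABLE',
    value       => 'LIKE ''EMP%''');

Enjoy!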