
Data Pump Overview

Oracle Data Pump is the fastest and most flexible way to move data and metadata into and out of Oracle databases. It provides significant performance improvements over the legacy exp/imp utilities through parallel processing and direct path operations.

Data Pump is Oracle’s high-performance data movement utility that includes:

  • expdp - Export Data Pump utility
  • impdp - Import Data Pump utility
  • DBMS_DATAPUMP - PL/SQL API for programmatic access (see the sketch after this list)
  • Enterprise Manager integration - GUI interface
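The DBMS_DATAPUMP API deserves a quick illustration. Below is a minimal PL/SQL sketch of a schema-mode export; the job name HR_API_EXPORT and the file names are made up for the example, and it assumes the dp_dir directory object created later in this overview:

-- Minimal DBMS_DATAPUMP schema export (run as a user with the
-- DATAPUMP_EXP_FULL_DATABASE role or as the schema owner)
SET SERVEROUTPUT ON
DECLARE
  h         NUMBER;
  job_state VARCHAR2(30);
BEGIN
  -- Open a schema-mode export job
  h := DBMS_DATAPUMP.OPEN(operation => 'EXPORT',
                          job_mode  => 'SCHEMA',
                          job_name  => 'HR_API_EXPORT');
  -- Dump file plus a separate log file, both in dp_dir
  DBMS_DATAPUMP.ADD_FILE(h, 'hr_api.dmp', 'DP_DIR');
  DBMS_DATAPUMP.ADD_FILE(h, 'hr_api.log', 'DP_DIR',
                         filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
  -- Restrict the job to the HR schema
  DBMS_DATAPUMP.METADATA_FILTER(h, 'SCHEMA_EXPR', 'IN (''HR'')');
  DBMS_DATAPUMP.START_JOB(h);
  -- Block until the job finishes, then report its final state
  DBMS_DATAPUMP.WAIT_FOR_JOB(h, job_state);
  DBMS_OUTPUT.PUT_LINE('Job finished with state: ' || job_state);
END;
/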
How Data Pump compares with the legacy exp/imp utilities:

Feature             Legacy exp/imp    Data Pump
-------             --------------    ---------
Performance         Single-threaded   Parallel processing
Restart Capability  None              Full restart support
Network Mode        Limited           Direct database-to-database
Filtering           Basic             Advanced row/metadata filtering
Compression         None              Built-in compression
Encryption          None              Transparent encryption
Monitoring          Limited           Comprehensive job monitoring
At a high level, the Data Pump architecture looks like this:

┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
│   Client Tool   │    │ Master Process  │    │ Worker Processes│
│  (expdp/impdp)  │◄──►│ (Coordination)  │◄──►│  (Parallel I/O) │
└─────────────────┘    └─────────────────┘    └─────────────────┘
         │                      │                      │
         ▼                      ▼                      ▼
┌───────────────────────────────────────────────────────────────┐
│                         Storage Layer                         │
│   ┌─────────────┐      ┌─────────────┐      ┌─────────────┐   │
│   │ Dump Files  │      │  Log Files  │      │  Database   │   │
│   │ (Data/Meta) │      │ (Progress)  │      │  (Source/   │   │
│   │             │      │             │      │   Target)   │   │
│   └─────────────┘      └─────────────┘      └─────────────┘   │
└───────────────────────────────────────────────────────────────┘
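The master/worker split is visible in the data dictionary. A quick sketch, assuming DBA privileges, that maps each Data Pump session of a running job to its role:

-- One MASTER session coordinates; WORKER sessions perform the parallel I/O
SELECT d.job_name, d.session_type, s.sid, s.serial#
FROM dba_datapump_sessions d
JOIN v$session s ON s.saddr = d.saddr;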

Data Pump requires Oracle directory objects for file operations:

-- Create directory object (as DBA)
CREATE OR REPLACE DIRECTORY dp_dir AS '/u01/app/oracle/datapump';
-- Grant permissions
GRANT READ, WRITE ON DIRECTORY dp_dir TO hr;
-- Check existing directories
SELECT directory_name, directory_path FROM dba_directories;

Data Pump operations run as database jobs:

-- Check running jobs
SELECT job_name, state, degree
FROM dba_datapump_jobs
WHERE state = 'EXECUTING';
# Attach to an existing job from the shell
expdp hr/password attach=HR_EXPORT_JOB
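A job stopped with stop_job (or terminated abnormally) keeps its master table, named after the job, in the job owner's schema. A sketch for spotting leftovers before cleaning them up by hand:

-- Defined-but-idle jobs are still holding a master table
SELECT owner_name, job_name, state
FROM dba_datapump_jobs
WHERE state = 'NOT RUNNING';
-- Once a job is confirmed abandoned, drop its master table, e.g.:
-- DROP TABLE hr.HR_EXPORT_JOB PURGE;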
# Export with 4 parallel processes
expdp hr/password directory=dp_dir dumpfile=hr_%U.dmp parallel=4
# Import with parallel processing
impdp hr/password directory=dp_dir dumpfile=hr_%U.dmp parallel=4
full_database_export.sh
#!/bin/bash
export ORACLE_SID=ORCL
export ORACLE_HOME=/u01/app/oracle/product/19.0.0/dbhome_1
expdp system/password \
directory=dp_dir \
dumpfile=full_db_%U.dmp \
logfile=full_db_export.log \
full=y \
parallel=4 \
compression=all \
job_name=FULL_DB_EXPORT
schema_export.sh
#!/bin/bash
expdp hr/password \
directory=dp_dir \
dumpfile=hr_schema_%U.dmp \
logfile=hr_schema_export.log \
schemas=hr \
parallel=2 \
compression=all \
exclude=statistics \
job_name=HR_SCHEMA_EXPORT
filtered_table_export.sh
#!/bin/bash
expdp hr/password \
directory=dp_dir \
dumpfile=employees_recent.dmp \
logfile=employees_export.log \
tables=hr.employees \
query='hr.employees:"WHERE hire_date >= DATE'\''2020-01-01'\''"' \
job_name=EMPLOYEES_FILTERED_EXPORT
full_database_import.sh
#!/bin/bash
impdp system/password \
directory=dp_dir \
dumpfile=full_db_%U.dmp \
logfile=full_db_import.log \
full=y \
parallel=4 \
job_name=FULL_DB_IMPORT
schema_import_remap.sh
#!/bin/bash
impdp system/password \
directory=dp_dir \
dumpfile=hr_schema_%U.dmp \
logfile=hr_import.log \
schemas=hr \
remap_schema=hr:hr_test \
remap_tablespace=users:test_users \
parallel=2 \
job_name=HR_SCHEMA_IMPORT
table_import_transform.sh
#!/bin/bash
impdp hr/password \
directory=dp_dir \
dumpfile=employees_recent.dmp \
logfile=employees_import.log \
tables=hr.employees \
remap_table=hr.employees:employees_backup \
transform=segment_attributes:n \
job_name=EMPLOYEES_IMPORT
# Direct database-to-database transfer
expdp hr/password \
network_link=source_db_link \
directory=dp_dir \
dumpfile=network_export.dmp \
schemas=hr
# Password-based encryption
expdp hr/password \
directory=dp_dir \
dumpfile=encrypted_export.dmp \
encryption=all \
encryption_algorithm=AES256 \
encryption_password=SecurePass123
# Export data as of specific SCN
expdp hr/password \
directory=dp_dir \
dumpfile=flashback_export.dmp \
schemas=hr \
flashback_scn=1234567890
# Export data as of specific time
expdp hr/password \
directory=dp_dir \
dumpfile=point_in_time_export.dmp \
schemas=hr \
flashback_time="TO_TIMESTAMP('2024-01-01 12:00:00', 'YYYY-MM-DD HH24:MI:SS')"
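Flashback exports read old row versions from undo, so the SCN or timestamp must still fall within the undo retention window. The current SCN to use with flashback_scn can be looked up directly:

-- Current SCN of the database
SELECT current_scn FROM v$database;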
-- Check job progress
SELECT job_name, operation, job_mode, state,
degree, attached_sessions
FROM dba_datapump_jobs;
-- Detailed job information
SELECT job_name, owner_name, operation, job_mode, state,
degree, attached_sessions, datapump_sessions
FROM dba_datapump_jobs
WHERE job_name = 'HR_EXPORT_JOB';
# Attach to running job
expdp hr/password attach=HR_EXPORT_JOB
# In interactive mode:
Export> status # Check status
Export> parallel=6 # Change parallelism
Export> stop_job=immediate # Stop job
Export> start_job # Resume job
Export> kill_job # Terminate job
-- Check Data Pump session performance
SELECT s.sid, s.serial#, s.username, s.program,
st.value/1024/1024 mb_processed
FROM v$session s, v$sesstat st, v$statname sn
WHERE s.sid = st.sid
AND st.statistic# = sn.statistic#
AND sn.name = 'bytes sent via SQL*Net to client'
AND (s.program LIKE '%DM0%' OR s.program LIKE '%DW0%')
ORDER BY st.value DESC;
# Optimal parallel settings
# For export: parallel = number of CPU cores
# For import: parallel = number of CPU cores / 2
# Use appropriate dump file sizing
expdp hr/password \
directory=dp_dir \
dumpfile=hr_%U.dmp \
filesize=2G \
parallel=4
-- Estimate export size (assumes the default 8 KB block size)
SELECT table_name,
       num_rows,
       blocks * 8192 bytes,
       ROUND(blocks * 8192 / 1024 / 1024, 2) mb
FROM user_tables
WHERE table_name = 'EMPLOYEES';
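Rather than computing sizes by hand, expdp can produce its own estimate without writing a dump file, using the estimate_only and estimate parameters:

# Block-based size estimate for a schema export; no dump file is written
expdp hr/password directory=dp_dir schemas=hr estimate_only=y estimate=blocks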
export_with_error_handling.sh
#!/bin/bash
LOG_FILE="/tmp/datapump_export.log"
EMAIL="dba@example.com"   # notification recipient; adjust for your site
expdp hr/password \
directory=dp_dir \
dumpfile=hr_export.dmp \
logfile=hr_export.log \
schemas=hr \
parallel=2 2>&1 | tee "$LOG_FILE"
# Check for errors in the captured output
if grep -qiE "error|ora-" "$LOG_FILE"; then
echo "Data Pump export encountered errors" | \
mail -s "Data Pump Export Failed" "$EMAIL"
exit 1
fi
# Secure directory permissions
chmod 750 /u01/app/oracle/datapump
chown oracle:oinstall /u01/app/oracle/datapump
-- Limit directory object access (SQL, as a DBA)
GRANT READ, WRITE ON DIRECTORY dp_dir TO hr;
REVOKE READ, WRITE ON DIRECTORY dp_dir FROM PUBLIC;
# Use strong encryption for sensitive data
expdp hr/password \
directory=dp_dir \
dumpfile=sensitive_data.dmp \
encryption=all \
encryption_algorithm=AES256 \
encryption_password=ComplexPassword123!
Common expdp parameters:

Parameter    Description            Example
---------    -----------            -------
directory    Directory object name  directory=dp_dir
dumpfile     Dump file name(s)      dumpfile=export_%U.dmp
logfile      Log file name          logfile=export.log
parallel     Parallel processes     parallel=4
compression  Compression type       compression=all
encryption   Encryption level       encryption=all
exclude      Exclude objects        exclude=statistics
include      Include objects        include=table
query        Row filtering          query="WHERE rownum <= 1000"
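Parameters with embedded quotes, such as include and query, are awkward to escape on the command line; a parameter file avoids the shell entirely. A sketch using a hypothetical hr_tables.par:

# hr_tables.par
directory=dp_dir
dumpfile=hr_tables.dmp
schemas=hr
include=TABLE:"IN ('EMPLOYEES', 'DEPARTMENTS')"

# Then reference the file from the command line
expdp hr/password parfile=hr_tables.par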
Common impdp parameters:

Parameter            Description                 Example
---------            -----------                 -------
remap_schema         Schema remapping            remap_schema=old:new
remap_tablespace     Tablespace remapping        remap_tablespace=old:new
remap_table          Table remapping             remap_table=old:new
transform            Object transformations      transform=segment_attributes:n
table_exists_action  Action for existing tables  table_exists_action=replace
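None of the import examples above use table_exists_action, so a short sketch is worth adding; this reloads the HR schema dump from earlier over tables that already exist in the target:

# Drop and recreate any table that already exists in the target schema
impdp system/password directory=dp_dir dumpfile=hr_schema_%U.dmp \
schemas=hr table_exists_action=replace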
-- Check directory permissions
SELECT privilege FROM dba_tab_privs
WHERE grantee = 'HR' AND table_name = 'DP_DIR';
-- Grant missing permissions
GRANT READ, WRITE ON DIRECTORY dp_dir TO hr;
# Check directory space and permissions
df -h /u01/app/oracle/datapump
ls -la /u01/app/oracle/datapump
-- Check for blocking sessions
SELECT blocking_session, sid, serial#, event
FROM v$session
WHERE blocking_session IS NOT NULL;
daily_schema_backup.sh
#!/bin/bash
DATE=$(date +%Y%m%d)
SCHEMA="HR"
expdp system/password \
directory=dp_dir \
dumpfile=${SCHEMA}_${DATE}.dmp \
logfile=${SCHEMA}_${DATE}.log \
schemas=$SCHEMA \
compression=all \
job_name=${SCHEMA}_BACKUP_${DATE}
# Cleanup old backups (keep 7 days)
find /u01/app/oracle/datapump -name "${SCHEMA}_*.dmp" -mtime +7 -delete
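To run this nightly, schedule the script with cron; the path below is hypothetical:

# crontab entry: run the schema backup every day at 02:00
0 2 * * * /home/oracle/scripts/daily_schema_backup.sh >> /tmp/hr_backup_cron.log 2>&1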
cross_platform_migration.sh
#!/bin/bash
# Export from source (Data Pump dump files are platform-independent,
# so endianness differences between platforms are handled automatically)
expdp system/password@source \
directory=dp_dir \
dumpfile=migration.dmp \
full=y \
version=12.2 \
compression=all
# Import to target
impdp system/password@target \
directory=dp_dir \
dumpfile=migration.dmp \
full=y \
transform=segment_attributes:n
For deeper coverage, see the following sections:

  1. Export Operations - Detailed export procedures and examples
  2. Import Operations - Comprehensive import techniques
  3. Data Pump Scripts - Production-ready automation scripts
  4. Performance Tuning - Optimization strategies
# Basic export
expdp user/password directory=dp_dir dumpfile=export.dmp schemas=user
# Basic import
impdp user/password directory=dp_dir dumpfile=export.dmp
# Check job status (run in SQL*Plus)
# SELECT job_name, state FROM dba_datapump_jobs;
# Attach to job
expdp user/password attach=JOB_NAME

This overview provides the foundation for using Oracle Data Pump effectively in your environment. Each section links to detailed procedures and examples for specific use cases.