
Data Pump Export (expdp) Guide

This guide provides detailed instructions for using Oracle Data Pump Export (expdp) to extract data and metadata from Oracle databases efficiently.

Basic syntax:
expdp [username/password[@connect_identifier]] [parameter=value]...
Parameter   Description                       Example
---------   -------------------------------   ----------------------
directory   Directory object for dump files   directory=dp_dir
dumpfile    Output dump file name(s)          dumpfile=export_%U.dmp
logfile     Export log file                   logfile=export.log
schemas     Schema(s) to export               schemas=hr,scott
tables      Specific tables to export         tables=hr.employees
full        Full database export              full=y
parallel    Parallel worker processes         parallel=4
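All of the examples below reference a directory object named dp_dir. It must exist in the database and be readable and writable by the exporting user; a minimal sketch (the filesystem path and grantee are examples):
-- create_dp_dir.sql
-- Create the directory object referenced by the examples in this guide
-- (the path and the grantee below are examples, not fixed values)
CREATE OR REPLACE DIRECTORY dp_dir AS '/u01/exports';
GRANT READ, WRITE ON DIRECTORY dp_dir TO hr;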
full_database_export.sh
#!/bin/bash
# Complete database export with compression and encryption
export ORACLE_SID=PROD
export DATE=$(date +%Y%m%d_%H%M%S)
expdp system/password \
directory=dp_dir \
dumpfile=full_db_${DATE}_%U.dmp \
logfile=full_db_${DATE}.log \
full=y \
parallel=8 \
compression=all \
encryption=all \
encryption_password=SecurePass123 \
cluster=n \
filesize=10G \
job_name=FULL_DB_EXPORT_${DATE}
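A full=y export requires the DATAPUMP_EXP_FULL_DATABASE role, which SYSTEM already has; to run full exports from another account, grant the role first (the account name below is an example):
-- grant_export_role.sql
-- Role required for full-mode exports; export_admin is an example account name
GRANT datapump_exp_full_database TO export_admin;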
schema_export.sh
#!/bin/bash
# Export multiple schemas with metadata filtering
expdp system/password \
directory=dp_dir \
dumpfile=schemas_export_%U.dmp \
logfile=schemas_export.log \
schemas=hr,scott,sh \
parallel=4 \
compression=metadata_only \
exclude=statistics \
exclude=index:"IN ('EMP_EMAIL_UK')" \
content=all \
job_name=SCHEMA_EXPORT
table_export.sh
#!/bin/bash
# Export specific tables with query filtering
expdp hr/password \
directory=dp_dir \
dumpfile=emp_tables.dmp \
logfile=emp_tables.log \
tables=employees,departments,job_history \
query=employees:"WHERE department_id IN (10,20,30)" \
query=job_history:"WHERE end_date >= DATE'2020-01-01'" \
compression=all \
job_name=EMP_TABLE_EXPORT
partition_export.sh
#!/bin/bash
# Export specific partitions from partitioned tables
expdp sales/password \
directory=dp_dir \
dumpfile=sales_2024.dmp \
logfile=sales_2024.log \
tables=sales_data:sales_2024_q1,sales_data:sales_2024_q2 \
parallel=2 \
job_name=PARTITION_EXPORT
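If the partition names are not known in advance, they can be read from the data dictionary before building the tables=table:partition list; a sketch assuming the sales_data table above is owned by SALES:
-- list_partitions.sql
-- Look up partition names and boundaries before a partition-level export
-- (owner and table name below match the example and are assumptions)
SELECT table_name, partition_name, high_value
FROM dba_tab_partitions
WHERE table_owner = 'SALES'
AND table_name = 'SALES_DATA'
ORDER BY partition_position;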
consistent_export.sh
#!/bin/bash
# Export data as of specific point in time
# Get current SCN
CURRENT_SCN=$(sqlplus -s system/password <<EOF
set heading off feedback off pagesize 0
select current_scn from v\$database;
EOF
)
# Collapse any whitespace SQL*Plus adds around the value
CURRENT_SCN=$(echo $CURRENT_SCN)
expdp system/password \
directory=dp_dir \
dumpfile=consistent_export.dmp \
logfile=consistent_export.log \
schemas=hr,finance \
flashback_scn=$CURRENT_SCN \
parallel=4 \
job_name=CONSISTENT_EXPORT
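If a specific SCN is not required, FLASHBACK_TIME gives the same read-consistent snapshot without the SQL*Plus step; in recent releases SYSTIMESTAMP can be passed directly. A minimal variant of the same export:
consistent_export_time.sh
#!/bin/bash
# Consistent export anchored to the current time instead of an explicit SCN
expdp system/password \
directory=dp_dir \
dumpfile=consistent_time.dmp \
logfile=consistent_time.log \
schemas=hr,finance \
flashback_time=systimestamp \
parallel=4 \
job_name=CONSISTENT_TIME_EXPORT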
network_export.sh
#!/bin/bash
# Export data from a remote source database over a database link
# (dump files are written by the database you connect to, not copied directly)
expdp system/password \
network_link=source_db_link \
schemas=hr \
parallel=4 \
job_name=NETWORK_TRANSFER
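The link named in network_link must already exist in the database you run expdp against; a minimal sketch (credentials and TNS alias are examples):
-- create_source_db_link.sql
-- Database link pointing at the remote source database
-- (connect credentials and the 'source_db' TNS alias are examples)
CREATE DATABASE LINK source_db_link
CONNECT TO system IDENTIFIED BY password
USING 'source_db';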
metadata_export.sh
#!/bin/bash
# Export database structure without data
expdp system/password \
directory=dp_dir \
dumpfile=metadata_only.dmp \
logfile=metadata_only.log \
full=y \
content=metadata_only \
compression=all \
job_name=METADATA_EXPORT
filtered_export.sh
#!/bin/bash
# Export with complex filtering rules
# Note: combining EXCLUDE and INCLUDE in the same job is only supported in
# Oracle 21c and later; earlier releases reject the combination
expdp system/password \
directory=dp_dir \
dumpfile=filtered_export.dmp \
schemas=hr \
exclude=table:"LIKE 'TMP_%'" \
exclude=table:"IN ('AUDIT_LOG','ERROR_LOG')" \
exclude=index:"LIKE '%_TMP'" \
exclude=constraint \
exclude=ref_constraint \
exclude=statistics \
include=table:"LIKE 'EMP%'" \
job_name=FILTERED_EXPORT
subquery_export.sh
#!/bin/bash
# Export with complex WHERE conditions
expdp hr/password \
directory=dp_dir \
dumpfile=active_employees.dmp \
tables=employees \
query=employees:"WHERE employee_id IN (
SELECT employee_id FROM employees e
WHERE e.department_id IN (
SELECT department_id FROM departments
WHERE location_id IN (1700,1800)
) AND e.hire_date >= DATE'2020-01-01'
)" \
job_name=ACTIVE_EMP_EXPORT
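Long QUERY clauses are awkward to escape on the command line; writing them to a parameter file avoids shell quoting entirely. A sketch using a logically equivalent one-line version of the same filter:
query_parfile_export.sh
#!/bin/bash
# Keep the long QUERY clause in a parameter file to avoid shell quoting issues
cat > active_employees.par << 'EOF'
directory=dp_dir
dumpfile=active_employees.dmp
tables=employees
query=employees:"WHERE department_id IN (SELECT department_id FROM departments WHERE location_id IN (1700,1800)) AND hire_date >= DATE'2020-01-01'"
job_name=ACTIVE_EMP_EXPORT
EOF
expdp hr/password parfile=active_employees.par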
optimal_parallel_export.sh
#!/bin/bash
# Configure parallel export for maximum performance
# Use the server CPU count as a starting point for the parallel degree
PARALLEL_DEGREE=$(nproc)
expdp system/password \
directory=dp_dir \
dumpfile=optimized_%U.dmp \
logfile=optimized_export.log \
full=y \
parallel=$PARALLEL_DEGREE \
cluster=n \
compression=all \
metrics=y \
logtime=all \
job_name=OPTIMIZED_EXPORT
large_table_export.sh
#!/bin/bash
# Export large tables efficiently
expdp system/password \
directory=dp_dir \
dumpfile=large_table_%U.dmp \
tables=transactions.large_table \
parallel=16 \
compression=all \
cluster=n \
access_method=direct_path \
job_name=LARGE_TABLE_EXPORT
-- monitor_export.sql
-- Monitor running Data Pump export jobs
-- Job overview
SELECT owner_name, job_name, operation, job_mode,
state, degree, attached_sessions
FROM dba_datapump_jobs
WHERE operation = 'EXPORT'
ORDER BY owner_name, job_name;
-- Detailed progress
SELECT sid, serial#, username, opname,
target_desc, sofar, totalwork,
ROUND(sofar/totalwork*100,2) "% Complete",
time_remaining/60 "Minutes Remaining"
FROM v$session_longops
WHERE opname LIKE 'Data Pump%'
AND sofar <> totalwork
ORDER BY start_time;
-- Worker process status
SELECT owner_name, job_name, operation, job_mode,
state, degree, attached_sessions,
datapump_sessions
FROM dba_datapump_jobs
WHERE state = 'EXECUTING';
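To see the individual database sessions behind a job (for example, to find the SID of a stuck worker), DBA_DATAPUMP_SESSIONS can be joined to V$SESSION on the session address:
-- datapump_sessions.sql
-- Map Data Pump job sessions to their SID and serial#
SELECT dps.owner_name, dps.job_name, s.sid, s.serial#, s.status
FROM dba_datapump_sessions dps
JOIN v$session s ON s.saddr = dps.saddr
ORDER BY dps.job_name, s.sid;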
control_export_job.sh
#!/bin/bash
# Attach to and control running export job
# Attach to a running job (use the JOB_NAME of the export you want to control)
expdp system/password attach=FULL_DB_EXPORT
# Interactive commands:
# Export> status # Check current status
# Export> parallel=8 # Change parallel degree
# Export> add_file=extra_01.dmp # Add a dump file
# Export> filesize=5G # Set size limit for new dump files
# Export> stop_job=immediate # Stop job immediately
# Export> start_job # Resume stopped job
# Export> kill_job # Terminate and remove job
encrypted_export.sh
#!/bin/bash
# Export with multiple encryption options
expdp system/password \
directory=dp_dir \
dumpfile=encrypted_data.dmp \
schemas=sensitive_data \
encryption=all \
encryption_algorithm=AES256 \
encryption_mode=password \
encryption_password=ComplexPass123! \
job_name=ENCRYPTED_EXPORT
Using a parameter file keeps passwords off the command line and out of the process list:
# Create parameter file with sensitive information
cat > export_params.par << EOF
userid=system/password
directory=dp_dir
dumpfile=secure_export_%U.dmp
logfile=secure_export.log
schemas=hr,finance
encryption_password=SecurePassword123
compression=all
parallel=4
exclude=statistics
EOF
# Set restrictive permissions
chmod 600 export_params.par
# Run export with parameter file
expdp parfile=export_params.par
automated_daily_export.sh
#!/bin/bash
# Automated export with retention management
# Configuration
EXPORT_DIR="/u01/exports"
RETENTION_DAYS=7
SCHEMAS="hr,finance,sales"
LOG_DIR="/u01/logs"
EMAIL="dba@example.com"   # notification recipient (example address)
# Set date variables
DATE=$(date +%Y%m%d)
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
# Create log file
LOG_FILE="${LOG_DIR}/export_${TIMESTAMP}.log"
exec 1> >(tee -a "$LOG_FILE")
exec 2>&1
echo "Starting automated export at $(date)"
# Export schemas
expdp system/password \
directory=dp_dir \
dumpfile=daily_${DATE}_%U.dmp \
logfile=daily_${DATE}.log \
schemas=$SCHEMAS \
compression=all \
parallel=4 \
job_name=DAILY_EXPORT_${DATE}
# Check export status
if [ $? -eq 0 ]; then
echo "Export completed successfully"
# Clean up old exports
find $EXPORT_DIR -name "daily_*.dmp" -mtime +$RETENTION_DAYS -delete
find $EXPORT_DIR -name "daily_*.log" -mtime +$RETENTION_DAYS -delete
# Send success notification
echo "Daily export completed successfully" | \
mail -s "Export Success - ${DATE}" $EMAIL
else
echo "Export failed with error"
# Send failure notification
tail -100 $LOG_FILE | \
mail -s "Export Failed - ${DATE}" $EMAIL
fi
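The script is typically scheduled from cron; an example crontab entry (script and log paths are assumptions) that runs it every night at 02:00:
# crontab -e entry: run the nightly export at 02:00 (paths are examples)
0 2 * * * /u01/scripts/automated_daily_export.sh >> /u01/logs/cron_export.log 2>&1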
schema_refresh_export.sh
#!/bin/bash
# Export for test environment refresh
# Parameters
SOURCE_SCHEMA=$1
TARGET_ENV=$2
SCRAMBLE_DATA=${3:-false}
if [ -z "$SOURCE_SCHEMA" ] || [ -z "$TARGET_ENV" ]; then
echo "Usage: $0 <schema_name> <target_env> [scramble_data]"
exit 1
fi
DATE=$(date +%Y%m%d_%H%M%S)
# Base export
expdp system/password \
directory=dp_dir \
dumpfile=${SOURCE_SCHEMA}_${TARGET_ENV}_${DATE}.dmp \
logfile=${SOURCE_SCHEMA}_${TARGET_ENV}_${DATE}.log \
schemas=$SOURCE_SCHEMA \
compression=all \
exclude=statistics \
job_name=REFRESH_${SOURCE_SCHEMA}_${DATE}
# If data scrambling required
if [ "$SCRAMBLE_DATA" = "true" ]; then
echo "Note: Data scrambling must be performed during import"
echo "Use REMAP_DATA parameter during import for sensitive columns"
fi
echo "Export completed: ${SOURCE_SCHEMA}_${TARGET_ENV}_${DATE}.dmp"
-- pre_export_checks.sql
-- Run before major exports
-- Check database size
SELECT SUM(bytes)/1024/1024/1024 size_gb
FROM dba_segments;
-- Check directory space
!df -h /u01/exports
-- Check running jobs
SELECT job_name, state FROM dba_datapump_jobs;
-- Verify directory permissions
SELECT directory_name, directory_path
FROM dba_directories
WHERE directory_name = 'DP_DIR';
-- Check export-related roles for the current session
SELECT role FROM session_roles
WHERE role LIKE '%FULL_DATABASE%';
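Before a large export it also helps to estimate how much dump file space will be needed without writing anything; ESTIMATE_ONLY does exactly that. A sketch for the schemas used in the daily export:
estimate_export.sh
#!/bin/bash
# Estimate dump size for the target schemas without creating a dump file
expdp system/password \
directory=dp_dir \
schemas=hr,finance,sales \
estimate_only=y \
estimate=blocks \
logfile=estimate.log \
job_name=SIZE_ESTIMATE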
validate_export.sh
#!/bin/bash
# Validate export completion and integrity
DUMPFILE=$1
LOGFILE=$2
if [ -z "$DUMPFILE" ] || [ -z "$LOGFILE" ]; then
echo "Usage: $0 <dumpfile> <logfile>"
exit 1
fi
# Check if files exist
if [ ! -f "$DUMPFILE" ] || [ ! -f "$LOGFILE" ]; then
echo "ERROR: Export files not found"
exit 1
fi
# Check for errors in log
ERROR_COUNT=$(grep -c "ORA-" "$LOGFILE")
if [ $ERROR_COUNT -gt 0 ]; then
echo "WARNING: Found $ERROR_COUNT errors in log file"
grep "ORA-" "$LOGFILE"
fi
# Check completion status
if grep -q "successfully completed" "$LOGFILE"; then
echo "Export completed successfully"
# Get export statistics
grep -A5 "Master table" "$LOGFILE"
grep "exported" "$LOGFILE" | tail -10
else
echo "ERROR: Export did not complete successfully"
exit 1
fi
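A further check is to read the dump back with impdp in SQLFILE mode, which writes the contained DDL to a file without importing any objects; a sketch that takes the dump file name as an argument:
verify_dump_ddl.sh
#!/bin/bash
# Extract the DDL from a dump file without importing anything
DUMPFILE=$1
if [ -z "$DUMPFILE" ]; then
echo "Usage: $0 <dumpfile>"
exit 1
fi
impdp system/password \
directory=dp_dir \
dumpfile=$DUMPFILE \
sqlfile=ddl_preview.sql \
job_name=VERIFY_DUMP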
Error       Cause                  Solution
---------   --------------------   ------------------------------
ORA-39001   Invalid argument       Check parameter syntax
ORA-39002   Invalid operation      Verify privileges
ORA-39070   Unable to open file    Check directory permissions
ORA-39095   Dump file space        Increase filesize or add files
ORA-39126   Worker process error   Check alert log
When a job misbehaves, detailed tracing can be enabled for further diagnosis:
# Enable detailed tracing
expdp system/password \
directory=dp_dir \
dumpfile=debug_export.dmp \
schemas=hr \
trace=480300 \
metrics=y \
logtime=all \
job_name=DEBUG_EXPORT

This guide covers the most common Data Pump export scenarios, from ad hoc schema and table exports to automated production jobs, along with the monitoring, validation, and troubleshooting steps that support them.