
SQL*Loader Examples & Templates

This page provides practical, ready-to-use SQL*Loader examples and control file templates for common data loading scenarios.

Data File (employees.csv):

1001,John,Doe,2023-01-15,5000,10
1002,Jane,Smith,2023-02-01,4500,20
1003,Bob,Johnson,2023-03-01,3500,10

Control File (load_employees.ctl):

-- Basic delimited load: one CSV record per employees row.
LOAD DATA
INFILE 'employees.csv'
-- Default load method is INSERT: the target table must be empty at load time.
INTO TABLE employees
FIELDS TERMINATED BY ','
(
-- INTEGER EXTERNAL reads character digits and converts them on load;
-- DATE takes an explicit mask so no NLS session default is relied on.
employee_id INTEGER EXTERNAL,
first_name CHAR(50),
last_name CHAR(50),
hire_date DATE "YYYY-MM-DD",
salary INTEGER EXTERNAL,
department_id INTEGER EXTERNAL
)

Command:

sqlldr hr/password control=load_employees.ctl log=load_employees.log

Data File (products.txt):

P001 Laptop Computer Electronics 999.99 50
P002 Office Chair Furniture 149.99 25
P003 Desk Lamp Furniture 39.99 100

Control File (load_products.ctl):

-- Tab-delimited load: the terminator is given in hex (X'09' = TAB) because
-- a literal tab inside quotes is easy to mangle in editors.
LOAD DATA
INFILE 'products.txt'
INTO TABLE products
FIELDS TERMINATED BY X'09' -- Tab character
(
product_code CHAR(10),
product_name CHAR(100),
category CHAR(50),
-- DECIMAL EXTERNAL converts character data such as 999.99 to a number.
unit_price DECIMAL EXTERNAL,
quantity_on_hand INTEGER EXTERNAL
)

Data File (customers.dat):

001John Doe 123 Main St Springfield IL60601
002Jane Smith 456 Oak Ave Chicago IL60602
003Bob Johnson 789 Pine Rd Peoria IL61601

Control File (load_customers.ctl):

-- Fixed-width load: POSITION(start:end) gives each field's exact column
-- span, so no field terminator clause is needed.
LOAD DATA
INFILE 'customers.dat'
INTO TABLE customers
(
customer_id POSITION(1:3) INTEGER EXTERNAL,
first_name POSITION(4:13) CHAR,
last_name POSITION(14:23) CHAR,
address POSITION(24:38) CHAR,
city POSITION(39:50) CHAR,
state POSITION(51:52) CHAR,
zip_code POSITION(53:57) CHAR
)

Data File (employee_import.csv):

john.doe,SALES,50000,active,2023-01-15,[email protected]
jane.smith,MARKETING,45000,inactive,2023-02-01,[email protected]
bob.johnson,IT,55000,active,2023-03-01,[email protected]

Control File (transform_employees.ctl):

LOAD DATA
INFILE 'employee_import.csv'
INTO TABLE employees
FIELDS TERMINATED BY ','
(
-- Physical fields arrive in data-file order: username, department, salary,
-- status, hire_date, email.  Derived columns use the EXPRESSION keyword so
-- they do NOT consume an input field -- a plain SQL-string column still maps
-- to the next data field and would shift every later column off its value.
username CHAR(50),
first_name EXPRESSION "INITCAP(SUBSTR(:username, 1, INSTR(:username, '.') - 1))",
last_name EXPRESSION "INITCAP(SUBSTR(:username, INSTR(:username, '.') + 1))",
department "UPPER(:department)",
department_id EXPRESSION "DECODE(UPPER(:department), 'SALES', 10, 'MARKETING', 20, 'IT', 30, 99)",
salary INTEGER EXTERNAL,
status "UPPER(:status)",
is_active EXPRESSION "DECODE(UPPER(:status), 'ACTIVE', 'Y', 'N')",
hire_date DATE "YYYY-MM-DD",
email "LOWER(:email)",
-- Generated columns: no input field consumed.
created_date SYSDATE,
created_by CONSTANT 'SYSTEM'
)

Control File (validate_data.ctl):

-- Cleansing load: every column is validated/normalized in a SQL expression,
-- and values that fail validation load as NULL instead of rejecting the row.
LOAD DATA
INFILE 'messy_data.csv'
-- Rows rejected by Oracle errors are written here for later inspection.
BADFILE 'messy_data.bad'
INTO TABLE clean_contacts
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
-- Missing trailing fields become NULL instead of raising an error.
TRAILING NULLCOLS
(
-- Accept only all-digit ids.
contact_id "CASE WHEN REGEXP_LIKE(:contact_id, '^[0-9]+$')
THEN TO_NUMBER(:contact_id)
ELSE NULL END",
name "CASE WHEN LENGTH(TRIM(:name)) > 0
THEN INITCAP(TRIM(:name))
ELSE NULL END",
-- Basic RFC-style email shape check; normalized to lower case.
email "CASE WHEN REGEXP_LIKE(LOWER(TRIM(:email)),
'^[a-z0-9._%+-]+@[a-z0-9.-]+\.[a-z]{2,}$')
THEN LOWER(TRIM(:email))
ELSE NULL END",
-- Strip all non-digits (punctuation, spaces) from phone numbers.
phone "REGEXP_REPLACE(:phone, '[^0-9]', '')",
state "CASE WHEN LENGTH(:state) = 2
THEN UPPER(:state)
ELSE NULL END",
zip_code "CASE WHEN REGEXP_LIKE(:zip_code, '^[0-9]{5}$')
THEN :zip_code
ELSE NULL END"
)

Data File (multi_record.csv):

E,1001,John,Doe,SALES
D,10,Sales Department,Building A
E,1002,Jane,Smith,MARKETING
D,20,Marketing Department,Building B
E,1003,Bob,Johnson,IT
D,30,IT Department,Building C

Control File (multi_table_load.ctl):

LOAD DATA
INFILE 'multi_record.csv'
-- Load employees (records starting with 'E')
INTO TABLE employees
WHEN (1:1) = 'E'
FIELDS TERMINATED BY ','
(
-- FILLER: parsed but not loaded; used only by the WHEN tests above.
record_type FILLER,
employee_id INTEGER EXTERNAL,
first_name CHAR(50),
last_name CHAR(50),
department_name CHAR(50)
)
-- Load departments (records starting with 'D')
INTO TABLE departments
WHEN (1:1) = 'D'
FIELDS TERMINATED BY ','
(
-- POSITION(1) restarts field scanning at column 1.  A second INTO TABLE
-- clause otherwise continues scanning where the previous one stopped, so
-- without it every field here would be read past the end of the record.
record_type FILLER POSITION(1),
department_id INTEGER EXTERNAL,
department_name CHAR(100),
location CHAR(100)
)

Data File (hierarchy.csv):

CAT,1,Electronics,
CAT,2,Computers,1
CAT,3,Laptops,2
PROD,P001,Dell Laptop,3,999.99
PROD,P002,HP Laptop,3,899.99
CAT,4,Furniture,
PROD,P003,Office Chair,4,149.99

Control File (hierarchy_load.ctl):

LOAD DATA
INFILE 'hierarchy.csv'
-- Load categories
INTO TABLE categories
-- WHEN may reference a named field (including FILLER fields) from the list.
WHEN (record_type = 'CAT')
FIELDS TERMINATED BY ','
(
record_type FILLER,
category_id INTEGER EXTERNAL,
category_name CHAR(100),
-- Root categories have an empty parent field -> load NULL.
parent_category_id INTEGER EXTERNAL NULLIF parent_category_id = BLANKS
)
-- Load products
INTO TABLE products
WHEN (record_type = 'PROD')
FIELDS TERMINATED BY ','
(
-- POSITION(1) resets field scanning to the start of the record; a second
-- INTO TABLE clause otherwise resumes where the previous field list
-- stopped and every field here would be misread.
record_type FILLER POSITION(1),
product_code CHAR(20),
product_name CHAR(200),
category_id INTEGER EXTERNAL,
unit_price DECIMAL EXTERNAL
)

Data File (dates.csv):

1,2023-01-15,15-JAN-23,01/15/2023 14:30:00,2023-01-15T14:30:00
2,2023-02-28,28-FEB-23,02/28/2023 09:15:30,2023-02-28T09:15:30
3,2023-03-31,31-MAR-23,03/31/2023 16:45:45,2023-03-31T16:45:45

Control File (date_formats.ctl):

LOAD DATA
INFILE 'dates.csv'
INTO TABLE date_examples
FIELDS TERMINATED BY ','
-- The data file carries only five fields per record; TRAILING NULLCOLS
-- lets the missing sixth field (current_timestamp) load as NULL instead
-- of rejecting every row with "column not found before end of record".
TRAILING NULLCOLS
(
id INTEGER EXTERNAL,
iso_date DATE "YYYY-MM-DD",
-- RR pivots two-digit years (23 -> 2023).
oracle_date DATE "DD-MON-RR",
us_datetime DATE "MM/DD/YYYY HH24:MI:SS",
-- The literal T separator must be escaped inside the mask.
iso_datetime DATE "YYYY-MM-DD\"T\"HH24:MI:SS",
-- TIMESTAMP WITH TIME ZONE takes a datetime mask (EXTERNAL is not a valid
-- form for it); with the field absent from the data it loads NULL.
current_timestamp TIMESTAMP WITH TIME ZONE "YYYY-MM-DD HH24:MI:SS TZR",
-- Generated at load time; consumes no input field.
load_date SYSDATE
)

Control File (null_dates.ctl):

LOAD DATA
INFILE 'employee_dates.csv'
INTO TABLE employee_dates
FIELDS TERMINATED BY ','
TRAILING NULLCOLS
(
employee_id INTEGER EXTERNAL,
-- Blank field -> NULL instead of a date-conversion error.
hire_date DATE "YYYY-MM-DD" NULLIF hire_date = BLANKS,
-- The literal string NULL in the data is loaded as NULL.
termination_date DATE "YYYY-MM-DD" NULLIF termination_date = 'NULL',
-- Blank field -> the column's database DEFAULT value.
birth_date DATE "MM/DD/YYYY" DEFAULTIF birth_date = BLANKS,
-- A field cannot carry both a DATE mask and a SQL expression, and in
-- Oracle the empty string IS NULL, so the original equality test against
-- '' could never be true.  Read the raw text and convert explicitly:
-- TO_DATE(NULL, ...) simply yields NULL.
review_date CHAR(10) "TO_DATE(:review_date, 'YYYY-MM-DD')"
)

Control File (error_handling.ctl):

-- ERRORS and ROWS are sqlldr parameters: inside a control file they are only
-- legal in an OPTIONS clause *before* LOAD DATA.  Placing them after the
-- INTO TABLE clause (as the original did) is a syntax error.
OPTIONS (ERRORS=500, ROWS=1000)
LOAD DATA
INFILE 'problem_data.csv'
BADFILE 'problem_data.bad'
DISCARDFILE 'problem_data.dis'
APPEND INTO TABLE robust_table
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
TRAILING NULLCOLS
(
-- Each column validates its raw text and falls back to NULL / 0 / SYSDATE
-- rather than rejecting the whole record.
id "CASE WHEN REGEXP_LIKE(:id, '^[0-9]+$') AND TO_NUMBER(:id) > 0
THEN TO_NUMBER(:id)
ELSE NULL END",
name "CASE WHEN LENGTH(TRIM(:name)) BETWEEN 1 AND 100
THEN TRIM(:name)
ELSE NULL END",
email "CASE WHEN REGEXP_LIKE(:email, '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$')
THEN LOWER(:email)
ELSE NULL END",
amount "CASE WHEN REGEXP_LIKE(:amount, '^[0-9.]+$') AND TO_NUMBER(:amount) >= 0
THEN TO_NUMBER(:amount)
ELSE 0 END",
-- In Oracle the empty string is NULL, so a != '' comparison can never be
-- true; IS NOT NULL is the correct presence test.
created_date "CASE WHEN :created_date IS NOT NULL
THEN TO_DATE(:created_date, 'YYYY-MM-DD')
ELSE SYSDATE END"
)

Control File (high_performance.ctl):

-- High-volume load template: direct-path, parallel-capable.
OPTIONS (
DIRECT=TRUE,
PARALLEL=TRUE,
SKIP=1, -- Skip header row
LOAD=1000000, -- Load first 1M rows only
ERRORS=1000, -- Allow errors
ROWS=50000, -- Direct path: rows per data save, not a commit interval
BINDSIZE=2097152, -- 2MB bind buffer (conventional path only; ignored when DIRECT=TRUE)
READSIZE=2097152 -- 2MB read buffer
)
LOAD DATA
INFILE 'large_file.csv'
-- PARALLEL direct loads require APPEND.
APPEND INTO TABLE large_table
FIELDS TERMINATED BY ','
(
id INTEGER EXTERNAL,
description CHAR(200),
amount DECIMAL EXTERNAL,
status CHAR(20),
created_date DATE "YYYY-MM-DD HH24:MI:SS"
)

Bash Script (parallel_load.sh):

#!/bin/bash
# parallel_load.sh - Load several data files into one table concurrently,
# one background sqlldr per file (direct-path parallel loads need APPEND).
FILES=(file1.csv file2.csv file3.csv file4.csv)

# create_control_file FILE
# Writes a per-file control file and prints its name on stdout.
create_control_file() {
    local file=$1
    local control_file="load_${file%.csv}.ctl"
    # The here-doc delimiter is deliberately unquoted so $file expands.
    # All expansions below are quoted so file names with spaces survive.
    cat > "$control_file" << EOF
LOAD DATA
INFILE '$file'
APPEND INTO TABLE parallel_data
FIELDS TERMINATED BY ','
(
file_source CONSTANT '$file',
id INTEGER EXTERNAL,
data1 CHAR(100),
data2 CHAR(100),
amount DECIMAL EXTERNAL,
load_timestamp SYSDATE
)
EOF
    echo "$control_file"
}

# Launch one loader per file in the background.
for file in "${FILES[@]}"; do
    control_file=$(create_control_file "$file")
    sqlldr hr/password control="$control_file" \
        log="load_${file%.csv}.log" \
        direct=true parallel=true &
done

# Block until every background load finishes.
wait
echo "All files loaded in parallel"

Data File (documents.csv):

1,Manual,"/docs/user_manual.txt"
2,Specification,"/docs/tech_spec.txt"
3,README,"/docs/readme.txt"

Control File (lob_load.ctl):

LOAD DATA
INFILE 'documents.csv'
INTO TABLE document_store
FIELDS TERMINATED BY ','
-- The paths in documents.csv are double-quoted; without this clause the
-- quotes become part of the path and every LOBFILE open would fail.
OPTIONALLY ENCLOSED BY '"'
(
doc_id INTEGER EXTERNAL,
doc_type CHAR(50),
-- FILLER: read from the record but not loaded; names the LOB source file.
file_path FILLER CHAR(255),
-- TERMINATED BY EOF loads the whole external file into the CLOB; no
-- length datatype follows the terminator clause (the original appended
-- CHAR(1000000) after it, which is out of place in the field syntax).
content LOBFILE(file_path) TERMINATED BY EOF
)

Control File (binary_load.ctl):

-- Secondary-file (LOBFILE) load: each record names an external file whose
-- raw bytes are loaded into a BLOB column.
LOAD DATA
INFILE 'image_metadata.csv'
INTO TABLE image_store
FIELDS TERMINATED BY ','
(
image_id INTEGER EXTERNAL,
filename CHAR(255),
-- FILLER: parsed but not loaded; used only to locate the LOB source file.
image_path FILLER,
-- TERMINATED BY EOF loads the entire external file into the BLOB.
image_data LOBFILE(image_path) TERMINATED BY EOF,
-- Generated at load time; consumes no input field.
upload_date SYSDATE
)

Control File (financial_import.ctl):

LOAD DATA
INFILE 'transactions.csv'
INTO TABLE financial_transactions
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
TRAILING NULLCOLS
(
transaction_id "TO_NUMBER(:transaction_id)",
-- Zero-pad account numbers to the fixed 12-digit format.
account_number "LPAD(:account_number, 12, '0')",
transaction_date DATE "MM/DD/YYYY",
transaction_type "UPPER(:transaction_type)",
-- Strip the currency symbol AND thousands separators ("$1,234.56") before
-- numeric conversion; the original removed only '$'.
amount "TO_NUMBER(REPLACE(REPLACE(:amount, '$', ''), ',', ''))",
description "SUBSTR(:description, 1, 500)",
reference_number CHAR(50),
-- :transaction_date is the raw character field here, so convert it
-- explicitly; NVL over DATE and bare string forced an implicit,
-- NLS-session-dependent conversion in the original.
posting_date "NVL(TO_DATE(:posting_date, 'MM/DD/YYYY'), TO_DATE(:transaction_date, 'MM/DD/YYYY'))",
balance_after "TO_NUMBER(REPLACE(REPLACE(:balance_after, '$', ''), ',', ''))",
-- Audit columns generated at load time; consume no input fields.
created_by CONSTANT 'BATCH_IMPORT',
created_date SYSDATE
)

Control File (customer_import.ctl):

LOAD DATA
INFILE 'customer_data.csv'
BADFILE 'customer_errors.bad'
INTO TABLE customers
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
TRAILING NULLCOLS
(
customer_id "TO_NUMBER(:customer_id)",
company_name "INITCAP(:company_name)",
contact_first_name "INITCAP(:contact_first_name)",
contact_last_name "INITCAP(:contact_last_name)",
-- Derived column: EXPRESSION keeps it from consuming a CSV field (a plain
-- SQL-string column still maps to the next data field and would shift
-- every column after it).
full_contact_name EXPRESSION ":contact_first_name || ' ' || :contact_last_name",
email "LOWER(TRIM(:email))",
-- Keep digits only.
phone "REGEXP_REPLACE(:phone, '[^0-9]', '')",
address_line1 "INITCAP(:address_line1)",
address_line2 "INITCAP(:address_line2)",
city "INITCAP(:city)",
state "UPPER(:state)",
postal_code CHAR(20),
country "UPPER(NVL(:country, 'USA'))",
credit_limit "NVL(TO_NUMBER(:credit_limit), 0)",
customer_since DATE "MM/DD/YYYY" NULLIF customer_since = BLANKS,
status "NVL(UPPER(:status), 'ACTIVE')",
sales_rep_id "NVL(TO_NUMBER(:sales_rep_id), 1)",
-- Audit columns generated at load time; consume no input fields.
created_date SYSDATE,
modified_date SYSDATE
)

Control File (product_catalog.ctl):

LOAD DATA
INFILE 'product_catalog.csv'
INTO TABLE products
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
TRAILING NULLCOLS
(
sku CHAR(50),
product_name CHAR(200),
category_code CHAR(20),
subcategory_code CHAR(20),
brand CHAR(100),
model CHAR(100),
description CHAR(4000),
unit_cost "TO_NUMBER(:unit_cost, '999999.99')",
list_price "TO_NUMBER(:list_price, '999999.99')",
sale_price "TO_NUMBER(:sale_price, '999999.99')",
-- Derived column: EXPRESSION keeps it from consuming a CSV field.  The
-- bind fields are raw text, so convert explicitly; NULLIF guards the
-- division when list_price is zero (margin loads as NULL).
margin_percent EXPRESSION "ROUND((TO_NUMBER(:list_price) - TO_NUMBER(:unit_cost)) / NULLIF(TO_NUMBER(:list_price), 0) * 100, 2)",
weight "TO_NUMBER(:weight, '9999.99')",
dimensions CHAR(100),
color CHAR(50),
size_code CHAR(20),
-- Normalize assorted truthy/falsy spellings to Y/N.
inventory_flag "DECODE(UPPER(:inventory_flag), 'Y', 'Y', 'YES', 'Y', 'TRUE', 'Y', 'N')",
active_flag "DECODE(UPPER(:active_flag), 'N', 'N', 'NO', 'N', 'FALSE', 'N', 'Y')",
supplier_id "TO_NUMBER(:supplier_id)",
lead_time_days "NVL(TO_NUMBER(:lead_time_days), 0)",
-- Audit columns generated at load time; consume no input fields.
created_date SYSDATE,
last_updated SYSDATE
)

Bash Script (generate_control.sh):

#!/bin/bash
# generate_control.sh - Generate a SQL*Loader control file from a CSV header row.
# Usage: generate_control.sh <csv_file> <table_name>
CSV_FILE=$1
TABLE_NAME=$2
CONTROL_FILE="${TABLE_NAME}.ctl"
if [ -z "$CSV_FILE" ] || [ -z "$TABLE_NAME" ]; then
    echo "Usage: $0 <csv_file> <table_name>"
    exit 1
fi
# Read the header line; strip a trailing CR so Windows-format (CRLF) files
# do not corrupt the last column name.
HEADER=$(head -n1 "$CSV_FILE" | tr -d '\r')
# Emit the fixed preamble of the control file.
cat > "$CONTROL_FILE" << EOF
LOAD DATA
INFILE '$CSV_FILE'
INTO TABLE $TABLE_NAME
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
TRAILING NULLCOLS
(
EOF
# One CHAR(255) column definition per header field.
echo "$HEADER" | tr ',' '\n' | while read -r column; do
    # Normalize the name: drop quotes, spaces -> underscores, lower-case.
    clean_column=$(echo "$column" | tr -d '"' | tr ' ' '_' | tr '[:upper:]' '[:lower:]')
    echo " $clean_column CHAR(255)," >> "$CONTROL_FILE"
done
# Drop the trailing comma on the last column, then close the field list.
# (GNU sed syntax; on macOS/BSD use: sed -i '' '$s/,$//')
sed -i '$s/,$//' "$CONTROL_FILE"
echo ")" >> "$CONTROL_FILE"
echo "Control file generated: $CONTROL_FILE"

Bash Script (validate_load.sh):

#!/bin/bash
# validate_load.sh - Summarize a SQL*Loader log: load counts, ORA- errors,
# elapsed time.  Exits 0 on a clean load, 1 otherwise.
LOG_FILE=$1
if [ -z "$LOG_FILE" ] || [ ! -r "$LOG_FILE" ]; then
    echo "Usage: $0 <sqlldr_log_file>"
    exit 1
fi
echo "SQL*Loader Validation Report"
echo "============================"
echo "Log File: $LOG_FILE"
echo "Date: $(date)"
echo
# Extract load statistics: the count is the first token on each summary line.
LOADED=$(grep "successfully loaded" "$LOG_FILE" | head -1 | awk '{print $1}')
REJECTED=$(grep "rejected" "$LOG_FILE" | head -1 | awk '{print $1}')
DISCARDED=$(grep "discarded" "$LOG_FILE" | head -1 | awk '{print $1}')
echo "Load Statistics:"
echo " Records Loaded: ${LOADED:-0}"
echo " Records Rejected: ${REJECTED:-0}"
echo " Records Discarded: ${DISCARDED:-0}"
echo
# Count Oracle error lines (grep -c prints 0 when none are found).
ERROR_COUNT=$(grep -c "ORA-" "$LOG_FILE")
if [ "$ERROR_COUNT" -gt 0 ]; then
    echo "Errors Found ($ERROR_COUNT):"
    grep "ORA-" "$LOG_FILE" | head -10
    echo
fi
# Report elapsed time as logged.  (The original also printed LOADED/60 as a
# "records/minute" rate, which is only correct for a 60-second run; dropped
# rather than reporting a misleading figure.)
ELAPSED=$(grep "Elapsed time" "$LOG_FILE" | tail -1)
if [ -n "$ELAPSED" ]; then
    echo "Performance:"
    echo " $ELAPSED"
fi
# Overall verdict drives the exit status for calling scripts.
if [ "${REJECTED:-0}" -eq 0 ] && [ "$ERROR_COUNT" -eq 0 ]; then
    echo "Status: SUCCESS - No errors detected"
    exit 0
else
    echo "Status: WARNING - Errors or rejections detected"
    exit 1
fi

These examples provide comprehensive templates for most SQL*Loader scenarios you’ll encounter in production environments.