Skip to content

ORA-00001 Unique Constraint Violated - Duplicate Key Prevention

Error Text: ORA-00001: unique constraint ([schema].[constraint_name]) violated

This error occurs when an INSERT or UPDATE operation attempts to create duplicate values in a column or combination of columns that have a unique constraint (including primary keys). It’s a data integrity error that prevents the creation of duplicate records.

Unique Constraints
├── Primary Key (PK) - Unique + Not Null
├── Unique Key (UK) - Unique, allows NULLs (Oracle permits multiple rows with a NULL key)
├── Composite Unique - Multiple columns together
└── Function-Based Unique - Based on expression
  • Primary key violations - Duplicate IDs during INSERT
  • Natural key conflicts - Business keys like email, SSN
  • Composite key violations - Multiple column combinations
  • Sequence out of sync - manually assigned IDs colliding with sequence-generated values
-- Identify the table and key columns behind the constraint named in the
-- ORA-00001 message (format: ORA-00001: unique constraint (OWNER.NAME) violated).
-- Requires access to DBA_* views; substitute ALL_CONSTRAINTS/ALL_CONS_COLUMNS otherwise.
-- Uses an explicit ANSI join instead of the error-prone comma-join form.
SELECT
    c.owner,
    c.constraint_name,
    c.table_name,
    c.constraint_type,   -- 'P' = primary key, 'U' = unique
    c.status,
    c.validated,
    LISTAGG(cc.column_name, ', ') WITHIN GROUP (ORDER BY cc.position) AS columns
FROM dba_constraints c
INNER JOIN dba_cons_columns cc
    ON cc.owner = c.owner
   AND cc.constraint_name = c.constraint_name
WHERE c.constraint_name = 'CONSTRAINT_NAME_FROM_ERROR' -- Replace with actual constraint name
  -- Constraint names are only unique per owner; add AND c.owner = '...' if
  -- the same name exists in several schemas.
GROUP BY c.owner, c.constraint_name, c.table_name, c.constraint_type, c.status, c.validated;
-- Alternative: list every PK/unique constraint on a table when the
-- constraint name in the error message is unfamiliar.
-- Uses an explicit ANSI join instead of the error-prone comma-join form.
SELECT
    c.owner,
    c.constraint_name,
    c.table_name,
    c.constraint_type,   -- 'P' = primary key, 'U' = unique
    LISTAGG(cc.column_name, ', ') WITHIN GROUP (ORDER BY cc.position) AS columns,
    c.status
FROM dba_constraints c
INNER JOIN dba_cons_columns cc
    ON cc.owner = c.owner
   AND cc.constraint_name = c.constraint_name
WHERE c.table_name = 'YOUR_TABLE_NAME' -- Replace with actual table
  AND c.constraint_type IN ('P', 'U')  -- Primary key or Unique
GROUP BY c.owner, c.constraint_name, c.table_name, c.constraint_type, c.status
ORDER BY c.constraint_type, c.constraint_name;
-- List values that occur more than once in the uniquely-constrained column.
-- NULLs are excluded up front: Oracle allows any number of NULLs under a
-- single-column unique constraint, so they can never cause ORA-00001.
SELECT column_name,
       COUNT(*) AS duplicate_count
FROM schema.table_name
WHERE column_name IS NOT NULL
GROUP BY column_name
HAVING COUNT(*) > 1
ORDER BY COUNT(*) DESC;
-- List duplicate key combinations for a composite unique constraint.
-- ROWIDTOCHAR is required: LISTAGG cannot aggregate the ROWID type directly
-- (inconsistent-datatype error); MIN(rowid) works because ROWID supports
-- ordering comparisons.
SELECT
    col1, col2, col3,
    COUNT(*) AS duplicate_count,
    MIN(rowid) AS keep_rowid,   -- candidate row to keep when deduplicating
    LISTAGG(ROWIDTOCHAR(rowid), ', ')
        WITHIN GROUP (ORDER BY rowid) AS all_rowids
FROM schema.table_name
GROUP BY col1, col2, col3
HAVING COUNT(*) > 1
ORDER BY duplicate_count DESC;
-- Locate the existing row(s) that already hold the key values your failed
-- INSERT/UPDATE tried to write; the ROWID identifies each row precisely.
SELECT rowid, col1, col2, col3
FROM schema.table_name
WHERE col1 = 'problem_value_1'
  AND col2 = 'problem_value_2'; -- Adjust based on your constraint columns
-- Spot "near duplicates": values that collide only after case-folding and
-- trimming. The grouping must be on the NORMALIZED value alone - grouping by
-- the original column as well can only ever find exact duplicates, never
-- case/whitespace variants, which defeats the purpose of this check.
SELECT
    UPPER(TRIM(original_column)) AS normalized,
    COUNT(*) AS total_rows,
    COUNT(DISTINCT original_column) AS distinct_variants,
    MIN(original_column) AS sample_variant_1,  -- two example spellings
    MAX(original_column) AS sample_variant_2
FROM schema.table_name
GROUP BY UPPER(TRIM(original_column))
HAVING COUNT(DISTINCT original_column) > 1  -- more than one raw spelling
ORDER BY total_rows DESC;
-- Profile insert activity hour by hour: wherever the row count exceeds the
-- distinct key count, duplicates were being generated in that window.
-- NOTE(review): COUNT(*) includes NULL-keyed rows while COUNT(DISTINCT ...)
-- ignores them, so NULL keys inflate potential_duplicates - confirm whether
-- the key column is nullable.
SELECT TO_CHAR(created_date, 'YYYY-MM-DD HH24') AS hour,
       COUNT(*) AS inserts,
       COUNT(DISTINCT unique_column) AS unique_values,
       COUNT(*) - COUNT(DISTINCT unique_column) AS potential_duplicates
FROM schema.table_name
WHERE created_date > SYSDATE - 7 -- last 7 days
GROUP BY TO_CHAR(created_date, 'YYYY-MM-DD HH24')
HAVING COUNT(*) - COUNT(DISTINCT unique_column) > 0
ORDER BY hour DESC;
-- Compare the highest existing ID against the sequence position.
-- The naive form (sequence.NEXTVAL next to MAX(...) and inside CASE) raises
-- ORA-02287 - sequence pseudocolumns are not allowed with aggregates or in
-- CASE - and every NEXTVAL call consumes a value. Read the sequence's
-- allocated high-water mark from the data dictionary instead (side-effect free).
SELECT
    t.max_existing_id,
    s.last_number AS next_sequence_value, -- upper bound of values already allocated (includes cache)
    CASE
        WHEN t.max_existing_id >= s.last_number THEN 'CONFLICT: Sequence behind data'
        ELSE 'OK: Sequence ahead of data'
    END AS status
FROM (SELECT MAX(id_column) AS max_existing_id FROM schema.table_name) t
CROSS JOIN (SELECT last_number
            FROM dba_sequences
            WHERE sequence_owner = 'SCHEMA'        -- Replace with actual owner
              AND sequence_name  = 'SEQUENCE_NAME' -- Replace with actual sequence
           ) s;
-- Find recent statements that failed with ORA-00001 (requires standard
-- auditing to be enabled for the relevant statements/objects).
-- DBA_FGA_AUDIT_TRAIL has no error-code column; the standard audit trail
-- records the Oracle error number in RETURNCODE.
SELECT
    timestamp,
    username,
    sql_text,
    returncode AS error_code
FROM dba_audit_trail
WHERE returncode = 1          -- ORA-00001
  AND timestamp > SYSDATE - 1 -- last 24 hours
ORDER BY timestamp DESC;

Solution 1: Handle Single Duplicate Record

Section titled “Solution 1: Handle Single Duplicate Record”
-- Remove duplicate rows, keeping the row with the lowest ROWID per key value.
-- Rows whose key is NULL are explicitly excluded: Oracle's unique constraints
-- permit any number of NULL keys, so such rows never violate the constraint
-- and deleting them would destroy valid data.
DELETE FROM schema.table_name
WHERE unique_column IS NOT NULL
  AND rowid NOT IN (
      SELECT MIN(rowid)
      FROM schema.table_name
      WHERE unique_column IS NOT NULL
      GROUP BY unique_column
  );
-- Composite version - keep the earliest ROWID per column combination.
-- Rows where ALL key columns are NULL are likewise allowed to repeat under a
-- composite unique constraint and are left untouched.
DELETE FROM schema.table_name t
WHERE NOT (t.col1 IS NULL AND t.col2 IS NULL AND t.col3 IS NULL)
  AND rowid NOT IN (
      SELECT MIN(rowid)
      FROM schema.table_name
      GROUP BY col1, col2, col3 -- Adjust to match constraint columns
  );
-- Verify no duplicate key combinations remain
SELECT col1, col2, col3, COUNT(*)
FROM schema.table_name
GROUP BY col1, col2, col3
HAVING COUNT(*) > 1;
-- Upsert in a single atomic statement: update the row when the key exists,
-- insert it otherwise. This avoids the check-then-insert race entirely.
MERGE INTO schema.table_name t
USING (
    SELECT 'new_value1' AS col1,
           'new_value2' AS col2,
           'data'       AS data_col
    FROM dual
) s
ON (t.col1 = s.col1 AND t.col2 = s.col2)
WHEN MATCHED THEN
    UPDATE SET data_col = s.data_col,
               last_updated = SYSDATE
WHEN NOT MATCHED THEN
    INSERT (col1, col2, data_col, created_date)
    VALUES (s.col1, s.col2, s.data_col, SYSDATE);
-- Alternative: update if exists, insert if not.
-- A COUNT-then-INSERT check is race-prone: another session can commit the
-- same key between the check and the insert and the block still dies with
-- ORA-00001. Try the UPDATE first, fall back to INSERT, and trap
-- DUP_VAL_ON_INDEX to close the remaining race window.
DECLARE
    v_rows NUMBER; -- rows touched by the initial UPDATE
BEGIN
    UPDATE schema.table_name
    SET data_column = 'new_data', last_updated = SYSDATE
    WHERE unique_column = 'your_value';
    v_rows := SQL%ROWCOUNT;

    IF v_rows = 0 THEN
        BEGIN
            INSERT INTO schema.table_name (unique_column, data_column, created_date)
            VALUES ('your_value', 'new_data', SYSDATE);
        EXCEPTION
            WHEN DUP_VAL_ON_INDEX THEN
                -- Another session inserted the key first; update its row instead.
                UPDATE schema.table_name
                SET data_column = 'new_data', last_updated = SYSDATE
                WHERE unique_column = 'your_value';
        END;
    END IF;
    COMMIT;
END;
/
-- Check the sequence position against existing data.
-- NEXTVAL cannot appear in the same query as an aggregate (ORA-02287), so
-- run two statements - and note the second one consumes a sequence value.
SELECT MAX(id_column) AS max_id FROM schema.table_name;
SELECT sequence_name.NEXTVAL AS current_seq_val FROM dual;
-- Push the sequence past the highest existing ID so future NEXTVAL calls
-- cannot collide with manually inserted keys.
DECLARE
    v_max_id  NUMBER; -- highest ID currently in the table
    v_seq_val NUMBER; -- current sequence position
    v_gap     NUMBER; -- how far the sequence must jump (plus buffer)
BEGIN
    SELECT NVL(MAX(id_column), 0) INTO v_max_id FROM schema.table_name;
    -- CURRVAL raises ORA-08002 unless NEXTVAL was already called in this
    -- session, so read the position with NEXTVAL (consumes one value).
    SELECT sequence_name.NEXTVAL INTO v_seq_val FROM dual;
    v_gap := v_max_id - v_seq_val + 100; -- 100-value safety buffer
    IF v_gap > 0 THEN
        EXECUTE IMMEDIATE 'ALTER SEQUENCE sequence_name INCREMENT BY ' || v_gap;
        SELECT sequence_name.NEXTVAL INTO v_seq_val FROM dual; -- take the jump
        EXECUTE IMMEDIATE 'ALTER SEQUENCE sequence_name INCREMENT BY 1'; -- restore normal stepping
    END IF;
END;
/
-- Alternative on 12c+: ALTER SEQUENCE sequence_name RESTART START WITH <max_id + 1>;
-- Alternative: Drop and recreate sequence (loses grants and invalidates dependents)
-- DROP SEQUENCE sequence_name;
-- CREATE SEQUENCE sequence_name START WITH max_id_value + 1;

Solution 3: Temporarily Disable Constraint

Section titled “Solution 3: Temporarily Disable Constraint”

Disable for Data Loading (Use with Caution)

Section titled “Disable for Data Loading (Use with Caution)”
-- Disable constraint (allows duplicates temporarily)
-- WARNING: if the constraint is enforced by its own unique index, DISABLE
-- drops that index and ENABLE rebuilds it - expensive on large tables.
-- While disabled, ANY session can insert duplicates, not just this load;
-- do this only in a maintenance window.
ALTER TABLE schema.table_name DISABLE CONSTRAINT constraint_name;
-- Perform your data operation
-- INSERT INTO schema.table_name VALUES (...);
-- Clean up duplicates before re-enabling
-- [Execute duplicate removal queries from above]
-- Re-enable constraint
-- (ENABLE fails if duplicate key values still exist - clean up first)
ALTER TABLE schema.table_name ENABLE CONSTRAINT constraint_name;
-- Validate constraint is working
ALTER TABLE schema.table_name MODIFY CONSTRAINT constraint_name VALIDATE;
// Java example with proper duplicate handling.
// NOTE(review): the up-front existence check narrows but does NOT close the
// race window - two sessions can both pass the check before either inserts.
// The SQLException handler below is the authoritative path; the check just
// provides a friendlier fast path.
public class CustomerService {

    /**
     * Inserts a customer, translating ORA-00001 into a domain exception.
     *
     * @param customer the customer to persist
     * @throws DuplicateCustomerException if the email or other unique data
     *         collides with an existing row
     */
    public void createCustomer(Customer customer) throws DuplicateCustomerException {
        if (customerExists(customer.getEmail())) {
            throw new DuplicateCustomerException("Customer with email " +
                customer.getEmail() + " already exists");
        }
        try {
            insertCustomer(customer);
        } catch (SQLException e) {
            if (e.getErrorCode() != 1) { // not ORA-00001
                // SQLException is checked and not in this method's throws
                // clause - the original "throw e;" did not compile. Wrap it.
                throw new RuntimeException("Database error creating customer", e);
            }
            if (e.getMessage().contains("UK_CUSTOMER_EMAIL")) {
                throw new DuplicateCustomerException("Email address already in use", e);
            } else if (e.getMessage().contains("PK_CUSTOMER_ID")) {
                // Sequence out of sync with data - retry once with a fresh ID.
                customer.setId(generateNewId());
                try {
                    insertCustomer(customer);
                } catch (SQLException retryFailure) {
                    throw new DuplicateCustomerException("Duplicate customer data", retryFailure);
                }
            } else {
                throw new DuplicateCustomerException("Duplicate customer data", e);
            }
        }
    }

    /** Returns true when a customer row with this email already exists. */
    private boolean customerExists(String email) {
        String sql = "SELECT COUNT(*) FROM customers WHERE email = ?";
        try (PreparedStatement pstmt = connection.prepareStatement(sql)) {
            pstmt.setString(1, email);
            try (ResultSet rs = pstmt.executeQuery()) {
                rs.next();
                return rs.getInt(1) > 0;
            }
        } catch (SQLException e) {
            throw new RuntimeException("Error checking customer existence", e);
        }
    }
}
-- Returns TRUE when p_email is already used by a customer other than
-- p_customer_id. Pass NULL for p_customer_id on insert; pass the row's own
-- id on update so the row does not collide with itself.
CREATE OR REPLACE FUNCTION check_duplicate_customer(
    p_email       VARCHAR2,
    p_customer_id NUMBER DEFAULT NULL
) RETURN BOOLEAN AS
    v_matches NUMBER;
BEGIN
    SELECT COUNT(*)
    INTO v_matches
    FROM customers
    WHERE email = p_email
      AND (p_customer_id IS NULL OR customer_id != p_customer_id);
    RETURN v_matches > 0;
END;
/
-- Create trigger for duplicate prevention
-- NOTE(review): check_duplicate_customer queries CUSTOMERS - the very table
-- this row trigger fires on - so multi-row INSERT/UPDATE statements will hit
-- ORA-04091 (mutating table); only single-row INSERT ... VALUES is exempt.
-- It is also not concurrency-safe: uncommitted rows in other sessions are
-- invisible to the query. Rely on the declarative unique constraint for
-- actual enforcement; this trigger only supplies a friendlier error message.
CREATE OR REPLACE TRIGGER trg_prevent_duplicate_email
BEFORE INSERT OR UPDATE ON customers
FOR EACH ROW
BEGIN
IF check_duplicate_customer(:NEW.email, :NEW.customer_id) THEN
RAISE_APPLICATION_ERROR(-20001,
'Email address ' || :NEW.email || ' already exists for another customer');
END IF;
END;
/

2. Implement Duplicate Detection and Cleanup

Section titled “2. Implement Duplicate Detection and Cleanup”
-- Tracking table for detected unique-key violations.
-- Identifier columns are sized VARCHAR2(128): Oracle 12.2+ allows object
-- names up to 128 bytes (30 was the pre-12.2 limit), and 30 would truncate
-- or reject longer names. Widening is backward compatible.
CREATE TABLE duplicate_violations (
    violation_id      NUMBER PRIMARY KEY,
    table_name        VARCHAR2(128),
    constraint_name   VARCHAR2(128),
    duplicate_columns VARCHAR2(500),   -- comma-separated key column list
    duplicate_values  VARCHAR2(1000),  -- optional sample of offending values
    record_count      NUMBER,          -- duplicated key combinations found
    detected_date     TIMESTAMP DEFAULT SYSTIMESTAMP,
    cleaned_date      TIMESTAMP,       -- set once the duplicates are resolved
    status            VARCHAR2(20) DEFAULT 'DETECTED'
);
-- Sequence supplying violation_id values
CREATE SEQUENCE duplicate_violation_seq;
-- Procedure to detect duplicates across critical tables.
-- Scans every ENABLED primary-key/unique constraint on the listed tables,
-- counts key combinations occurring more than once, and logs any hits to
-- DUPLICATE_VIOLATIONS. Intended to be driven from a scheduler job.
CREATE OR REPLACE PROCEDURE detect_duplicates AS
-- One entry per constraint: where it lives plus its comma-separated key columns.
-- NOTE(review): identifier fields are sized 30; on 12.2+ object names may be
-- up to 128 bytes and would overflow these buffers - confirm target version.
TYPE constraint_rec IS RECORD (
owner VARCHAR2(30),
table_name VARCHAR2(30),
constraint_name VARCHAR2(30),
columns VARCHAR2(1000)
);
TYPE constraint_tab IS TABLE OF constraint_rec;
v_constraints constraint_tab;
v_sql VARCHAR2(4000); -- dynamically built per-constraint duplicate-count query
v_count NUMBER; -- number of duplicated key combinations for one constraint
BEGIN
-- Get all unique constraints on important tables
SELECT owner, table_name, constraint_name,
LISTAGG(column_name, ', ') WITHIN GROUP (ORDER BY position) as columns
BULK COLLECT INTO v_constraints
FROM (
SELECT c.owner, c.table_name, c.constraint_name, cc.column_name, cc.position
FROM dba_constraints c, dba_cons_columns cc
WHERE c.constraint_type IN ('P', 'U')
AND c.status = 'ENABLED'
AND c.owner = cc.owner
AND c.constraint_name = cc.constraint_name
AND c.table_name IN ('CUSTOMERS', 'ORDERS', 'PRODUCTS') -- Adjust table list
)
GROUP BY owner, table_name, constraint_name;
-- Check each constraint for violations.
-- Identifiers are concatenated into dynamic SQL; they come from the data
-- dictionary (trusted), but DBMS_ASSERT.ENQUOTE_NAME would harden this.
FOR i IN 1..v_constraints.COUNT LOOP
v_sql := 'SELECT COUNT(*) FROM (SELECT ' || v_constraints(i).columns ||
', COUNT(*) FROM ' || v_constraints(i).owner || '.' || v_constraints(i).table_name ||
' GROUP BY ' || v_constraints(i).columns || ' HAVING COUNT(*) > 1)';
EXECUTE IMMEDIATE v_sql INTO v_count;
IF v_count > 0 THEN
-- Log duplicate violation
INSERT INTO duplicate_violations (
violation_id, table_name, constraint_name, duplicate_columns, record_count
) VALUES (
duplicate_violation_seq.NEXTVAL, v_constraints(i).table_name,
v_constraints(i).constraint_name, v_constraints(i).columns, v_count
);
END IF;
END LOOP;
COMMIT;
END;
/
-- Register a scheduler job that runs the duplicate scan every night at 02:00.
BEGIN
    DBMS_SCHEDULER.CREATE_JOB(
        job_name        => 'DETECT_DUPLICATES',
        job_type        => 'STORED_PROCEDURE',
        job_action      => 'detect_duplicates',
        repeat_interval => 'FREQ=DAILY; BYHOUR=2', -- Run at 2 AM daily
        enabled         => TRUE
    );
END;
/
-- Canonical form for email addresses: lower-case, no surrounding whitespace.
CREATE OR REPLACE FUNCTION standardize_email(p_email VARCHAR2) RETURN VARCHAR2 AS
BEGIN
    RETURN LOWER(TRIM(p_email));
END;
/
-- Canonical form for phone numbers: digits only (strips every non-numeric character).
CREATE OR REPLACE FUNCTION standardize_phone(p_phone VARCHAR2) RETURN VARCHAR2 AS
BEGIN
    RETURN REGEXP_REPLACE(p_phone, '[^0-9]', '');
END;
/
-- Normalize incoming customer data so formatting differences (case, spaces)
-- cannot create near-duplicate rows that slip past the unique constraints.
CREATE OR REPLACE TRIGGER trg_standardize_customer_data
BEFORE INSERT OR UPDATE ON customers
FOR EACH ROW
BEGIN
    :NEW.email := standardize_email(:NEW.email);
    :NEW.phone := standardize_phone(:NEW.phone);
    -- Proper-case names, trimmed. (Note INITCAP lower-cases interior
    -- capitals, e.g. 'McDonald' -> 'Mcdonald'.)
    :NEW.first_name := INITCAP(TRIM(:NEW.first_name));
    :NEW.last_name := INITCAP(TRIM(:NEW.last_name));
END;
/
-- One-off bulk cleanup: rewrite existing rows into the standardized format
-- so they match what trg_standardize_customer_data enforces going forward.
CREATE OR REPLACE PROCEDURE cleanup_customer_data AS
    v_updated PLS_INTEGER; -- rows changed by the UPDATE
BEGIN
    UPDATE customers
    SET email      = standardize_email(email),
        phone      = standardize_phone(phone),
        first_name = INITCAP(TRIM(first_name)),
        last_name  = INITCAP(TRIM(last_name))
    WHERE email != standardize_email(email)
       OR phone != standardize_phone(phone)
       OR first_name != INITCAP(TRIM(first_name))
       OR last_name != INITCAP(TRIM(last_name));
    -- SQL%ROWCOUNT must be captured BEFORE COMMIT: COMMIT is itself a SQL
    -- statement and resets the implicit-cursor attributes, so the original
    -- "report after commit" ordering printed the wrong count.
    v_updated := SQL%ROWCOUNT;
    COMMIT;
    DBMS_OUTPUT.PUT_LINE('Updated ' || v_updated || ' customer records');
END;
/
-- Use meaningful constraint names for better error handling.
-- The name appears verbatim in the ORA-00001 message
-- ("unique constraint (SCHEMA.UK_CUSTOMERS_EMAIL) violated"), which lets
-- application code map the violation to a specific user-facing error.
ALTER TABLE customers
ADD CONSTRAINT uk_customers_email UNIQUE (email);
ALTER TABLE orders
ADD CONSTRAINT uk_orders_tracking_number UNIQUE (tracking_number);
-- Create composite unique constraints where appropriate
-- (the column pair must be unique together; each column may repeat on its own)
ALTER TABLE order_items
ADD CONSTRAINT uk_order_items_order_product UNIQUE (order_id, product_id);
-- Consider using surrogate keys with natural key unique constraints
-- NOTE(review): illustrative alternative definition - do not run both this
-- CREATE TABLE and the ALTER above against the same schema (the table and
-- constraint name would already exist).
CREATE TABLE customers (
customer_id NUMBER PRIMARY KEY, -- Surrogate key
email VARCHAR2(255) NOT NULL,
phone VARCHAR2(20),
created_date DATE DEFAULT SYSDATE,
CONSTRAINT uk_customers_email UNIQUE (email)
);
// Implement proper exception handling for constraint violations
public class DatabaseHelper {

    /**
     * Maps an ORA-00001 SQLException onto a user-facing BusinessException
     * based on the constraint name embedded in the error message - this is
     * why meaningful constraint names matter.
     *
     * @param e         the SQLException carrying the ORA-00001 message
     * @param operation the operation to retry on a primary-key collision
     */
    public void handleUniqueConstraintViolation(SQLException e, String operation) {
        String errorMessage = e.getMessage().toLowerCase();
        if (errorMessage.contains("uk_customers_email")) {
            throw new BusinessException("This email address is already registered. " +
                "Please use a different email or try logging in.");
        } else if (errorMessage.contains("pk_customers")) {
            // Primary-key collision: sequence is behind the data - retry with a new ID.
            retryWithNewId(operation);
        } else if (errorMessage.contains("uk_orders_tracking")) {
            throw new BusinessException("This tracking number already exists. " +
                "Please check the order or generate a new tracking number.");
        } else {
            throw new BusinessException("This record already exists in the system. " +
                "Please check your data and try again.");
        }
    }

    /**
     * Oracle equivalent of INSERT ... ON DUPLICATE KEY UPDATE: a single
     * atomic MERGE, immune to the check-then-insert race.
     */
    public void upsertCustomer(Customer customer) {
        String mergeSql = """
            MERGE INTO customers c
            USING (SELECT ? as email, ? as first_name, ? as last_name FROM dual) s
            ON (c.email = s.email)
            WHEN MATCHED THEN
            UPDATE SET first_name = s.first_name, last_name = s.last_name,
            updated_date = SYSDATE
            WHEN NOT MATCHED THEN
            INSERT (customer_id, email, first_name, last_name, created_date)
            VALUES (customer_seq.NEXTVAL, s.email, s.first_name, s.last_name, SYSDATE)
            """;
        try (PreparedStatement pstmt = connection.prepareStatement(mergeSql)) {
            pstmt.setString(1, customer.getEmail());
            pstmt.setString(2, customer.getFirstName());
            pstmt.setString(3, customer.getLastName());
            pstmt.executeUpdate();
        } catch (SQLException e) {
            // SQLException is checked but was neither caught nor declared in
            // the original, so it did not compile; wrap to keep the signature.
            throw new RuntimeException("Customer upsert failed", e);
        }
    }
}
  • ORA-02291 - Integrity constraint violated (parent key not found)
  • ORA-02290 - Check constraint violated
  • ORA-01400 - Cannot insert NULL into column
  • ORA-12899 - Value too large for column
  1. ✓ Identify the constraint and affected columns
  2. ✓ Find the duplicate data causing the conflict
  3. ✓ Decide whether to remove duplicates or update existing records
  4. ✓ Fix sequence issues if dealing with primary keys
  5. ✓ Implement proper error handling in applications
  6. ✓ Add validation to prevent future duplicates
-- Find constraint details
-- (quick-reference template; requires access to DBA_CONS_COLUMNS)
SELECT constraint_name, table_name, column_name
FROM dba_cons_columns
WHERE constraint_name = 'CONSTRAINT_NAME_FROM_ERROR';
-- Find duplicates
SELECT column_name, COUNT(*)
FROM table_name
GROUP BY column_name
HAVING COUNT(*) > 1;
-- Remove duplicates (keep first)
-- CAUTION: exclude NULL keys first - Oracle allows repeated NULLs under a
-- unique constraint, so NULL-keyed rows are not violations and must survive.
DELETE FROM table_name
WHERE rowid NOT IN (SELECT MIN(rowid) FROM table_name GROUP BY unique_column);
-- Fix sequence
-- (the NEXTVAL fetch consumes one value at the enlarged increment, jumping
-- the sequence forward; the final ALTER restores normal stepping)
ALTER SEQUENCE seq_name INCREMENT BY large_number;
SELECT seq_name.NEXTVAL FROM dual;
ALTER SEQUENCE seq_name INCREMENT BY 1;
-- Use MERGE for upsert
-- (template only - fill in the UPDATE SET and INSERT clauses)
MERGE INTO target USING source ON (condition)
WHEN MATCHED THEN UPDATE SET ...
WHEN NOT MATCHED THEN INSERT ...;
  • Use meaningful constraint names - Easier error handling
  • Implement proper validation - Check before insert/update
  • Standardize data - Consistent format prevents near-duplicates
  • Use MERGE statements - Handle insert/update scenarios
  • Monitor for duplicates - Regular detection and cleanup
  • Handle gracefully - Provide meaningful error messages to users