Consider yourself in a position where you must run a data conversion on a table with more than 500,000 records. A single UPDATE may be the simple solution — but what if you need a join with another huge table? What if the table has a child table that needs the same conversion? What if you want to run the conversion in a controlled manner? The answer is to process the data in batches of, say, 1,000 or 10,000 records and commit after every batch; this is where the BULK COLLECT ... LIMIT clause comes in handy.
DECLARE
   -- Employees whose empno still needs converting to the new
   -- "<deptno>.<em_empno>" format (rows already converted are excluded
   -- by the <> predicate, so the script is safe to re-run).
   CURSOR cur_empno IS
      SELECT DISTINCT em.empno,
             ch.deptno || '.' || em.em_empno new_empno
        FROM emp em, dept ch
       WHERE em.deptno = ch.deptno
         AND em_empno <> ch.deptno || '.' || em.em_empno;

   TYPE typ_em_empno IS TABLE OF emp.empno%TYPE;
   TYPE typ_em_empno_new IS TABLE OF emp.empno%TYPE;

   larr_em_empno     typ_em_empno;
   larr_em_empno_new typ_em_empno_new;

   ln_limit PLS_INTEGER := 10000;  -- batch size: rows fetched and committed per pass
   ssql     VARCHAR2(500);         -- label of the current step, echoed in log messages

   -- ORA-24381 is raised by FORALL ... SAVE EXCEPTIONS when one or more
   -- rows fail; SQL%BULK_EXCEPTIONS is only meaningful for this error,
   -- so catch it by name rather than with WHEN OTHERS.
   ex_bulk_errors EXCEPTION;
   PRAGMA EXCEPTION_INIT(ex_bulk_errors, -24381);
BEGIN
   OPEN cur_empno;
   LOOP
      FETCH cur_empno BULK COLLECT
         INTO larr_em_empno, larr_em_empno_new LIMIT ln_limit;
      BEGIN
         sp_insert_log('Total in Cursor: ' || larr_em_empno.COUNT);

         ssql := 'Emp Update';
         FORALL i IN 1 .. larr_em_empno.COUNT SAVE EXCEPTIONS
            UPDATE emp
               SET empno = larr_em_empno_new(i)
             WHERE empno = larr_em_empno(i);
         sp_insert_log(ssql || ' Total rows processed = ' || SQL%ROWCOUNT);

         ssql := 'EntryID Update';
         FORALL i IN 1 .. larr_em_empno.COUNT SAVE EXCEPTIONS
            UPDATE emp_det
               SET empno = larr_em_empno_new(i)
             WHERE empno = larr_em_empno(i);
         sp_insert_log(ssql || ' Total rows processed = ' || SQL%ROWCOUNT);

         ssql := 'Emp_Addr Update';
         FORALL i IN 1 .. larr_em_empno.COUNT SAVE EXCEPTIONS
            UPDATE emp_addr
               SET empno = larr_em_empno_new(i)
             WHERE empno = larr_em_empno(i);
         sp_insert_log(ssql || ' Total rows processed = ' || SQL%ROWCOUNT);

         ssql := 'Update_empno_Log Insert';
         FORALL i IN 1 .. larr_em_empno.COUNT SAVE EXCEPTIONS
            INSERT INTO update_empno_log
               (em_empno, em_empno_new, updated)
            VALUES
               (larr_em_empno(i), larr_em_empno_new(i), SYSDATE);
         sp_insert_log(ssql || ' Total rows processed = ' || SQL%ROWCOUNT);
      EXCEPTION
         WHEN ex_bulk_errors THEN
            ROLLBACK;  -- undo the whole batch so parent/child stay consistent
            sp_insert_log(ssql || ' Total No of Errors:' ||
                          SQL%BULK_EXCEPTIONS.COUNT);
            FOR i IN 1 .. SQL%BULK_EXCEPTIONS.COUNT LOOP
               -- ERROR_INDEX identifies the failing element of the bound
               -- collections; the loop counter i merely walks the error list.
               sp_insert_log('Error #:' || SQL%BULK_EXCEPTIONS(i).ERROR_INDEX ||
                             ' Err code:' || SQL%BULK_EXCEPTIONS(i).ERROR_CODE ||
                             ' empno:' ||
                             larr_em_empno(SQL%BULK_EXCEPTIONS(i).ERROR_INDEX) ||
                             ' Err Msg:' ||
                             SQLERRM(-SQL%BULK_EXCEPTIONS(i).ERROR_CODE));
            END LOOP;
            RAISE;  -- abort the conversion; outer handler logs the failure
      END;

      COMMIT;  -- parent and child rows for this batch commit together

      -- Test %NOTFOUND AFTER processing: it is set on the final, possibly
      -- partial, fetch, and that batch has already been handled above.
      EXIT WHEN cur_empno%NOTFOUND;
   END LOOP;
   CLOSE cur_empno;
EXCEPTION
   WHEN OTHERS THEN
      IF cur_empno%ISOPEN THEN
         CLOSE cur_empno;  -- don't leak the cursor on failure
      END IF;
      sp_insert_log('ERROR' || SQLCODE || ' ' || substr(SQLERRM, 1, 200));
      dbms_output.put_line('ERROR' || SQLCODE || ' ' ||
                           substr(SQLERRM, 1, 200));
END;
Keeping the records in a cursor means the join cost is paid once per batch rather than once per row. Committing in batches also keeps the data consistent: each batch is committed only after its child-table rows have been converted as well.
Supporting tables for the above script
-- Generic run log written by sp_insert_log (one row per message).
CREATE TABLE update_track_log
(
    rowno   NUMBER                        NOT NULL,
    text    VARCHAR2(4000),
    updated DATE          DEFAULT SYSDATE NOT NULL
);
-- Audit trail of each empno conversion: old value, new value, and when.
CREATE TABLE update_empno_log
(
    em_empno     VARCHAR2(30),
    em_empno_new VARCHAR2(30),
    updated      DATE DEFAULT SYSDATE NOT NULL
);