I have a CSV file with 2,000,000 records that is stored in a table (BLOB column). I wrote the following block:
DECLARE
  v_doc          BLOB;
  v_file_content CLOB;
  zip_files      as_zip.file_list;
BEGIN
  -- Get the file corresponding to the process in progress
  SELECT file_content
  INTO   v_doc
  FROM   int_dat_journals_cv027_files
  WHERE  file_name = 'in_gl_journals_20230322101527_v3.zip'  -- this value is dynamic
  AND    id_proceso_oic = '1';                               -- this value is dynamic

  -- Get the list of files inside the .zip file
  zip_files := as_zip.get_file_list(v_doc);

  -- Iterate over each file in the .zip file
  FOR i IN zip_files.first .. zip_files.last LOOP
    -- Convert the BLOB file to a CLOB
    SELECT blob_to_clob_fn(as_zip.get_file(v_doc, zip_files(i)))
    INTO   v_file_content
    FROM   dual;
    --dbms_output.put_line(v_file_content);

    -- Iterate line by line, splitting on CHR(10); each line looks like 'a,b,c,,,f,g'
    FOR linea IN (
      SELECT TRIM(column_value) l
      FROM   dual,
             xmltable ( ( '"'
                          || replace(v_file_content, CHR(10), '","')
                          || '"' ) )
    ) LOOP
      dbms_output.put_line(linea.l);
    END LOOP;
  END LOOP;
EXCEPTION
  WHEN OTHERS THEN
    dbms_output.put_line('ERROR: ' || sqlerrm);
    dbms_output.put_line(dbms_utility.format_error_backtrace);
END;
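
For reference, the xmltable split works by turning the whole CLOB into a comma-separated list of quoted XQuery string literals, one per line. A standalone query against a hypothetical two-line sample string shows what the inner loop sees:

-- hypothetical sample; each CHR(10) becomes the boundary between rows
SELECT TRIM(column_value) AS l
FROM   xmltable ( ( '"'
                    || replace('a,b,c' || CHR(10) || 'd,e,f', CHR(10), '","')
                    || '"' ) );
-- returns two rows: 'a,b,c' and 'd,e,f'

Note that this trick only works while no line contains a double quote or an ampersand, because the concatenated text is parsed as an XQuery expression.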
Specifically, I need to optimize this section so it can process millions of records:

FOR linea IN (
  SELECT TRIM(column_value) l
  FROM   dual,
         xmltable ( ( '"' || replace(v_file_content, CHR(10), '","') || '"' ) )
) LOOP
  dbms_output.put_line(linea.l);
END LOOP;
How can it be optimized?
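
One alternative I am considering is to walk the CLOB directly with dbms_lob.instr / dbms_lob.substr, so the second delimited CLOB that xmltable needs is never built. This is only a minimal sketch, assuming CHR(10) line endings and lines shorter than 32767 bytes; the variable names are illustrative:

DECLARE
  v_file_content CLOB;            -- one extracted CSV file, as in the block above
  v_len          PLS_INTEGER;
  v_pos          PLS_INTEGER := 1;
  v_nl           PLS_INTEGER;
  v_linea        VARCHAR2(32767);
BEGIN
  v_len := dbms_lob.getlength(v_file_content);
  WHILE v_pos <= v_len LOOP
    -- find the next CHR(10); 0 means we are on the last line
    v_nl := dbms_lob.instr(v_file_content, CHR(10), v_pos);
    IF v_nl = 0 THEN
      v_nl := v_len + 1;
    END IF;
    v_linea := dbms_lob.substr(v_file_content, v_nl - v_pos, v_pos);
    -- instead of dbms_output, the idea would be to collect v_linea into a
    -- PL/SQL table and insert it in batches with FORALL
    v_pos := v_nl + 1;
  END LOOP;
END;

The idea would then be to accumulate the parsed lines in a collection and insert them with FORALL in batches, rather than printing them one by one with dbms_output. Is this the right direction, or is there a better way?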