Here is a generic example of loading a CSV file using APEX and external tables.
Using the external table feature is orders of magnitude faster than parsing the BLOB stream!
1. Create a generic CSV-style external table definition like the following. It can read any CSV file (up to 50 columns).
-- Generic, reusable external table for ad-hoc CSV loads.
-- All 50 columns are untyped VARCHAR2(4000); type conversion is left to
-- whatever query later reads from this table. The LOCATION clause is a
-- placeholder -- file_to_collection repoints it per upload via ALTER TABLE.
CREATE TABLE ADHOC_CSV_EXT
(
C1 VARCHAR2(4000),
C2 VARCHAR2(4000),
C3 VARCHAR2(4000),
...
C50 VARCHAR2(4000)
)
ORGANIZATION EXTERNAL
( TYPE ORACLE_LOADER
-- Directory object the CSV files will be dropped into; also a placeholder.
DEFAULT DIRECTORY SOME_DIR
ACCESS PARAMETERS
(records delimited BY newline
fields
terminated BY ','
optionally enclosed BY '"'
-- lrtrim: strip leading/trailing whitespace from each field.
lrtrim
-- Rows with fewer than 50 fields load with the trailing columns NULL.
missing field VALUES are NULL
)
LOCATION ('foo.csv')
);
2. Create some utility APIs to read/write the files.
-- Utility APIs for moving an uploaded BLOB onto the database file system
-- and loading it into an APEX collection via the ADHOC_CSV_EXT external table.
CREATE OR REPLACE PACKAGE htmldb_file_util
AS
-- Write p_blob to file p_filename in the Oracle directory object
-- p_directory (directory name is upper-cased internally).
PROCEDURE blob_to_file (
p_blob IN BLOB,
p_directory IN VARCHAR2,
p_filename IN VARCHAR2
);
-- Point ADHOC_CSV_EXT at p_directory:p_filename and load its rows into
-- APEX collection p_collection (replacing the collection if it exists).
-- Access to the shared external table is serialized with DBMS_LOCK.
-- When p_delete_file is TRUE the file is removed after a successful load.
PROCEDURE file_to_collection (
p_directory IN VARCHAR2,
p_filename IN VARCHAR2,
p_collection IN VARCHAR2,
p_delete_file IN BOOLEAN DEFAULT TRUE
);
END htmldb_file_util;
/
CREATE OR REPLACE PACKAGE BODY htmldb_file_util
AS
-- Write p_blob to file p_filename in Oracle directory object p_directory,
-- streaming it in 32,000-byte chunks through UTL_FILE.
PROCEDURE blob_to_file (
    p_blob      IN BLOB,
    p_directory IN VARCHAR2,
    p_filename  IN VARCHAR2
)
IS
    l_fp     utl_file.file_type;
    l_amt    INTEGER DEFAULT 32000;
    l_offset INTEGER DEFAULT 1;
    l_length INTEGER DEFAULT NVL(dbms_lob.getlength(p_blob),0);
BEGIN
    l_fp := utl_file.fopen(upper(p_directory),p_filename,'w',32760);
    -- Must be <= : with the strict < comparison a 1-byte BLOB wrote
    -- nothing, and any BLOB whose length is (k * 32000) + 1 lost its
    -- final byte (LOB offsets are 1-based).
    WHILE (l_offset <= l_length)
    LOOP
        -- dbms_lob.substr returns at most l_amt bytes; the last chunk
        -- is naturally shorter. TRUE flushes each chunk to disk.
        utl_file.put_raw(l_fp,dbms_lob.substr(p_blob,l_amt,l_offset),TRUE);
        l_offset := l_offset + l_amt;
    END LOOP;
    utl_file.fclose(l_fp);
EXCEPTION
    WHEN OTHERS THEN
        -- Don't leak the OS file handle if fopen succeeded but a
        -- later write failed.
        IF (utl_file.is_open(l_fp))
        THEN
            utl_file.fclose(l_fp);
        END IF;
        RAISE;
END blob_to_file;

-- Point ADHOC_CSV_EXT at p_directory:p_filename, load all 50 columns into
-- APEX collection p_collection, then optionally delete the file.
-- Raises -20000 if the serialization lock cannot be acquired within 1s.
PROCEDURE file_to_collection (
    p_directory   IN VARCHAR2,
    p_filename    IN VARCHAR2,
    p_collection  IN VARCHAR2,
    p_delete_file IN BOOLEAN DEFAULT TRUE
)
IS
    c_lock_name   CONSTANT VARCHAR2(100) := 'HTMLDB';
    l_lock_handle VARCHAR2(100);
    l_retval      NUMBER;
    l_sql         VARCHAR2(1000);
    l_collection  VARCHAR2(100) := upper(p_collection);
    -- Validate the directory name as a SQL identifier before splicing it
    -- into dynamic DDL; raises ORA-44003 on anything malformed/injected.
    l_directory   VARCHAR2(130) := sys.dbms_assert.simple_sql_name(upper(p_directory));
BEGIN
    -- The adhoc_csv_ext external table can only "read" one file
    -- at a time. If multiple APEX sessions try to upload files at
    -- the same time, they would step on each other's toes.
    --
    -- So use DBMS_LOCK to "serialize" access to the shared resource by
    -- requesting an exclusive named lock.
    --
    -- Ensure that the lock is released as soon as possible.
    dbms_lock.allocate_unique(c_lock_name,l_lock_handle);
    l_retval := dbms_lock.request(l_lock_handle,dbms_lock.x_mode,1);
    IF (l_retval != 0)
    THEN
        raise_application_error(-20000,'Error uploading file. Please try again in a few moments');
    END IF;
    -- Repoint the external table at the uploaded file. The filename is a
    -- quoted literal here, so double any embedded single quotes.
    l_sql := 'alter table adhoc_csv_ext location ('||l_directory||':'''
             ||replace(p_filename,'''','''''')||''')';
    EXECUTE IMMEDIATE l_sql;
    IF (apex_collection.collection_exists(l_collection))
    THEN
        apex_collection.delete_collection(l_collection);
    END IF;
    apex_collection.create_collection_from_query(l_collection,'select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16, c17, c18, c19, c20, c21, c22, c23, c24, c25, c26, c27, c28, c29, c30, c31, c32, c33, c34, c35, c36, c37, c38, c39, c40, c41, c42, c43, c44, c45, c46, c47, c48, c49, c50 from adhoc_csv_ext');
    l_retval := dbms_lock.release(l_lock_handle);
    IF (l_retval != 0)
    THEN
        raise_application_error(-20000,'Unexpected internal error occurred');
    END IF;
    IF (p_delete_file)
    THEN
        utl_file.fremove(upper(p_directory),p_filename);
    END IF;
EXCEPTION WHEN OTHERS THEN
    -- Best-effort release so a failed load doesn't block other sessions
    -- (releasing a lock we never acquired just returns a non-zero code).
    l_retval := dbms_lock.release(l_lock_handle);
    -- NOTE(review): this COMMIT persists any partial collection work
    -- before re-raising; kept from the original -- confirm it is intended.
    COMMIT;
    RAISE;
END file_to_collection;
END htmldb_file_util;
/
3. On an APEX page with a File Browse page item, create the following After Submit process, conditional on some Upload button being pressed:
declare
l_blob blob;
l_filename varchar2(500);
begin
-- Fetch the just-uploaded file from the APEX upload view by the
-- name stored in the File Browse item P1_FILENAME.
-- NOTE(review): raises NO_DATA_FOUND if nothing was uploaded -- the
-- exception block below re-raises after cleanup.
select blob_content,filename
into l_blob,l_filename
from apex_application_files
where name=:P1_FILENAME;
-- Stage the BLOB as a real OS file, then load it into the collection.
htmldb_file_util.blob_to_file(l_blob,'TMP_DIR',l_filename);
htmldb_file_util.file_to_collection('TMP_DIR',l_filename,'MY_COLLECTION');
-- Remove the upload row once it has been processed.
delete apex_application_files
where name=:P1_FILENAME;
exception when others then
-- On any failure still purge the upload row so retries start clean,
-- then re-raise so APEX reports the error to the user.
delete apex_application_files
where name=:P1_FILENAME;
commit;
raise;
end;
4. That's it. MY_COLLECTION should now contain the contents of the file in the corresponding columns.
Hope this helps.