declare
    -- OCI Object Storage target. C_NAMESPACE holds a placeholder
    -- (Japanese for "replace with your namespace") that must be edited
    -- before running.
    C_REGION constant    varchar2(20) := 'us-ashburn-1';
    -- Sized with CHAR semantics: the 12-character multi-byte placeholder
    -- is 36 bytes in AL32UTF8 and overflows varchar2(20) under the
    -- default BYTE length semantics (ORA-06502 at block entry).
    C_NAMESPACE constant varchar2(100 char) := 'ネームスペースに置き換える';
    C_BUCKET constant    varchar2(20) := 'bricks';
    C_FILENAME constant  varchar2(20) := 'bricks';
    -- Query to export; q'~...~' quoting avoids escaping embedded quotes.
    C_SQL constant varchar2(4000) := q'~select * from bricks where rownum < 1000001~';
    -- APEX export plumbing.
    l_context apex_exec.t_context; 
    l_export  apex_data_export.t_export;
    -- Source (export) and compressed (gzip) payloads.
    l_blob       blob;
    l_blob_src   blob;
    l_filename   varchar2(80);
    -- UTL_FILE chunked-write state; 32767 is the UTL_FILE buffer maximum.
    l_file     utl_file.file_type;
    l_buffer   raw(32767);
    l_amount   binary_integer := 32767;
    l_pos      integer := 1;
    l_blob_len integer;
    -- Fully-qualified Object Storage URI of the uploaded object.
    l_path     varchar2(400);
    -- Emit a timestamped progress line via DBMS_OUTPUT and tag the
    -- database session client identifier with the current phase, so
    -- progress is observable from outside (e.g. v$session).
    procedure set_checkpoint(ids varchar2)
    is
        l_message varchar2(4000);
    begin
        l_message := systimestamp || ': ' || ids;
        dbms_output.put_line(l_message);
        dbms_session.set_identifier('csv_export-' || ids);
    end set_checkpoint;
begin
    SET_CHECKPOINT('start');
    -- An APEX session is required before APEX_EXEC / APEX_DATA_EXPORT
    -- can be used from plain PL/SQL.
    apex_session.create_session(
        p_app_id => 100
        , p_page_id => 1
        , p_username => 'APEXDEV'
    );
    SET_CHECKPOINT('apex_exec.open_query_context');
    l_context := apex_exec.open_query_context(
        p_location     => apex_exec.c_location_local_db
        , p_sql_query  => C_SQL
    );
$IF true $THEN
    -- CSV branch: export the query context as CSV into a BLOB.
    SET_CHECKPOINT('apex_data_export.export');
    l_export := apex_data_export.export(
        p_context     => l_context
        , p_format    => apex_data_export.c_format_csv
        , p_file_name => C_FILENAME
    );
    l_blob_src := l_export.content_blob;
$ELSE
    -- JSON branch (currently disabled by the $IF true selector):
    -- serialize the context with APEX_JSON, then convert the CLOB
    -- to a UTF-8 BLOB.
    -- NOTE(review): if re-enabled, the upload below still uses a
    -- '.csv.gz' file name — confirm that is intended.
    SET_CHECKPOINT('apex_json');
    apex_json.initialize_clob_output;
    apex_json.open_object;
    apex_json.write_context(
        p_name => 'bricks'
        , p_context => l_context
    );
    apex_json.close_object;
    SET_CHECKPOINT('clob_to_blob');
    l_blob_src := wwv_flow_utilities.clob_to_blob(
        p_clob => apex_json.get_clob_output
        , p_charset => 'AL32UTF8'
        , p_include_bom => false
    );
$END
    SET_CHECKPOINT('utl_compress.lz_compress');
    -- gzip-compatible compression of the export payload.
    l_blob := utl_compress.lz_compress(l_blob_src);
    SET_CHECKPOINT('utl_file');
    l_filename := C_FILENAME || '.csv.gz';
    l_blob_len := dbms_lob.getlength(l_blob);
    -- Stage the compressed BLOB in DATA_PUMP_DIR, writing binary
    -- chunks of up to 32767 bytes (the UTL_FILE maximum).
    l_file := utl_file.fopen('DATA_PUMP_DIR',l_filename,'wb',32767);
    l_pos := 1;
    l_amount := 32767;
    while l_pos <= l_blob_len
    loop
        -- DBMS_LOB.READ is IN OUT on l_amount: it is updated with the
        -- bytes actually read, so the final short chunk advances l_pos
        -- correctly and the loop terminates.
        dbms_lob.read(l_blob, l_amount, l_pos, l_buffer);
        utl_file.put_raw(l_file, l_buffer, TRUE);
        l_pos := l_pos + l_amount;
    end loop;
    utl_file.fclose(l_file);
    SET_CHECKPOINT('dbms_cloud.put_object');
    l_path := 'https://objectstorage.' || C_REGION || '.oraclecloud.com/n/' || C_NAMESPACE
        || '/b/' || C_BUCKET || '/o/' || l_filename;
    dbms_cloud.put_object(
        credential_name => 'DEF_CRED'
        , object_uri => l_path
        , directory_name => 'DATA_PUMP_DIR'
        , file_name => l_filename
    );
    SET_CHECKPOINT('end');
    apex_exec.close( l_context );
    -- Remove the staged local copy only after a successful upload.
    dbms_cloud.delete_file('DATA_PUMP_DIR',l_filename);
exception
    when others then
        -- Close the OS file handle if the failure happened between
        -- fopen and fclose; the original handler leaked it for the
        -- remainder of the session.
        if utl_file.is_open(l_file) then
            utl_file.fclose(l_file);
        end if;
        apex_exec.close( l_context );
        raise;
end;
/