In the application requirement, the current and required functionality are as follows:
Current scenario:-
(I)A MySQL stored proc is called from the shell script as given in below code.
(ii)The procedure generates the transaction dump file by querying the relevant table
And using the syntax "
SELECT * FROM table INTO OUTFILE <file_name>".
(iii)A single large file is generated based on volume of transactions.
(iv) A cron is scheduled to generate the files at 1 hour interval.
Required scenario:-
It is required that, at any given run of the cron job, the maximum size of a file
generated by the stored procedure should not exceed a limit, say 500MB. If it exceeds
the limit, the rest of the records must be written into a second / third file and so on.
That is, it is required to control the file generation from the time the
procedure is called.
Note: In the code given below, the place where the stored procedure is called is marked in bold red.
I presume the change has to be made there.
Code:
#The shell script is shown below:-
#----------------------------------------------------------------------------------------
#!/bin/bash
# Driver script: loads environment from SDF_CONFIG.DAT and invokes the MySQL
# stored procedure that generates the hourly offline-transaction dump files.
#
# TEST_HOME      : Path of application home
# SDF_CONFIG.DAT : Configuration file defining variables for schema, port, host,
#                  MySQL home, application home, file dump path etc.
# MYSQL_HOME     : MySQL home path defined in SDF_CONFIG.DAT
# ROOT_PSWD      : Root password as defined in SDF_CONFIG.DAT
# MYSQL_HOST     : MySQL host IP as defined in SDF_CONFIG.DAT
# MYSQL_PORT     : Port as defined in SDF_CONFIG.DAT
# DUMP_HOST_PATH : Path of the generated transaction dump file, defined in SDF_CONFIG.DAT
TEST_HOME=/root/Test

# Load configuration; abort with a non-zero status if it is missing.
# (The original had a second, conflicting shebang line here and exited with
# status 0 on this failure path, which would make the cron run look successful.)
if test -f "$TEST_HOME/Program/SysConfigs/SDF_CONFIG.DAT"; then
    source "$TEST_HOME/Program/SysConfigs/SDF_CONFIG.DAT"
else
    echo "Error: SDF_CONFIG.DAT not found" >&2
    exit 1
fi

DG=$(date +%d-%m-%Y_%T) # date of generation (kept for parity with the original script)

# Call the MySQL procedure that generates the transaction dump files.
# Required: generate each file with a configured maximum size (fetched from the
# database or hardcoded); when the size is exceeded, continue writing into a new
# file (again of max size, and so on) until the data is exhausted.
# NOTE(review): passing the password on the command line exposes it via `ps`;
# prefer --defaults-extra-file or MYSQL_PWD — confirm with ops before changing.
"$MYSQL_HOME/bin/mysql" -e 'Call transaction.offline_transaction_proc()' \
    -u root -p"$ROOT_PSWD" -h "$MYSQL_HOST" -P "$MYSQL_PORT" --skip-column-names

echo "File generation Completed !!"
exit 0
#---------------------------------------------------------------------------------------
########################################################################################
The MySQL stored procedure being called is :-
----------------------------------------------------------------------------------------
DELIMITER $$

-- Schema corrected from the original's misspelled `tranasction` so the DROP
-- actually targets the procedure the shell script calls (transaction.*).
DROP PROCEDURE IF EXISTS `transaction`.`offline_transaction_proc`$$

-- Dumps offline transactions into hourly files via SELECT ... INTO OUTFILE,
-- advancing a persisted window start (configuration_detail param_id=4) one hour
-- at a time until it reaches the run timestamp recorded in param_id=5.
-- All `--` comments carry a trailing space: MySQL requires whitespace after
-- `--` for it to be a comment at all, so the original `--text` forms were
-- syntax errors, not comments.
CREATE PROCEDURE `offline_transaction_proc`()
BEGIN
    DECLARE v_reason_code INT;                     -- 0 = ok, -99 = SQL error (int(2) display width is deprecated)
    DECLARE v_offline_file_path_name VARCHAR(750); -- quoted OUTFILE target path
    DECLARE v_offline_query VARCHAR(5000);         -- head of the dynamic SELECT
    DECLARE v_csv_path VARCHAR(512);               -- base dump directory (param_id=7)
    DECLARE v_from_date VARCHAR(30);               -- window start as read from param_id=4
    DECLARE v_from_date_temp VARCHAR(30);          -- sliding window start
    DECLARE v_to_date VARCHAR(30);                 -- window end (param_id=5)
    -- Removed unused declares from the original: v_offline_where_clause (the
    -- code only ever used the session variable @v_offline_where_clause) and
    -- v_current_date (never referenced).

    label: BEGIN
        -- Abort the whole run on any SQL error, leaving v_reason_code as a marker.
        DECLARE EXIT HANDLER FOR SQLEXCEPTION SET v_reason_code = -99;

        -- Datetime from which to generate the transaction dump.
        SELECT param_value INTO v_from_date
        FROM configuration.configuration_detail
        WHERE param_id = 4;

        -- Record the datetime up to which transactions are picked up for this run.
        UPDATE configuration.configuration_detail
        SET param_value = NOW()
        WHERE param_id = 5;

        -- Fetch the datetime up to which to dump.
        -- NOTE(review): the original read this and v_csv_path from
        -- rt_configuration.configuration_detail while every other statement
        -- used configuration.configuration_detail; unified to the latter —
        -- confirm against the deployed schemas.
        SELECT param_value INTO v_to_date
        FROM configuration.configuration_detail
        WHERE param_id = 5;

        -- Fetch the CSV directory in which to store the files.
        SELECT param_value INTO v_csv_path
        FROM configuration.configuration_detail
        WHERE param_id = 7;

        SET v_from_date_temp = v_from_date;

        -- Emit one file per hour until the window catches up with v_to_date.
        WHILE DATE_ADD(v_from_date_temp, INTERVAL 1 HOUR) < v_to_date
        DO
            SET v_reason_code = 0;

            -- Build the dynamic SELECT ... INTO OUTFILE; the file name embeds
            -- the end of the hourly window so each iteration writes a new file.
            SET v_offline_query = "SELECT * FROM offline_transaction WHERE dump_time ";
            SET v_offline_file_path_name = CONCAT(v_csv_path, "/OFFLINE/OFFLINE_",
                DATE_FORMAT(DATE_ADD(v_from_date_temp, INTERVAL 1 HOUR), '%Y%m%d.%H.%i.%s'),
                ".TXT");
            SET v_offline_file_path_name = CONCAT("'", v_offline_file_path_name, "'");
            -- Half-open range [from, from + 1h): the original used BETWEEN,
            -- which is inclusive on both ends, so a row whose dump_time fell
            -- exactly on an hour boundary was written into two consecutive files.
            SET @v_offline_where_clause = CONCAT(">= '", v_from_date_temp,
                "' AND dump_time < '",
                DATE_ADD(v_from_date_temp, INTERVAL 1 HOUR), "' INTO OUTFILE ");
            SET @v_offline_final_query = CONCAT(v_offline_query, @v_offline_where_clause,
                v_offline_file_path_name, " FIELDS TERMINATED BY ',' ");

            PREPARE offline_stmt FROM @v_offline_final_query;
            EXECUTE offline_stmt;
            DEALLOCATE PREPARE offline_stmt;

            -- Persist the new window start before advancing, so a crashed run
            -- resumes after the last completed file instead of re-dumping it.
            UPDATE configuration.configuration_detail
            SET param_value = DATE_ADD(v_from_date_temp, INTERVAL 1 HOUR)
            WHERE param_id = 4;
            SET v_from_date_temp = DATE_ADD(v_from_date_temp, INTERVAL 1 HOUR);

            -- Drop and recreate the partition corresponding to the dumped hour
            -- in offline_transaction, purging the data just written out.
            CALL partition_proc('offline');
        END WHILE; -- continue until the window start reaches v_to_date

        -- Record the completion time of this dump run.
        UPDATE configuration.configuration_detail
        SET param_value = NOW()
        WHERE param_id = 6;
    END;
END$$

DELIMITER ;
----------------------------------------------------------------------------------------