package adlzanchetta.hydrology.hydrographAnaliser;
import adlzanchetta.chronoData.DataSequence;
import adlzanchetta.hydrology.hydrographAnaliser.hydrographElements.Peak;
import adlzanchetta.hydrology.hydrographAnaliser.hydrographElements.RecessionCurve;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
/**
* Static helper methods for analysing a hydrograph: identification of
* superior and inferior peaks, extraction of recession curves and
* estimation of the recession constant k3.
*
* @author Worker
*/
public abstract class HydrographAnaliser {
// number of samples on each side of a candidate peak used to confirm its tendency
private static final int peakWindowSize = 4;
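/**
* Estimates the recession constant k3 over the whole data sequence as the
* mean k3 of every recession curve found. A minimal usage sketch, assuming
* a previously populated DataSequence named "seq" (hypothetical variable):
* <pre>
* double k3 = HydrographAnaliser.getK3(seq);
* if (!Double.isNaN(k3)) System.out.println("k3 = " + k3);
* </pre>
*
* @param dataSeq_arg sequence of observed discharge data
* @return mean k3 of the identified recession curves, or NaN if none
*/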
public static double getK3(DataSequence dataSeq_arg){
return (HydrographAnaliser.getK3(dataSeq_arg, null, null));
}
public static double getK3(DataSequence dataSeq_arg, Date startDate_arg){
return (HydrographAnaliser.getK3(dataSeq_arg, startDate_arg, null));
}
public static double getK3(DataSequence dataSeq_arg,
Date startDate_arg,
Date endDate_arg){
ArrayList<RecessionCurve> recCurveList;
Iterator<RecessionCurve> recCurveIt;
RecessionCurve curRecCurve;
double k3Sum;
int k3Count;
// 1 - get all recession curves
// 2 - accumulate the k3 value of each curve
// 3 - return the mean k3
// 1
recCurveList = HydrographAnaliser.getAllRecessionCurves(dataSeq_arg,
startDate_arg,
endDate_arg);
if (recCurveList == null) return (Double.NaN);
// 2
k3Sum = 0;
k3Count = 0;
recCurveIt = recCurveList.iterator();
while(recCurveIt.hasNext()){
curRecCurve = recCurveIt.next();
if (Double.isNaN(curRecCurve.getK3Value())) continue;
k3Sum += curRecCurve.getK3Value();
k3Count++;
}
// 3 - simple (unweighted) mean of the curves' k3 values
if (k3Count == 0) return (Double.NaN);
return (k3Sum / k3Count);
}
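/**
* Extracts every recession curve in the sequence: each stretch between a
* superior peak and the following inferior peak, with k3 fitted for each
* curve. A minimal usage sketch, assuming a populated DataSequence "seq":
* <pre>
* ArrayList&lt;RecessionCurve&gt; curves =
* HydrographAnaliser.getAllRecessionCurves(seq);
* </pre>
*
* @param dataSeq_arg sequence of observed discharge data
* @return list of analysed recession curves, or null on invalid input
*/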
public static ArrayList<RecessionCurve> getAllRecessionCurves(DataSequence dataSeq_arg){
return(HydrographAnaliser.getAllRecessionCurves(dataSeq_arg,
null,
null));
}
public static ArrayList<RecessionCurve> getAllRecessionCurves(DataSequence dataSeq_arg,
Date startDate_arg,
Date endDate_arg){
ArrayList<RecessionCurve> recCurveList;
ArrayList<Peak<Double>> allPeaks;
Iterator<Peak<Double>> itPeaks;
Peak<Double> curPeak, prevPeak;
RecessionCurve curRecCurve;
// 0 - basic check
// 1 - identify all peaks
// 2 - for each superior peak followed by an inferior peak:
// 2a - get its recession curve
// 2b - analyse it
// 3 - return recession curve list
// basic check - list not null
if (dataSeq_arg == null) return (null);
// basic check - start date inside data seq.
if ((startDate_arg != null) && (!HydrographAnaliser.isInside(dataSeq_arg,
startDate_arg))){
return (null);
}
// basic check - end date inside data seq.
if ((endDate_arg != null) && (!HydrographAnaliser.isInside(dataSeq_arg,
endDate_arg))){
return (null);
}
// 1
allPeaks = HydrographAnaliser.identifyPeaks(dataSeq_arg,
startDate_arg,
endDate_arg);
if (allPeaks == null) return (null);
recCurveList = new ArrayList<>();
// 2
itPeaks = allPeaks.iterator();
prevPeak = null;
while(itPeaks.hasNext()){
curPeak = itPeaks.next();
if ((curPeak.getPeakType() == Peak.INFERIOR) &&
(prevPeak != null) &&
(prevPeak.getPeakType() == Peak.SUPERIOR)){
try{
//2a
curRecCurve = new RecessionCurve(dataSeq_arg,
prevPeak.getPeakDate(),
curPeak.getPeakDate());
//2b
curRecCurve.determineK3((short)0);
recCurveList.add(curRecCurve);
} catch (Exception exp){
System.err.println("EXCEPTION: " + exp.getMessage());
}
}
prevPeak = curPeak;
}
// 3
return(recCurveList);
}
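/**
* Identifies superior (crest) and inferior (trough) peaks in two passes:
* first every local extremum ("mini peak") is located, then each candidate
* is confirmed by comparing the mean tendencies of the data windows before
* and after it.
*
* @param dataSeq_arg sequence of observed discharge data
* @param startDate_arg first date to evaluate (null: start of sequence)
* @param endDate_arg last date to evaluate (null: end of sequence)
* @return confirmed peaks in chronological order, or null if the sequence
* has no dates
*/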
private static ArrayList<Peak<Double>> identifyPeaks(DataSequence dataSeq_arg,
Date startDate_arg,
Date endDate_arg){
Date startDate, currentDate, endDate;
ArrayList<Peak<Double>> returnList;
Peak<Double> currentPeak;
int currentDatePos;
boolean isMiniPeak;
Date[] allDates;
double curData;
returnList = new ArrayList<>();
// define starting and ending dates
if(startDate_arg == null){
startDate = dataSeq_arg.getFirstDataDate();
} else {
startDate = startDate_arg;
}
if (endDate_arg == null) {
endDate = dataSeq_arg.getLastDataDate();
} else {
endDate = endDate_arg;
}
// identify the position of the first date not before startDate
allDates = dataSeq_arg.getAllDataDates();
if ((allDates == null) || (allDates.length <= 0)) return (null);
currentDatePos = 0;
while ((currentDatePos < allDates.length) &&
(allDates[currentDatePos].before(startDate))){
currentDatePos++;
}
// no data at or after startDate
if (currentDatePos >= allDates.length) return (returnList);
// go date by date
currentDate = allDates[currentDatePos];
while(!currentDate.after(endDate)){
curData = dataSeq_arg.getData(allDates[currentDatePos]).doubleValue();
// determine if is minipeak
isMiniPeak = false;
if ((currentDatePos > 0) && (currentDatePos <= (allDates.length - 2))){
isMiniPeak = HydrographAnaliser.isMiniPeak(dataSeq_arg.getData(allDates[currentDatePos - 1]).doubleValue(),
curData,
dataSeq_arg.getData(allDates[currentDatePos + 1]).doubleValue());
} else if ((currentDatePos == 0) && (allDates.length > 1)) {
isMiniPeak = HydrographAnaliser.isMiniPeak(Double.NaN,
curData,
dataSeq_arg.getData(allDates[currentDatePos + 1]).doubleValue());
} else if ((currentDatePos > 0) && (currentDatePos == (allDates.length - 1))) {
isMiniPeak = HydrographAnaliser.isMiniPeak(dataSeq_arg.getData(allDates[currentDatePos - 1]).doubleValue(),
curData,
Double.NaN);
} else {
isMiniPeak = false;
}
if (isMiniPeak){
// verify whether the mini peak is a true peak
if (HydrographAnaliser.isTruePeakSuperior(allDates,
currentDatePos,
dataSeq_arg)){
currentPeak = new Peak<>(curData,
allDates[currentDatePos],
Peak.SUPERIOR);
returnList.add(currentPeak);
} else if (HydrographAnaliser.isTruePeakInferior(allDates,
currentDatePos,
dataSeq_arg)) {
currentPeak = new Peak<>(curData,
allDates[currentDatePos],
Peak.INFERIOR);
returnList.add(currentPeak);
}
}
// go next date
if (currentDatePos < (allDates.length - 1)){
currentDatePos += 1;
currentDate = allDates[currentDatePos];
} else {
break;
}
}
// TODO - review - use utility methods to convert the ArrayList to a Vector
return (returnList);
}
private static boolean isTruePeakSuperior(Date[] dates_arg,
int datePos_arg,
DataSequence dataSeq_arg){
return (HydrographAnaliser.isTruePeak(dates_arg,
datePos_arg,
dataSeq_arg,
Peak.SUPERIOR));
}
private static boolean isTruePeakInferior(Date[] dates_arg,
int datePos_arg,
DataSequence dataSeq_arg){
return (HydrographAnaliser.isTruePeak(dates_arg,
datePos_arg,
dataSeq_arg,
Peak.INFERIOR));
}
/**
* Checks whether the mini peak at the given position is a true peak by
* comparing the mean tendencies of the data windows before and after it.
*
* @param dates_arg all dates of the evaluated sequence
* @param datePos_arg position of the candidate peak in dates_arg
* @param dataSeq_arg sequence of observed discharge data
* @param peakType_arg Peak.SUPERIOR: superior peak; Peak.INFERIOR: inferior peak
* @return true if the candidate was confirmed as a peak of the given type
*/
private static boolean isTruePeak(Date[] dates_arg,
int datePos_arg,
DataSequence dataSeq_arg,
byte peakType_arg){
int beforeWindowSize, afterWindowSize, subCount;
TendencyWindow beforeWindow, afterWindow;
Date tmpDate;
// define effective window sizes
if (datePos_arg > (HydrographAnaliser.peakWindowSize - 1)){
beforeWindowSize = HydrographAnaliser.peakWindowSize;
} else {
beforeWindowSize = datePos_arg + 1;
}
if (dates_arg.length < (datePos_arg + HydrographAnaliser.peakWindowSize)) {
afterWindowSize = dates_arg.length - datePos_arg;
} else {
afterWindowSize = HydrographAnaliser.peakWindowSize;
}
// alloc windows
beforeWindow = new TendencyWindow(beforeWindowSize);
afterWindow = new TendencyWindow(afterWindowSize);
// fills before window
subCount = 0;
while(subCount < beforeWindowSize){
tmpDate = dates_arg[datePos_arg - (beforeWindowSize - 1) + subCount];
beforeWindow.setDate(subCount, tmpDate, dataSeq_arg.getData(tmpDate).doubleValue());
subCount++;
}
// fills after window
subCount = 0;
while(subCount < (afterWindowSize)){
tmpDate = dates_arg[datePos_arg + subCount];
afterWindow.setDate(subCount, tmpDate, dataSeq_arg.getData(tmpDate).doubleValue());
subCount++;
}
// update tendencies
afterWindow.refreshTendency();
beforeWindow.refreshTendency();
// compare tendencies
if (peakType_arg == Peak.SUPERIOR){
if (((beforeWindow.mainTendency > 0) && (afterWindow.mainTendency < 0)) ||
((Double.isNaN(beforeWindow.mainTendency)) && (afterWindow.mainTendency < 0)) ||
((beforeWindow.mainTendency > 0) && Double.isNaN(afterWindow.mainTendency))){
System.out.println("Sup. peak at " + HydrographAnaliser.readDate(dates_arg[datePos_arg]));
return (true);
} else {
return (false);
}
} else if (peakType_arg == Peak.INFERIOR) {
if (((beforeWindow.mainTendency < 0) && (afterWindow.mainTendency > 0)) ||
((Double.isNaN(beforeWindow.mainTendency)) && (afterWindow.mainTendency > 0)) ||
((beforeWindow.mainTendency < 0) && Double.isNaN(afterWindow.mainTendency))){
System.out.println("Inf. peak at " + HydrographAnaliser.readDate(dates_arg[datePos_arg]));
return (true);
} else {
return (false);
}
} else {
return (false);
}
}
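/**
* A mini peak is a strict local extremum among three consecutive values.
* At the sequence boundaries the missing neighbour is passed as NaN, and
* any change in value counts as a candidate there.
*
* @param prev_arg previous value (NaN at the first position)
* @param midl_arg evaluated value
* @param aftr_arg following value (NaN at the last position)
* @return true if the middle value is a candidate peak
*/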
private static boolean isMiniPeak(double prev_arg, double midl_arg, double aftr_arg){
if ((Double.isNaN(prev_arg)) && (midl_arg != aftr_arg)){
return (true);
} else if ((Double.isNaN(aftr_arg)) && (midl_arg != prev_arg)) {
return (true);
} else if ((midl_arg > prev_arg) && (midl_arg > aftr_arg)){
return (true);
} else if ((midl_arg < prev_arg) && (midl_arg < aftr_arg)) {
return (true);
} else {
return (false);
}
}
// TODO - this method should be in ChronoData package
private static boolean isInside(DataSequence dataSeq_arg, Date internalDate_arg){
Date lastDate, firstDate;
firstDate = dataSeq_arg.getFirstDataDate();
lastDate = dataSeq_arg.getLastDataDate();
// boundary dates count as inside
if ((!internalDate_arg.before(firstDate)) && (!internalDate_arg.after(lastDate))){
return (true);
} else {
return (false);
}
}
/**
* Formats a date as "HH:mm dd/MM/yyyy".
* TODO - this must be removed from here
* @param toRead_arg date to format
* @return the formatted date, or "NULL" if the argument is null
*/
public static String readDate(Date toRead_arg){
SimpleDateFormat dateFormatter;
if (toRead_arg != null){
dateFormatter = new SimpleDateFormat("HH:mm dd/MM/yyyy");
return(dateFormatter.format(toRead_arg));
} else {
return("NULL");
}
}
/**
* Parses a date string in the "HH:mm dd/MM/yyyy" format.
* @param toRead_arg date string in [HH:mm dd/MM/yyyy]
* @return the parsed date, or null if the argument is null or malformed
*/
public static Date getDate(String toRead_arg){
SimpleDateFormat dateFormatter;
if (toRead_arg != null){
dateFormatter = new SimpleDateFormat("HH:mm dd/MM/yyyy");
try{
return(dateFormatter.parse(toRead_arg));
} catch (ParseException exp) {
System.err.println("ParseException: " + exp.getMessage());
return(null);
}
} else {
return(null);
}
}
}
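/**
* Small window of (date, value) pairs used to estimate the local tendency
* of a hydrograph: mainTendency holds the mean slope (value change per
* millisecond) between consecutive entries, or NaN when no valid pair is
* available.
*/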
class TendencyWindow{
boolean tendRefreshed;
Double[] evaluatedData; // TODO - need to be generalized
Date[] evaluatedDate;
double mainTendency;
public TendencyWindow(){
this(4);
}
public TendencyWindow(int windowSize_arg){
tendRefreshed = true;
mainTendency = Double.NaN;
this.evaluatedDate = new Date[windowSize_arg];
this.evaluatedData = new Double[windowSize_arg];
}
public boolean setDate(int position_arg, Date date_arg, double data_arg){
// accept only positions inside the window
if ((position_arg >= 0) && (position_arg < this.evaluatedData.length)){
this.evaluatedData[position_arg] = data_arg;
this.evaluatedDate[position_arg] = date_arg;
this.tendRefreshed = false;
return (true);
} else {
return (false);
}
}
/**
* Recomputes mainTendency as the mean slope of consecutive valid pairs.
* TODO 1 - make NULL more tolerant
* TODO 2 - evaluate the possibility of doing a linear regression
*/
public void refreshTendency(){
int countAll, countFound;
double accumTendency, currentTendency;
double deltaD;
long deltaT;
accumTendency = 0;
countFound = 0;
for(countAll = 1; countAll < this.evaluatedDate.length; countAll++){
// skip pairs with missing dates or data
if (this.evaluatedDate[countAll] == null) continue;
if (this.evaluatedDate[countAll - 1] == null) continue;
if (this.evaluatedData[countAll] == null) continue;
if (this.evaluatedData[countAll - 1] == null) continue;
deltaT = this.evaluatedDate[countAll].getTime() - this.evaluatedDate[countAll - 1].getTime();
if (deltaT == 0) continue; // avoid division by zero on duplicated dates
deltaD = this.evaluatedData[countAll] - this.evaluatedData[countAll - 1];
currentTendency = deltaD / deltaT;
accumTendency += currentTendency;
countFound += 1;
}
if (countFound == 0){
this.mainTendency = Double.NaN;
} else {
this.mainTendency = accumTendency / countFound;
}
this.tendRefreshed = true;
}
}