package org.syrup.sql;
import org.syrup.Context;
import org.syrup.Link;
import org.syrup.LogEntry;
import org.syrup.PTask;
import org.syrup.Result;
import org.syrup.Task;
import org.syrup.Workflow;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Hashtable;
import java.util.logging.Logger;
/**
* Utility functions to execute Syrup PTasks using JDBC.
*
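* <p>
* The typical lifecycle is: a Worker claims an executable PTask via
* <code>start</code>, performs the actual work, and then commits the outcome
* via <code>commit_result</code>. The sketch below is illustrative only; the
* <code>executor</code> object stands in for hypothetical Worker-side logic
* and is not part of this class.
*
* <pre>
* Context c = executionFunctions.start(pt, workerUrl, con);
* if (c != null)
* {
*     Result r = executor.execute(c); // hypothetical Worker logic
*     PTask done = executionFunctions.commit_result(r, con);
* }
* </pre>
*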
* @author Robbert van Dalen
*/
public class ExecutionFunctions extends Functions
{
static final String COPYRIGHT = "Copyright 2005 Robbert van Dalen."
+ "At your option, you may copy, distribute, or make derivative works under "
+ "the terms of The Artistic License. This License may be found at "
+ "http://www.opensource.org/licenses/artistic-license.php. "
+ "THERE IS NO WARRANTY; USE THIS PRODUCT AT YOUR OWN RISK.";
private final static Logger logger = Logger.getLogger("org.syrup.sql.ExecutionFunctions");
/**
* Constructor for the ExecutionFunctions object
*
* @param sqlImpl
* The SQLImpl that is held by the Function instance.
*/
public ExecutionFunctions(SQLImpl sqlImpl)
{
super(sqlImpl);
}
/**
* Starts the execution of a PTask by executing SQL statements over a
* Connection. Returns the associated Context from the WorkSpace.
*
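* <p>
* A minimal usage sketch (the Worker address below is a hypothetical URL;
* <code>pt</code> and <code>con</code> are caller-supplied):
*
* <pre>
* Context c = start(pt, "http://worker.example:8080/", con);
* if (c == null)
* {
*     // pt was not executable, or was already retained by another Worker
* }
* </pre>
*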
* @param pt
* The PTask to be executed.
* @param w
* The Worker address (URL) that requested execution.
* @param con
* The SyrupConnection over which SQL statements are executed.
* @return The associated Context, or <code>null</code> if the PTask is not
* executable or is already retained by another Worker.
*/
protected Context start(PTask pt, String w, SyrupConnection con)
throws Exception
{
PreparedStatement s = null;
ResultSet result = null;
try
{
s = con.prepareStatementFromCache(sqlImpl().sqlStatements().checkIsExecutableTaskStatement());
s.setString(1, pt.key());
result = s.executeQuery();
if (result.next())
{
// The PTask has proven to be executable and is not retained by
// another Worker.
// Record the requesting Worker's address against the PTask.
PreparedStatement s2 = con.prepareStatementFromCache(sqlImpl().sqlStatements().updateWorkerStatement());
s2.setString(1, w);
s2.setString(2, pt.key());
s2.executeUpdate();
sqlImpl().loggingFunctions().log(pt.key(), LogEntry.STARTED, con);
Context c = sqlImpl().queryFunctions().readContext(pt, con);
con.commit();
return c;
}
}
finally
{
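// Roll back anything left uncommitted (a no-op if commit() above
// succeeded) and release the ResultSet; statements obtained from the
// cache are left open for reuse.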
con.rollback();
sqlImpl().genericFunctions().close(result);
}
return null;
}
/**
* Commits the Result of an execution by executing SQL statements over a
* Connection. Returns the executed PTask that produced the Result.
*
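* <p>
* A minimal usage sketch, assuming <code>r</code> was produced by executing
* the Context returned earlier by <code>start</code>:
*
* <pre>
* PTask executed = commit_result(r, con);
* // 'executed' reflects the committed, post-execution state of the PTask
* </pre>
*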
* @param r
* The Result to be committed.
* @param con
* The SyrupConnection over which SQL statements are executed.
* @return The executed PTask that produced the Result.
*/
protected PTask commit_result(Result r, SyrupConnection con) throws Exception
{
try
{
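// logAction is a bitmask: LogEntry.ENDED is always set, and the IN_1,
// IN_2, OUT_1 and OUT_2 flags are OR-ed in below as inputs are consumed
// and outputs are produced (e.g. ENDED | IN_1 | OUT_1).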
int logAction = LogEntry.ENDED;
String kk = r.context().task().key();
// Update the PTask's surrounding Context if the first input was consumed.
if (r.in_1_consumed() && r.context().in_1_link() != null)
{
sqlImpl().updateFunctions().updateInLink(kk, false, null, con);
logAction |= LogEntry.IN_1;
}
// Update the PTask's surrounding Context if the second input was consumed.
if (r.in_2_consumed() && r.context().in_2_link() != null)
{
sqlImpl().updateFunctions().updateInLink(kk, true, null, con);
logAction |= LogEntry.IN_2;
}
// Update the PTask's surrounding Context if a first output was returned.
if (r.out_1_result() != null && r.context().out_1_link() != null)
{
sqlImpl().updateFunctions().updateOutLink(kk, false, r.out_1_result(), con);
logAction |= LogEntry.OUT_1;
}
// Update the PTask's surrounding Context if a second output was returned.
if (r.out_2_result() != null && r.context().out_2_link() != null)
{
sqlImpl().updateFunctions().updateOutLink(kk, true, r.out_2_result(), con);
logAction |= LogEntry.OUT_2;
}
sqlImpl().loggingFunctions().log(r.context().task().key(), logAction, con);
boolean isParent = r.context().task().isParent();
// If the Result is of type Workflow, create additional (child)
// PTasks.
if (r instanceof Workflow)
{
Workflow w = (Workflow) r;
Task[] tt = w.tasks();
Link[] ll = w.links();
Hashtable tkeyMap = new Hashtable();
// For each newly created PTask there is an associated key.
// These keys are used to connect Links later.
for (int i = 0; i < tt.length; i++)
{
String key = sqlImpl().creationFunctions().newTask(tt[i], r.context().task(), con);
tkeyMap.put(tt[i], key);
}
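// tkeyMap now maps each child Task to the key of its newly created
// PTask, e.g. { tt[0] -> key0, tt[1] -> key1 }.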
for (int j = 0; j < ll.length; j++)
{
sqlImpl().creationFunctions().newLink(ll[j], tkeyMap, con);
}
// Remember the Tasks currently connected to this PTask, so they
// can be re-checked after rewiring.
String in_task_1 = sqlImpl().queryFunctions().readInTask(kk, false, con);
String in_task_2 = sqlImpl().queryFunctions().readInTask(kk, true, con);
String out_task_1 = sqlImpl().queryFunctions().readOutTask(kk, false, con);
String out_task_2 = sqlImpl().queryFunctions().readOutTask(kk, true, con);
// Rewire the Links as specified by the Workflow's Context bindings.
sqlImpl().updateFunctions().rewireInLink(kk, false, w.in_1_binding(), tkeyMap, con);
sqlImpl().updateFunctions().rewireInLink(kk, true, w.in_2_binding(), tkeyMap, con);
sqlImpl().updateFunctions().rewireOutLink(kk, false, w.out_1_binding(), tkeyMap, con);
sqlImpl().updateFunctions().rewireOutLink(kk, true, w.out_2_binding(), tkeyMap, con);
// For each new PTask, check if it is done.
for (int k = 0; k < tt.length; k++)
{
String kkey = (String) tkeyMap.get(tt[k]);
sqlImpl().updateFunctions().checkAndUpdateDone(kkey, con);
}
// Check whether the rewired Tasks have become done.
sqlImpl().updateFunctions().checkAndUpdateDone(in_task_1, con);
sqlImpl().updateFunctions().checkAndUpdateDone(in_task_2, con);
sqlImpl().updateFunctions().checkAndUpdateDone(out_task_1, con);
sqlImpl().updateFunctions().checkAndUpdateDone(out_task_2, con);
// For each new PTask, check if it is executable.
for (int k = 0; k < tt.length; k++)
{
String kkey = (String) tkeyMap.get(tt[k]);
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(kkey, con);
}
// Check whether the rewired Tasks have become executable.
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(in_task_1, con);
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(in_task_2, con);
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(out_task_1, con);
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(out_task_2, con);
// The executed PTask has now become a parent.
isParent = true;
}
sqlImpl().updateFunctions().checkAndUpdateDone(kk, con);
sqlImpl().updateFunctions().checkAndUpdateTargetExecutable(kk, con);
// 'Touch' the executed PTask by updating the modification time and
// number of modifications.
PreparedStatement s3 = con.prepareStatementFromCache(sqlImpl().sqlStatements().updateTaskModificationStatement());
s3.setLong(1, System.currentTimeMillis());
s3.setBoolean(2, isParent);
s3.setString(3, kk);
s3.executeUpdate();
con.commit();
// Return the new PTask state to indicate that the execution
// was successful.
return sqlImpl().queryFunctions().readPTask(kk, con);
}
finally
{
con.rollback();
}
}
}