this.past_partitions.addAll(element.getPartitions());
if (trace.val) LOG.trace("Current Vertex: " + element);
Statement cur_catalog_stmt = element.getCatalogItem();
int cur_catalog_stmt_index = element.getQueryCounter();
MarkovGraph markov = (MarkovGraph)this.getGraph();
// At our current vertex we need to gather all of our neighbors
// and get unique Statements that we could be executing next
Collection<MarkovVertex> next_vertices = markov.getSuccessors(element);
if (next_vertices == null || next_vertices.isEmpty()) {
if (debug.val) LOG.debug("No succesors were found for " + element + ". Halting traversal");
return;
}
if (trace.val) LOG.trace("Successors: " + next_vertices);
// Step #1
// Get all of the unique Statement+StatementInstanceIndex pairs for the vertices
// that are adjacent to our current vertex
for (MarkovVertex next : next_vertices) {
Statement next_catalog_stmt = next.getCatalogItem();
int next_catalog_stmt_index = next.getQueryCounter();
// Sanity Check: If this vertex is the same Statement as the current vertex,
// then its instance counter must be greater than the current vertex's counter
if (next_catalog_stmt.equals(cur_catalog_stmt)) {
if (next_catalog_stmt_index <= cur_catalog_stmt_index) {
LOG.error("CURRENT: " + element + " [commit=" + element.isCommitVertex() + "]");
LOG.error("NEXT: " + next + " [commit=" + next.isCommitVertex() + "]");
}
assert(next_catalog_stmt_index > cur_catalog_stmt_index) :
String.format("%s[#%d] > %s[#%d]",
next_catalog_stmt.fullName(), next_catalog_stmt_index,
cur_catalog_stmt.fullName(), cur_catalog_stmt_index);
}
// Check whether it's COMMIT/ABORT
if (next.isCommitVertex() || next.isAbortVertex()) {
MarkovEdge candidate = markov.findEdge(element, next);
assert(candidate != null);
this.candidate_edges.add(candidate);
} else {
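// Otherwise this is a query vertex, so remember its Statement + instance counter
// so that we can estimate its partitions below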
this.next_statements.add(next.getCountedStatement());
}
} // FOR
// Now for the unique set of Statement+StatementIndex pairs, figure out which partitions
// the queries will go to.
MarkovEdge candidate_edge;
for (CountedStatement cstmt : this.next_statements) {
Statement catalog_stmt = cstmt.statement;
Integer catalog_stmt_index = cstmt.counter;
if (debug.val) LOG.debug("Examining " + cstmt);
// Get the mapping objects (if any) for next
// This is the only way we can predict what partitions we will touch
Map<StmtParameter, SortedSet<ParameterMapping>> stmtMappings = this.allMappings.get(catalog_stmt, catalog_stmt_index);
if (stmtMappings == null) {
if (debug.val) {
LOG.warn("No parameter mappings for " + catalog_stmt);
if (trace.val) LOG.trace(this.allMappings.debug(catalog_stmt));
}
continue;
}
// Go through the StmtParameters and map values from ProcParameters
StmtParameter stmt_params[] = catalog_stmt.getParameters().values();
Object stmt_args[] = new Object[stmt_params.length]; // this.getStatementParamsArray(catalog_stmt);
boolean stmt_args_set = false;
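// Tracks whether we were able to map at least one StmtParameter value from the ProcParameters below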
// XXX: This method may return null because it's being used for other
// purposes in the BatchPlanner.
int stmt_args_offsets[] = this.p_estimator.getStatementEstimationParameters(catalog_stmt);
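// If the PartitionEstimator couldn't give us the offsets, fall back to examining every StmtParameter in order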
if (stmt_args_offsets == null) {
stmt_args_offsets = new int[stmt_args.length];
for (int i = 0; i < stmt_args.length; i++)
stmt_args_offsets[i] = i;
}
assert(stmt_args_offsets != null) :
"Unexpected null StmtParameter offsets for " + catalog_stmt.fullName();
for (int offset : stmt_args_offsets) {
StmtParameter catalog_stmt_param = stmt_params[offset];
assert(catalog_stmt_param != null);
if (trace.val)
LOG.trace("Retrieving ParameterMappings for " + catalog_stmt_param.fullName());
Collection<ParameterMapping> mappings = stmtMappings.get(catalog_stmt_param);
if (mappings == null || mappings.isEmpty()) {
if (trace.val)
LOG.trace("No parameter mappings exists for " + catalog_stmt_param.fullName());
continue;
}
if (debug.val)
LOG.debug("Found " + mappings.size() + " mapping(s) for " + catalog_stmt_param.fullName());
// Special Case:
// If the number of possible Statements that we could execute next is greater than one,
// then we need to prune our list by removing those Statements that have a StmtParameter
// correlated to a ProcParameter that doesn't exist (such as a reference to an
// array element that is beyond the size of the current array).
// TODO: For now we will just always pick the first mapping that comes back.
// Is there any choice that we would need to make in order to have a better
// prediction about what the transaction might do?
if (debug.val && mappings.size() > 1) {
LOG.warn("Multiple parameter mappings for " + catalog_stmt_param.fullName());
if (trace.val) {
int ctr = 0;
for (ParameterMapping m : mappings) {
LOG.trace("[" + (ctr++) + "] Mapping: " + m);
} // FOR
}
}
for (ParameterMapping m : mappings) {
if (trace.val) LOG.trace("Mapping: " + m);
ProcParameter catalog_proc_param = m.getProcParameter();
if (catalog_proc_param.getIsarray()) {
Object proc_inner_args[] = (Object[])procParams[m.getProcParameter().getIndex()];
if (trace.val)
LOG.trace(CatalogUtil.getDisplayName(m.getProcParameter(), true) + " is an array: " +
Arrays.toString(proc_inner_args));
// TODO: If this Mapping references an array element that is not available for this
// current transaction, should we just skip this mapping or skip the entire query?
if (proc_inner_args.length <= m.getProcParameterIndex()) {
if (trace.val)
LOG.trace("Unable to map parameters: " +
"proc_inner_args.length[" + proc_inner_args.length + "] <= " +
"c.getProcParameterIndex[" + m.getProcParameterIndex() + "]");
continue;
}
stmt_args[offset] = proc_inner_args[m.getProcParameterIndex()];
stmt_args_set = true;
if (trace.val)
LOG.trace("Mapped " + CatalogUtil.getDisplayName(m.getProcParameter()) + "[" + m.getProcParameterIndex() + "] to " +
CatalogUtil.getDisplayName(catalog_stmt_param) + " [value=" + stmt_args[offset] + "]");
} else {
stmt_args[offset] = procParams[m.getProcParameter().getIndex()];
stmt_args_set = true;
if (trace.val)
LOG.trace("Mapped " + CatalogUtil.getDisplayName(m.getProcParameter()) + " to " +
CatalogUtil.getDisplayName(catalog_stmt_param) + " [value=" + stmt_args[offset] + "]");
}
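// Only use the first mapping that we were able to apply (see TODO above)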
break;
} // FOR (Mapping)
} // FOR (StmtParameter)
// If we set any of the stmt_args in the previous step, then we can throw it
// to our good old friend the PartitionEstimator and see whether we can figure
// things out for this Statement
if (stmt_args_set) {
if (trace.val)
LOG.trace("Mapped StmtParameters: " + Arrays.toString(stmt_args));
this.stmt_partitions.clear();
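// Ask the PartitionEstimator for the partitions that this Statement would touch with these parameters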
try {
this.p_estimator.getAllPartitions(this.stmt_partitions, catalog_stmt, stmt_args, this.base_partition);
} catch (Exception ex) {
String msg = "Failed to calculate partitions for " + catalog_stmt + " using parameters " + Arrays.toString(stmt_args);
LOG.error(msg, ex);
this.stop();
return;
}
if (trace.val)
LOG.trace("Estimated Partitions for " + catalog_stmt + ": " + this.stmt_partitions);
// Now for this given list of partitions, find a Vertex in our next set
// that has the same partitions
if (this.stmt_partitions.isEmpty() == false) {
candidate_edge = null;
if (trace.val)
LOG.trace("Partitions:" + this.stmt_partitions + " / Past:" + this.past_partitions);
for (MarkovVertex next_v : next_vertices) {
if (trace.val) LOG.trace("Checking whether " + next_v + " is the correct transition");
if (next_v.isEqual(catalog_stmt, this.stmt_partitions, this.past_partitions, catalog_stmt_index, true)) {
// BINGO!!!
assert(candidate_edge == null);
try {
candidate_edge = markov.findEdge(element, next_v);
} catch (NullPointerException ex) {
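// findEdge() blew up, presumably because there is no edge between these two
// vertices, so this is not a valid candidate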
continue;
}
assert(candidate_edge != null);
this.candidate_edges.add(candidate_edge);
if (trace.val)
LOG.trace("Found candidate edge to " + next_v + " [" + candidate_edge + "]");
break;
} else if (trace.val) {
Map<String, Object> m = new LinkedHashMap<String, Object>();
m.put("stmt", next_v.getCatalogItem().equals(catalog_stmt));
m.put("stmtCtr", next_v.getQueryCounter() == catalog_stmt_index);
m.put("partitions", next_v.getPartitions().equals(this.stmt_partitions));
m.put("past", next_v.getPastPartitions().equals(this.past_partitions));
LOG.trace("Invalid candidate transition:\n" + StringUtil.formatMaps(m));
}
} // FOR (Vertex)
if (trace.val && candidate_edge == null)
LOG.trace(String.format("Failed to find candidate edge from %s to %s [partitions=%s]",
element, catalog_stmt.fullName(), this.stmt_partitions));
}
}
// Without any stmt_args, there's nothing we can do here...
else if (trace.val) {
LOG.trace("No stmt_args for " + catalog_stmt + ". Skipping...");
} // IF
} // FOR
// If we don't have any candidate edges and the FORCE TRAVERSAL flag is set, then we'll just
// grab all of the edges from our current vertex
int num_candidates = this.candidate_edges.size();
boolean was_forced = false;
if (num_candidates == 0 && this.force_traversal) {
if (debug.val)
LOG.debug(String.format("No candidate edges were found. " +
"Checking whether we can create our own. [nextStatements=%s]",
this.next_statements));
// We're allowed to create the vertices that we know are missing
if (this.learning_enabled && this.next_statements.size() == 1) {
CountedStatement cntStmt = CollectionUtil.first(this.next_statements);
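// Construct a vertex for the single candidate Statement using the current partition estimates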
MarkovVertex v = new MarkovVertex(cntStmt.statement,
MarkovVertex.Type.QUERY,
cntStmt.counter,
this.stmt_partitions,
this.past_partitions);
markov.addVertex(v);
// For now we'll set the new edge's probability to 1.0 just to
// make the calculations down below work. This will get updated
// over time when we recompute the probabilities in the entire graph.
candidate_edge = new MarkovEdge(markov, 1, 1.0f);
markov.addEdge(candidate_edge, element, v, EdgeType.DIRECTED);
this.candidate_edges.add(candidate_edge);
if (this.created_vertices == null) this.created_vertices = new HashSet<MarkovVertex>();
this.created_vertices.add(v);
if (trace.val)
LOG.trace(String.format("Created new vertex %s and connected it to %s", v, element));
// 2012-10-21
// The problem with allowing the estimator to create a new vertex is that
// we don't know what its children are going to be. That means that when
// we invoke this method again at the next vertex (the one we just made above)
// then it's not going to have any children, so we don't know what it's
// going to do. We are actually better off just grabbing the next best
// vertex from the existing edges and then updating the graph after
// the txn has finished, since now we know exactly what it did.
}
// Otherwise we'll just make all of the outbound edges from the
// current vertex be our candidates
else {
if (trace.val)
LOG.trace("No candidate edges were found. Force travesal flag is set to true, so taking all");
Collection<MarkovEdge> out_edges = markov.getOutEdges(element);
if (out_edges != null) this.candidate_edges.addAll(out_edges);
}
num_candidates = this.candidate_edges.size();
was_forced = true;
}
// So now we have our list of candidate edges. We can pick the first one
// since they will be sorted by their probability
if (trace.val) LOG.trace("Candidate Edges: " + this.candidate_edges);
if (num_candidates > 0) {
MarkovEdge next_edge = CollectionUtil.first(this.candidate_edges);
assert(next_edge != null) : "Unexpected null edge " + this.candidate_edges;
MarkovVertex next_vertex = markov.getOpposite(element, next_edge);
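// Queue up the vertex at the other end of the chosen edge as the next vertex to visit in the traversal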
children.addAfter(next_vertex);
if (was_forced) {
if (this.forced_vertices == null) this.forced_vertices = new HashSet<MarkovVertex>();
this.forced_vertices.add(next_vertex);
}
if (debug.val) {
StringBuilder sb = new StringBuilder();
sb.append(String.format("#%02d CANDIDATES:\n", this.getDepth()));
int i = 0;
for (MarkovEdge e : this.candidate_edges) {
MarkovVertex v = markov.getOpposite(element, e);
sb.append(String.format(" [%d] %s --[%s]--> %s%s%s",
i++, element, e, v,
(next_vertex.equals(v) ? " <== SELECTED" : ""),
(trace.val && this.candidate_edges.size() > 1 ? "\n"+StringUtil.addSpacers(v.debug()) : "")));
} // FOR