Package edu.brown.markov

Examples of edu.brown.markov.MarkovGraph


                      TransactionUtil.formatTxnName(catalog_proc, txn_id), base_partition));

        // If we don't have a graph for this procedure, we should probably just return null
        // This will be the case for all sysprocs
        if (this.markovs == null) return (null);
        MarkovGraph markov = this.markovs.getFromParams(txn_id, base_partition, args, catalog_proc);
        if (markov == null) {
            if (debug.val)
                LOG.debug(String.format("%s - No MarkovGraph is available for transaction",
                          TransactionUtil.formatTxnName(catalog_proc, txn_id)));
            if (this.profiler != null) this.profiler.start_time.appendTime(timestamp);
            return (null);
        }
       
        if (trace.val)
            LOG.trace(String.format("%s - Creating new MarkovEstimatorState",
                      TransactionUtil.formatTxnName(catalog_proc, txn_id)));
        MarkovEstimatorState state = null;
        try {
            state = (MarkovEstimatorState)statesPool.borrowObject();
            assert(state.isInitialized() == false);
            state.init(txn_id, base_partition, markov, args, start_time);
        } catch (Throwable ex) {
            throw new RuntimeException(ex);
        }
        assert(state.isInitialized()) :
            "Unexpectted uninitialized MarkovEstimatorState\n" + state;
       
        MarkovVertex start = markov.getStartVertex();
        assert(start != null) : "The start vertex is null. This should never happen!";
        MarkovEstimate initialEst = state.createNextEstimate(start, true);
        this.estimatePath(state, initialEst, catalog_proc, args);
       
        if (debug.val) {
View Full Code Here
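
The snippet above borrows a MarkovEstimatorState from an object pool, initializes it for the transaction, and seeds it with an estimate rooted at the graph's start vertex. Below is a minimal, self-contained sketch of that borrow/init/use/return lifecycle; the pool and state classes here are simplified stand-ins, not the H-Store types.

import java.util.ArrayDeque;
import java.util.Deque;

// Simplified stand-ins for the pooled-state pattern used above (not the H-Store API).
public class PooledStateSketch {
    static class TxnState {
        private boolean initialized = false;
        private long txnId;
        void init(long txnId) { this.txnId = txnId; this.initialized = true; }
        void finish() { this.initialized = false; }          // reset before returning to the pool
        boolean isInitialized() { return this.initialized; }
        long getTransactionId() { return this.txnId; }
    }

    // A tiny object pool: reuse finished states instead of allocating new ones per txn.
    static class StatePool {
        private final Deque<TxnState> idle = new ArrayDeque<>();
        TxnState borrowObject() { return idle.isEmpty() ? new TxnState() : idle.pop(); }
        void returnObject(TxnState s) { s.finish(); idle.push(s); }
    }

    public static void main(String[] args) {
        StatePool pool = new StatePool();
        TxnState state = pool.borrowObject();
        assert !state.isInitialized();
        state.init(1001L);                       // mirrors state.init(txn_id, ...) above
        System.out.println("estimating txn #" + state.getTransactionId());
        pool.returnObject(state);                // hand the state back once the txn finishes
    }
}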


            LOG.debug(String.format("Processing %d queries for txn #%d",
                      catalog_stmts.length, state.getTransactionId()));
        int batch_size = catalog_stmts.length;
       
        // If we get here, then we should definitely have a MarkovGraph
        MarkovGraph markov = state.getMarkovGraph();
        assert(markov != null);
           
        MarkovVertex current = state.getCurrent();
        PartitionSet touchedPartitions = state.getTouchedPartitions();
        MarkovVertex next_v = null;
        MarkovEdge next_e = null;
        Statement last_stmt = null;
        int stmt_idxs[] = null;

        // We can cache what the path is based on the first and last query in the batch
        // We only want to do this for batches that are large enough.
        if (hstore_conf.site.markov_endpoint_caching &&
                batch_size >= hstore_conf.site.markov_batch_caching_min) {
            assert(current != null);
            if (debug.val)
                LOG.debug("Attempting cache look-up for last statement in batch: " + Arrays.toString(catalog_stmts));
           
            state.cache_last_partitions.clear();
            state.cache_past_partitions.clear();
           
            PartitionSet last_partitions;
            stmt_idxs = new int[batch_size];
            for (int i = 0; i < batch_size; i++) {
                last_stmt = catalog_stmts[i];
                last_partitions = partitions[batch_size - 1];
                stmt_idxs[i] = state.updateQueryInstanceCount(last_stmt);
                if (i+1 != batch_size) {
                    state.cache_past_partitions.addAll(last_partitions);
                }
                else {
                    state.cache_last_partitions.addAll(last_partitions);
                }
            } // FOR
           
            Pair<MarkovEdge, MarkovVertex> pair = this.getCachedBatchEnd(current,
                                                                         last_stmt,
                                                                         stmt_idxs[batch_size-1],
                                                                         state.cache_last_partitions,
                                                                         state.cache_past_partitions);
            if (pair != null) {
                next_e = pair.getFirst();
                assert(next_e != null);
                next_v = pair.getSecond();
                assert(next_v != null);
                if (debug.val)
                    LOG.debug(String.format("Got cached batch end for %s: %s -> %s",
                              markov, current, next_v));
               
                // Update the counters and other info for the next vertex and edge
                if (this.enable_recomputes) {
                    this.markovTimes.addInstanceTime(next_v,
                                                     state.getTransactionId(),
                                                     state.getExecutionTimeOffset());
                }
               
                // Update the state information
                state.setCurrent(next_v, next_e);
                touchedPartitions.addAll(state.cache_last_partitions);
                touchedPartitions.addAll(state.cache_past_partitions);
            }
        }
       
        // Roll through the Statements in this batch and move the current vertex
        // for the txn's State handle along the path in the MarkovGraph
        if (next_v == null) {
            for (int i = 0; i < batch_size; i++) {
                int queryCount = (stmt_idxs != null ? stmt_idxs[i] : -1);
                this.consume(state, markov, catalog_stmts[i], partitions[i], queryCount);
                if (stmt_idxs == null) touchedPartitions.addAll(partitions[i]);
            } // FOR
           
            // Update our cache if we tried and failed before
            if (hstore_conf.site.markov_endpoint_caching && stmt_idxs != null) {
                if (debug.val)
                    LOG.debug(String.format("Updating cache batch end for %s: %s -> %s",
                              markov, current, state.getCurrent()));
                this.addCachedBatchEnd(current,
                                       CollectionUtil.last(state.actual_path_edges),
                                       state.getCurrent(),
                                       last_stmt,
                                       stmt_idxs[batch_size-1],
                                       state.cache_past_partitions,
                                       state.cache_last_partitions);
            }
        }

        // 2012-10-17: This is kind of funky because we have to populate the
        // probabilities for the MarkovEstimate here, whereas for the initial estimate
        // we did it inside of the MarkovPathEstimator
        MarkovEstimate estimate = state.createNextEstimate(state.getCurrent(), false);
        assert(estimate != null);
        Procedure catalog_proc = markov.getProcedure();
        Object procArgs[] = state.getProcedureParameters();
        this.estimatePath(state, estimate, catalog_proc, procArgs);
       
        if (debug.val)
            LOG.debug(String.format("Next MarkovEstimate for txn #%d\n%s",
                      state.getTransactionId(), estimate.toString()));
        assert(estimate.isInitialized()) :
            String.format("Unexpected uninitialized MarkovEstimate for txn #%d\n%s", state.getTransactionId(), estimate);
        assert(estimate.isValid()) :
            String.format("Invalid MarkovEstimate for txn #%d\n%s", state.getTransactionId(), estimate);
       
        // Once the workload shifts we detect it and recompute the graph's
        // probabilities using the data collected from the current workload.
        if (this.enable_recomputes && markov.shouldRecompute(this.txn_count.get(), RECOMPUTE_TOLERANCE)) {
            markov.calculateProbabilities(catalogContext.getAllPartitionIds());
        }
       
        // We want to add the estimate to the state down here after we have initialized
        // everything. This prevents other threads from accessing it before we have
        // initialized it properly.
View Full Code Here
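
The endpoint-caching block above checks whether a previously observed batch, starting from the same vertex with the same last statement, counter, and partition sets, already tells us where the path ends up, so repeated batches can skip the per-statement walk. A hedged sketch of that idea with plain collections; the key layout and the (edge, vertex) value shape are assumptions, not the real getCachedBatchEnd/addCachedBatchEnd implementation.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Illustrative batch-endpoint cache keyed on the current vertex plus the batch's last
// statement, its invocation counter, and the partition sets (simplified String/Set types).
public class BatchEndpointCacheSketch {
    // Composite key; List.of(...) gives us equals()/hashCode() for free.
    static List<Object> key(String currentVertex, String lastStmt, int counter,
                            Set<Integer> lastPartitions, Set<Integer> pastPartitions) {
        return List.of(currentVertex, lastStmt, counter, lastPartitions, pastPartitions);
    }

    // Cached value: the (edge, vertex) endpoint the batch is known to reach.
    private final Map<List<Object>, String[]> cache = new HashMap<>();

    void addCachedBatchEnd(List<Object> k, String edge, String vertex) {
        cache.putIfAbsent(k, new String[]{ edge, vertex });
    }
    String[] getCachedBatchEnd(List<Object> k) {
        return cache.get(k);
    }

    public static void main(String[] args) {
        BatchEndpointCacheSketch c = new BatchEndpointCacheSketch();
        List<Object> k = key("Q1[ctr=0]", "updateStock", 1, Set.of(0), Set.of(0, 1));
        c.addCachedBatchEnd(k, "e42", "Q3[ctr=1]");
        String[] end = c.getCachedBatchEnd(k);                 // cache hit: skip per-statement walk
        System.out.println("edge=" + end[0] + " vertex=" + end[1]);
    }
}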

            return;
        }

        Long txn_id = state.getTransactionId();
        long end_time = EstTime.currentTimeMillis();
        MarkovGraph markov = state.getMarkovGraph();
        if (debug.val)
            LOG.debug(String.format("Cleaning up state info for txn #%d [status=%s]",
                      txn_id, status));

        // If there were no updates while the transaction was running, then
        // we don't want to try to update the model, because we will end up
        // connecting the START vertex to the COMMIT vertex, which is not correct
        if (state.isUpdatesEnabled()) {
            // We need to update the counter information in our MarkovGraph so that we know
            // that the procedure may transition to the ABORT vertex from wherever it was before
            MarkovVertex current = state.getCurrent();
            assert(current != null) :
                String.format("Missing current vertex for %s\n%s",
                              TransactionUtil.formatTxnName(markov.getProcedure(), txn_id), state);
           
            // If we don't have the terminal vertex, then we know that we don't care about
            // what this transaction actually did
            MarkovVertex next_v = markov.getFinishVertex(status);
            if (next_v == null) {
                if (this.profiler != null) this.profiler.finish_time.appendTime(timestamp);
                return;
            }
           
            // If no edge exists to the next vertex, then we need to create one
            MarkovEdge next_e = null;
            synchronized (next_v) {
                next_e = markov.addToEdge(current, next_v);
            } // SYNCH
            state.setCurrent(next_v, next_e); // For post-txn processing...
   
            // Update counters
            // We want to update the counters for the entire path right here so that
            // nobody gets incomplete numbers if they recompute probabilities
            for (MarkovVertex v : state.actual_path) v.incrementInstanceHits();
            for (MarkovEdge e : state.actual_path_edges) e.incrementInstanceHits();
            if (this.enable_recomputes) {
                this.markovTimes.addInstanceTime(next_v, txn_id, state.getExecutionTimeOffset(end_time));
            }
        }
       
        // Cache the path for the MarkovGraph if the path was correct for the txn
        if (hstore_conf.site.markov_path_caching &&
            this.cached_paths.containsKey(markov) == false && state.getInitialEstimate().isValid()) {
            MarkovEstimate initialEst = state.getInitialEstimate();
            synchronized (this.cached_paths) {
                if (this.cached_paths.containsKey(markov) == false) {
                    if (debug.val)
                        LOG.debug(String.format("Storing cached path through %s[#%d] that was used by txn #%d",
                                  markov, markov.getGraphId(), txn_id));
                    this.cached_paths.put(markov, initialEst.getMarkovPath());
                }
            } // SYNCH
        } else if (trace.val && hstore_conf.site.markov_path_caching) {
            LOG.trace(String.format("Not caching path through %s[#%d] used by txn #%d [alreadyCached=%s / isValid=%s]",
                      markov, markov.getGraphId(), txn_id,
                      this.cached_paths.containsKey(markov), state.getInitialEstimate().isValid()));
        }
        if (this.profiler != null) this.profiler.finish_time.appendTime(timestamp);
        return;
    }
View Full Code Here
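
At transaction finish, the code above links the last vertex to the COMMIT or ABORT terminal vertex and increments instance hits along the entire path the transaction actually took, so a later probability recomputation sees complete counts. A compact, self-contained sketch of that counting-and-recompute idea with simplified graph types (not MarkovGraph itself):

import java.util.List;

// Simplified hit-counted graph elements to illustrate the finish-time update above.
public class PathHitCountSketch {
    static class Vertex {
        final String name; long instanceHits = 0;
        Vertex(String name) { this.name = name; }
        void incrementInstanceHits() { instanceHits++; }
    }
    static class Edge {
        final Vertex from, to; long instanceHits = 0; double probability = 0.0;
        Edge(Vertex f, Vertex t) { this.from = f; this.to = t; }
        void incrementInstanceHits() { instanceHits++; }
    }

    public static void main(String[] args) {
        Vertex start = new Vertex("START"), q1 = new Vertex("Q1"), commit = new Vertex("COMMIT");
        Edge e1 = new Edge(start, q1), e2 = new Edge(q1, commit);
        List<Vertex> actualPath = List.of(start, q1, commit);
        List<Edge> actualPathEdges = List.of(e1, e2);

        // Update the counters for the entire path in one place, as the finish handler does.
        for (Vertex v : actualPath) v.incrementInstanceHits();
        for (Edge e : actualPathEdges) e.incrementInstanceHits();

        // A later recompute can derive edge probabilities from the accumulated hits.
        e1.probability = (double) e1.instanceHits / start.instanceHits;
        e2.probability = (double) e2.instanceHits / q1.instanceHits;
        System.out.printf("P(START->Q1)=%.2f P(Q1->COMMIT)=%.2f%n", e1.probability, e2.probability);
    }
}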

        }
     
        // TODO: If the current vertex is in the initial estimate's list,
        // then we can just use the truncated list as the estimate, since we know
        // that the path will be the same. We don't need to recalculate everything
        MarkovGraph markov = state.getMarkovGraph();
        assert(markov != null) :
            String.format("Unexpected null MarkovGraph for %s [hashCode=%d]\n%s",
                          TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()),
                          state.hashCode(), state);
        boolean compute_path = true;
        if (hstore_conf.site.markov_fast_path && currentVertex.isStartVertex() == false) {
            List<MarkovVertex> initialPath = ((MarkovEstimate)state.getInitialEstimate()).getMarkovPath();
            if (initialPath.contains(currentVertex)) {
                if (debug.val)
                    LOG.debug(String.format("%s - Using fast path estimation for %s[#%d]",
                              TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()), markov, markov.getGraphId()));
                if (this.profiler != null) timestamp = ProfileMeasurement.getTime();
                try {
                    MarkovPathEstimator.fastEstimation(est, initialPath, currentVertex);
                    compute_path = false;
                } finally {
                    if (this.profiler != null) this.profiler.fastest_time.appendTime(timestamp);
                }
            }
        }
        // We'll reuse the last MarkovPathEstimator (and its path) if the graph has been accurate for
        // other previous transactions. This prevents us from having to recompute the path every single time,
        // especially for single-partition transactions where the clustered MarkovGraphs are accurate
        else if (hstore_conf.site.markov_path_caching) {
            List<MarkovVertex> cached = this.cached_paths.get(markov);
            if (cached == null) {
                if (debug.val)
                    LOG.debug(String.format("%s - No cached path available for %s[#%d]",
                              TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()),
                              markov, markov.getGraphId()));
            }
            else if (markov.getAccuracyRatio() < hstore_conf.site.markov_path_caching_threshold) {
                if (debug.val)
                    LOG.debug(String.format("%s - MarkovGraph %s[#%d] accuracy is below caching threshold [%.02f < %.02f]",
                              TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()),
                              markov, markov.getGraphId(), markov.getAccuracyRatio(),
                              hstore_conf.site.markov_path_caching_threshold));
            }
            else {
                if (debug.val)
                    LOG.debug(String.format("%s - Using cached path for %s[#%d]",
                              TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()),
                              markov, markov.getGraphId()));
                if (this.profiler != null) timestamp = ProfileMeasurement.getTime();
                try {
                    MarkovPathEstimator.fastEstimation(est, cached, currentVertex);
                    compute_path = false;
                } finally {
                    if (this.profiler != null) this.profiler.cachedest_time.appendTime(timestamp);
                }
            }
        }
       
        // Use the MarkovPathEstimator to estimate a new path for this txn
        if (compute_path) {
            if (debug.val)
                LOG.debug(String.format("%s - Need to compute new path in %s[#%d] using %s",
                          TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId()),
                          markov, markov.getGraphId(),
                          MarkovPathEstimator.class.getSimpleName()));
            MarkovPathEstimator pathEstimator = null;
            try {
                pathEstimator = (MarkovPathEstimator)this.pathEstimatorsPool.borrowObject();
                pathEstimator.init(state.getMarkovGraph(), est, args, state.getBasePartition());
               
                pathEstimator.setForceTraversal(hstore_conf.site.markov_force_traversal);
                pathEstimator.setLearningEnabled(hstore_conf.site.markov_learning_enable);
            } catch (Throwable ex) {
                String txnName = TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId());
                String msg = "Failed to intitialize new MarkovPathEstimator for " + txnName;
                LOG.error(msg, ex);
                throw new RuntimeException(msg, ex);
            }
           
            if (this.profiler != null) timestamp = ProfileMeasurement.getTime();
            try {
                pathEstimator.traverse(est.getVertex());
            } catch (Throwable ex) {
                try {
                    GraphvizExport<MarkovVertex, MarkovEdge> gv = MarkovUtil.exportGraphviz(markov, true, markov.getPath(pathEstimator.getVisitPath()));
                    LOG.error("GRAPH #" + markov.getGraphId() + " DUMP: " + gv.writeToTempFile(catalog_proc));
                } catch (Exception ex2) {
                    throw new RuntimeException(ex2);
                }
                String msg = "Failed to estimate path for " + TransactionUtil.formatTxnName(catalog_proc, state.getTransactionId());
                LOG.error(msg, ex);
View Full Code Here
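
estimatePath above tries two shortcuts before paying for a full traversal: reuse the tail of the initial estimate's path if the current vertex is already on it (the fast path), or reuse a cached path when the graph's accuracy ratio is above the configured caching threshold. A small sketch of the truncate-and-reuse step, assuming a path is just a list of vertices; the real fastEstimation signature may differ.

import java.util.Collections;
import java.util.List;

// Illustration of reusing a previously computed path from the current position onward.
public class FastPathSketch {
    // Return the suffix of a known path starting at the current vertex, or an empty list
    // if the vertex is not on the path (the caller must then do a full traversal).
    static <V> List<V> truncateAtCurrent(List<V> knownPath, V currentVertex) {
        int idx = knownPath.indexOf(currentVertex);
        return (idx < 0) ? Collections.emptyList() : knownPath.subList(idx, knownPath.size());
    }

    public static void main(String[] args) {
        List<String> initialPath = List.of("START", "Q1", "Q2", "COMMIT");
        List<String> tail = truncateAtCurrent(initialPath, "Q2");
        System.out.println(tail.isEmpty() ? "compute new path" : "reuse: " + tail);  // reuse: [Q2, COMMIT]
    }
}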

        this.past_partitions.addAll(element.getPartitions());
       
        if (trace.val) LOG.trace("Current Vertex: " + element);
        Statement cur_catalog_stmt = element.getCatalogItem();
        int cur_catalog_stmt_index = element.getQueryCounter();
        MarkovGraph markov = (MarkovGraph)this.getGraph();
       
        // At our current vertex we need to gather all of our neighbors
        // and get unique Statements that we could be executing next
        Collection<MarkovVertex> next_vertices = markov.getSuccessors(element);
        if (next_vertices == null || next_vertices.isEmpty()) {
            if (debug.val) LOG.debug("No succesors were found for " + element + ". Halting traversal");
            return;
        }
        if (trace.val) LOG.trace("Successors: " + next_vertices);
       
        // Step #1
        // Get all of the unique Statement+StatementInstanceIndex pairs for the vertices
        // that are adjacent to our current vertex
        for (MarkovVertex next : next_vertices) {
            Statement next_catalog_stmt = next.getCatalogItem();
            int next_catalog_stmt_index = next.getQueryCounter();
           
            // Sanity Check: If this vertex is the same Statement as the current vertex,
            // then its instance counter must be greater than the current vertex's counter
            if (next_catalog_stmt.equals(cur_catalog_stmt)) {
                if (next_catalog_stmt_index <= cur_catalog_stmt_index) {
                    LOG.error("CURRENT: " + element + " [commit=" + element.isCommitVertex() + "]");
                    LOG.error("NEXT: " + next + " [commit=" + next.isCommitVertex() + "]");
                }
                assert(next_catalog_stmt_index > cur_catalog_stmt_index) :
                    String.format("%s[#%d] > %s[#%d]",
                                  next_catalog_stmt.fullName(), next_catalog_stmt_index,
                                  cur_catalog_stmt.fullName(), cur_catalog_stmt_index);
            }
           
            // Check whether it's COMMIT/ABORT
            if (next.isCommitVertex() || next.isAbortVertex()) {
                MarkovEdge candidate = markov.findEdge(element, next);
                assert(candidate != null);
                this.candidate_edges.add(candidate);
            } else {
                this.next_statements.add(next.getCountedStatement());
            }
        } // FOR

        // Now for the unique set of Statement+StatementIndex pairs, figure out which partitions
        // the queries will go to.
        MarkovEdge candidate_edge;
        for (CountedStatement cstmt : this.next_statements) {
            Statement catalog_stmt = cstmt.statement;
            Integer catalog_stmt_index = cstmt.counter;
            if (debug.val) LOG.debug("Examining " + cstmt);
           
            // Get the mapping objects (if any) for next
            // This is the only way we can predict what partitions we will touch
            Map<StmtParameter, SortedSet<ParameterMapping>> stmtMappings = this.allMappings.get(catalog_stmt, catalog_stmt_index);
            if (stmtMappings == null) {
                if (debug.val) {
                    LOG.warn("No parameter mappings for " + catalog_stmt);
                    if (trace.val) LOG.trace(this.allMappings.debug(catalog_stmt));
                }
                continue;
            }
           
            // Go through the StmtParameters and map values from ProcParameters
            StmtParameter stmt_params[] = catalog_stmt.getParameters().values();
            Object stmt_args[] = new Object[stmt_params.length]; // this.getStatementParamsArray(catalog_stmt);
            boolean stmt_args_set = false;
           
            // XXX: This method may return null because it's being used for other
            // purposes in the BatchPlanner.
            int stmt_args_offsets[] = this.p_estimator.getStatementEstimationParameters(catalog_stmt);
            if (stmt_args_offsets == null) {
                stmt_args_offsets = new int[stmt_args.length];
                for (int i = 0; i < stmt_args.length; i++)
                    stmt_args_offsets[i] = i;
            }
            assert(stmt_args_offsets != null) :
                "Unexpected null StmtParameter offsets for " + catalog_stmt.fullName();
            for (int offset : stmt_args_offsets) {
                StmtParameter catalog_stmt_param = stmt_params[offset];
                assert(catalog_stmt_param != null);
                if (trace.val)
                    LOG.trace("Retrieving ParameterMappings for " + catalog_stmt_param.fullName());
               
                Collection<ParameterMapping> mappings = stmtMappings.get(catalog_stmt_param);
                if (mappings == null || mappings.isEmpty()) {
                    if (trace.val)
                        LOG.trace("No parameter mappings exists for " + catalog_stmt_param.fullName());
                    continue;
                }
                if (debug.val)
                    LOG.debug("Found " + mappings.size() + " mapping(s) for " + catalog_stmt_param.fullName());
       
                // Special Case:
                // If the number of possible Statements we could execute next is greater than one,
                // then we need to prune our list by removing those Statements that have a StmtParameter
                // correlated to a ProcParameter that doesn't exist (such as referencing an array
                // element beyond the size of the current array).
                // TODO: For now we are just going to always pick the first mapping
                // that comes back. Is there any choice that we would need to make in order
                // to have a better prediction about what the transaction might do?
                if (debug.val && mappings.size() > 1) {
                    LOG.warn("Multiple parameter mappings for " + catalog_stmt_param.fullName());
                    if (trace.val) {
                        int ctr = 0;
                        for (ParameterMapping m : mappings) {
                            LOG.trace("[" + (ctr++) + "] Mapping: " + m);
                        } // FOR
                    }
                }
                for (ParameterMapping m : mappings) {
                    if (trace.val) LOG.trace("Mapping: " + m);
                    ProcParameter catalog_proc_param = m.getProcParameter();
                    if (catalog_proc_param.getIsarray()) {
                        Object proc_inner_args[] = (Object[])procParams[m.getProcParameter().getIndex()];
                        if (trace.val)
                            LOG.trace(CatalogUtil.getDisplayName(m.getProcParameter(), true) + " is an array: " +
                                      Arrays.toString(proc_inner_args));
                       
                        // TODO: If this Mapping references an array element that is not available for this
                        // current transaction, should we just skip this mapping or skip the entire query?
                        if (proc_inner_args.length <= m.getProcParameterIndex()) {
                            if (trace.val)
                                LOG.trace("Unable to map parameters: " +
                                          "proc_inner_args.length[" + proc_inner_args.length + "] <= " +
                                          "c.getProcParameterIndex[" + m.getProcParameterIndex() + "]");
                            continue;
                        }
                        stmt_args[offset] = proc_inner_args[m.getProcParameterIndex()];
                        stmt_args_set = true;
                        if (trace.val)
                            LOG.trace("Mapped " + CatalogUtil.getDisplayName(m.getProcParameter()) + "[" + m.getProcParameterIndex() + "] to " +
                                      CatalogUtil.getDisplayName(catalog_stmt_param) + " [value=" + stmt_args[offset] + "]");
                    } else {
                        stmt_args[offset] = procParams[m.getProcParameter().getIndex()];
                        stmt_args_set = true;
                        if (trace.val)
                            LOG.trace("Mapped " + CatalogUtil.getDisplayName(m.getProcParameter()) + " to " +
                                      CatalogUtil.getDisplayName(catalog_stmt_param) + " [value=" + stmt_args[offset] + "]");
                    }
                    break;
                } // FOR (Mapping)
            } // FOR (StmtParameter)
               
            // If we set any of the stmt_args in the previous step, then we can throw it
            // to our good old friend the PartitionEstimator and see whether we can figure
            // things out for this Statement
            if (stmt_args_set) {
                if (trace.val)
                    LOG.trace("Mapped StmtParameters: " + Arrays.toString(stmt_args));
                this.stmt_partitions.clear();
                try {
                    this.p_estimator.getAllPartitions(this.stmt_partitions, catalog_stmt, stmt_args, this.base_partition);
                } catch (Exception ex) {
                    String msg = "Failed to calculate partitions for " + catalog_stmt + " using parameters " + Arrays.toString(stmt_args);
                    LOG.error(msg, ex);
                    this.stop();
                    return;
                }
                if (trace.val)
                    LOG.trace("Estimated Partitions for " + catalog_stmt + ": " + this.stmt_partitions);
               
                // Now for this given list of partitions, find a Vertex in our next set
                // that has the same partitions
                if (this.stmt_partitions.isEmpty() == false) {
                    candidate_edge = null;
                    if (trace.val)
                        LOG.trace("Partitions:" + this.stmt_partitions + " / Past:" + this.past_partitions);
                    for (MarkovVertex next_v : next_vertices) {
                        if (trace.val) LOG.trace("Checking whether " + next_v + " is the correct transition");
                        if (next_v.isEqual(catalog_stmt, this.stmt_partitions, this.past_partitions, catalog_stmt_index, true)) {
                            // BINGO!!!
                            assert(candidate_edge == null);
                            try {
                                candidate_edge = markov.findEdge(element, next_v);
                            } catch (NullPointerException ex) {
                                continue;
                            }
                            assert(candidate_edge != null);
                            this.candidate_edges.add(candidate_edge);
                            if (trace.val)
                                LOG.trace("Found candidate edge to " + next_v + " [" + candidate_edge + "]");
                            break;
                        } else if (trace.val) {
                            Map<String, Object> m = new LinkedHashMap<String, Object>();
                            m.put("stmt", next_v.getCatalogItem().equals(catalog_stmt));
                            m.put("stmtCtr", next_v.getQueryCounter() == catalog_stmt_index);
                            m.put("partitions", next_v.getPartitions().equals(this.stmt_partitions));
                            m.put("past", next_v.getPastPartitions().equals(this.past_partitions));
                            LOG.trace("Invalid candidate transition:\n" + StringUtil.formatMaps(m));
                        }
                    } // FOR (Vertex)
                    if (trace.val && candidate_edge == null)
                        LOG.trace(String.format("Failed to find candidate edge from %s to %s [partitions=%s]",
                                  element, catalog_stmt.fullName(), this.stmt_partitions));
                }
            }
            // Without any stmt_args, there's nothing we can do here...
            else if (trace.val) {
                LOG.trace("No stmt_args for " + catalog_stmt + ". Skipping...");
            } // IF
        } // FOR
       
        // If we don't have any candidate edges and the FORCE TRAVERSAL flag is set, then we'll just
        // grab all of the edges from our current vertex
        int num_candidates = this.candidate_edges.size();
        boolean was_forced = false;
        if (num_candidates == 0 && this.force_traversal) {
            if (debug.val)
                LOG.debug(String.format("No candidate edges were found. " +
                      "Checking whether we can create our own. [nextStatements=%s]",
                      this.next_statements));
           
            // We're allowed to create the vertices that we know are missing
            if (this.learning_enabled && this.next_statements.size() == 1) {
                CountedStatement cntStmt = CollectionUtil.first(this.next_statements);
                MarkovVertex v = new MarkovVertex(cntStmt.statement,
                                                  MarkovVertex.Type.QUERY,
                                                  cntStmt.counter,
                                                  this.stmt_partitions,
                                                  this.past_partitions);
                markov.addVertex(v);
               
                // For now we'll set the new edge's probability to 1.0 to just
                // make the calculations down below work. This will get updated
                // over time when we recompute the probabilities in the entire graph.
                candidate_edge = new MarkovEdge(markov, 1, 1.0f);
                markov.addEdge(candidate_edge, element, v, EdgeType.DIRECTED);
                this.candidate_edges.add(candidate_edge);
               
                if (this.created_vertices == null) this.created_vertices = new HashSet<MarkovVertex>();
                this.created_vertices.add(v);
                if (trace.val)
                    LOG.trace(String.format("Created new vertex %s and connected it to %s", v, element));
               
                // 2012-10-21
                // The problem with allowing the estimator to create a new vertex is that
                // we don't know what its children are going to be. That means that when
                // we invoke this method again at the next vertex (the one we just made above)
                // then it's not going to have any children, so we don't know what it's
                // going to do. We are actually better off with just grabbing the next best
                // vertex from the existing edges and then updating the graph after
                // the txn has finished, since now we know exactly what it did.
               
            }
            // Otherwise we'll just make all of the outbound edges from the
            // current vertex be our candidates
            else {
                if (trace.val)
                    LOG.trace("No candidate edges were found. Force travesal flag is set to true, so taking all");
                Collection<MarkovEdge> out_edges = markov.getOutEdges(element);
                if (out_edges != null) this.candidate_edges.addAll(out_edges);
            }
            num_candidates = this.candidate_edges.size();
            was_forced = true;
        }
       
        // So now we have our list of candidate edges. We can pick the first one
        // since they will be sorted by their probability
        if (trace.val) LOG.trace("Candidate Edges: " + this.candidate_edges);
        if (num_candidates > 0) {
            MarkovEdge next_edge = CollectionUtil.first(this.candidate_edges);
            assert(next_edge != null) : "Unexpected null edge " + this.candidate_edges;
            MarkovVertex next_vertex = markov.getOpposite(element, next_edge);
            children.addAfter(next_vertex);
            if (was_forced) {
                if (this.forced_vertices == null) this.forced_vertices = new HashSet<MarkovVertex>();
                this.forced_vertices.add(next_vertex);
            }

            if (debug.val) {
                StringBuilder sb = new StringBuilder();
                sb.append(String.format("#%02d CANDIDATES:\n", this.getDepth()));
                int i = 0;
                for (MarkovEdge e : this.candidate_edges) {
                    MarkovVertex v = markov.getOpposite(element, e);
                    sb.append(String.format("  [%d] %s  --[%s]--> %s%s%s",
                              i++, element, e, v,
                              (next_vertex.equals(v) ? " <== SELECTED" : ""),
                              (trace.val && this.candidate_edges.size() > 1 ? "\n"+StringUtil.addSpacers(v.debug()) : "")));
                } // FOR
View Full Code Here
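
The traversal callback above gathers candidate edges out of the current vertex and then simply takes the first element, relying on the candidate set keeping its edges sorted by probability. A self-contained sketch of that sorted-candidates selection using a TreeSet; the comparator here is an assumption about the ordering, not the actual MarkovEdge comparator.

import java.util.Comparator;
import java.util.TreeSet;

// Picking the most probable outgoing edge from a sorted candidate set.
public class CandidateEdgeSketch {
    static final class Edge {
        final String target; final double probability;
        Edge(String target, double probability) { this.target = target; this.probability = probability; }
    }

    public static void main(String[] args) {
        // Highest probability first; tie-break on target name so distinct edges are kept.
        TreeSet<Edge> candidates = new TreeSet<>(
            Comparator.comparingDouble((Edge e) -> e.probability).reversed()
                      .thenComparing(e -> e.target));
        candidates.add(new Edge("Q3[ctr=0]", 0.72));
        candidates.add(new Edge("ABORT",     0.03));
        candidates.add(new Edge("COMMIT",    0.25));

        Edge next = candidates.first();           // analogous to CollectionUtil.first(candidate_edges)
        System.out.println("next vertex = " + next.target + " (p=" + next.probability + ")");
    }
}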

    protected void setUp() throws Exception {
        super.setUp(ProjectType.TPCC);
        this.addPartitions(NUM_PARTITIONS);
       
        this.catalog_proc = this.getProcedure(TARGET_PROCEDURE);
        this.markov = new MarkovGraph(this.catalog_proc).initialize();
       
        this.est = new MarkovEstimate(catalogContext);
        assertFalse(this.est.isValid());
    }
View Full Code Here

    public void testAutoLearning() throws Exception {
        Logger LOG = Logger.getRootLogger();
       
        // Use a blank MarkovGraph and check to see whether the MarkovPathEstimator
        // can automatically learn what the states and transitions are
        graph = new MarkovGraph(this.catalog_proc);
        graph.initialize();
       
        pathEstimator.setLearningEnabled(true);
        pathEstimator.setForceTraversal(true);
        boolean first = true;
View Full Code Here
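
testAutoLearning starts from a blank graph with learning and forced traversal enabled, so vertices and edges that have never been seen are created as executions are observed, mirroring the learning branch in the traversal callback earlier on this page. A minimal sketch of that learn-as-you-go idea, with all types defined locally rather than using the H-Store classes:

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Building up states and transitions from observed executions, starting from a blank graph.
public class AutoLearningSketch {
    private final Set<String> vertices = new HashSet<>();
    private final Map<String, Integer> edgeCounts = new HashMap<>();   // "from->to" -> hits

    void observe(List<String> execution) {
        String prev = "START";
        vertices.add(prev);
        for (String stmt : execution) {
            vertices.add(stmt);                                        // create missing vertex
            edgeCounts.merge(prev + "->" + stmt, 1, Integer::sum);     // create or bump edge
            prev = stmt;
        }
        edgeCounts.merge(prev + "->COMMIT", 1, Integer::sum);
        vertices.add("COMMIT");
    }

    public static void main(String[] args) {
        AutoLearningSketch g = new AutoLearningSketch();
        g.observe(List.of("getStock", "updateStock"));
        g.observe(List.of("getStock", "updateStock"));
        g.observe(List.of("getStock"));
        System.out.println("vertices = " + g.vertices);
        System.out.println("edges    = " + g.edgeCounts);
    }
}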

//        assertEquals(est.toString(), 1.0f, est.getSinglePartitionProbability());
        assertEquals(est.toString(), 1.0f, est.getConfidenceCoefficient());

        //        System.err.println(est.toString());
       
        MarkovGraph markov = state.getMarkovGraph();
        List<MarkovVertex> initial_path = est.getMarkovPath();
        assertNotNull(initial_path);
        assertFalse(initial_path.isEmpty());
       
        System.err.println("# of Vertices: " + markov.getVertexCount());
        System.err.println("# of Edges:    " + markov.getEdgeCount());
        System.err.println("Confidence:    " + String.format("%.4f", est.getConfidenceCoefficient()));
        System.err.println("\nINITIAL PATH:\n" + StringUtil.join("\n", initial_path));
//        System.err.println(multip_trace.debug(catalog_db));

        PartitionSet partitions = new PartitionSet();
View Full Code Here

        assertTrue(initialEst.toString(), initialEst.getConfidenceCoefficient() >= 0f);
        assertTrue(initialEst.toString(), initialEst.getConfidenceCoefficient() <= 1f);
        assertFalse(initialEst.toString(), initialEst.isSinglePartitioned(this.thresholds));
        assertTrue(initialEst.toString(), initialEst.isAbortable(this.thresholds));
       
        MarkovGraph markov = state.getMarkovGraph();
        List<MarkovVertex> initial_path = initialEst.getMarkovPath();
        assertNotNull(initial_path);
        assertFalse(initial_path.isEmpty());
       
        System.err.println("# of Vertices: " + markov.getVertexCount());
        System.err.println("# of Edges:    " + markov.getEdgeCount());
        System.err.println("Confidence:    " + String.format("%.4f", initialEst.getConfidenceCoefficient()));
        System.err.println("\nINITIAL PATH:\n" + StringUtil.join("\n", initial_path));

        PartitionSet partitions = new PartitionSet();
        p_estimator.getAllPartitions(partitions, txn_trace);
View Full Code Here
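
Both test fragments above print the initial path and then ask the PartitionEstimator for the partitions the traced transaction really touches. A short sketch of the comparison such a test would typically make, unioning the partitions along the estimated path against the actual set; the Vertex type here is a stand-in, though MarkovVertex does expose getPartitions() as used earlier on this page.

import java.util.List;
import java.util.Set;
import java.util.TreeSet;

// Collecting the partitions predicted along a path and comparing them to the actual ones.
public class PathPartitionsSketch {
    static final class Vertex {
        final Set<Integer> partitions;
        Vertex(Set<Integer> partitions) { this.partitions = partitions; }
        Set<Integer> getPartitions() { return partitions; }
    }

    public static void main(String[] args) {
        List<Vertex> initialPath = List.of(
            new Vertex(Set.of()),        // START touches nothing
            new Vertex(Set.of(0)),
            new Vertex(Set.of(0, 1)),
            new Vertex(Set.of()));       // COMMIT

        Set<Integer> estimated = new TreeSet<>();
        for (Vertex v : initialPath) estimated.addAll(v.getPartitions());

        Set<Integer> actual = Set.of(0, 1);          // e.g. what the PartitionEstimator reports
        System.out.println("estimated=" + estimated + " actual=" + actual
                           + " match=" + estimated.equals(actual));
    }
}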

       
        // ESTIMATOR STATE
        sb.append("\nESTIMATOR STATE:\n");
        EstimatorState s = ts.getEstimatorState();
        if (s instanceof MarkovEstimatorState) {
            MarkovGraph markov = ((MarkovEstimatorState)s).getMarkovGraph();
            MarkovEstimate initialEst = s.getInitialEstimate();
            List<MarkovEdge> initialPath = markov.getPath(initialEst.getMarkovPath());
            List<MarkovVertex> actualPath = ((MarkovEstimatorState)s).getActualPath();
           
            sb.append(s.toString());
            try {
                GraphvizExport<MarkovVertex, MarkovEdge> gv = MarkovUtil.exportGraphviz(markov, true, initialPath);
                gv.highlightPath(markov.getPath(actualPath), "blue");
                LOG.info("PARTITION: " + ts.getBasePartition());
                LOG.info("GRAPH: " + gv.writeToTempFile(ts.getProcedure().getName()));
            } catch (Exception ex2) {
                LOG.fatal("???????????????????????", ex2);
            }
View Full Code Here
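
The debug helper above exports the MarkovGraph to Graphviz with the initial path highlighted, colors the actual path blue, and logs where the temp file was written. A standalone sketch that emits a toy graph as DOT with one path highlighted; this hand-rolls the DOT output rather than using the GraphvizExport API.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Set;

// Writing a toy graph to Graphviz DOT, coloring the edges along one path.
public class GraphvizDumpSketch {
    public static void main(String[] args) throws IOException {
        List<String[]> edges = List.of(
            new String[]{"START", "Q1"}, new String[]{"Q1", "COMMIT"}, new String[]{"Q1", "ABORT"});
        Set<String> highlighted = Set.of("START->Q1", "Q1->COMMIT");   // the path the txn took

        StringBuilder dot = new StringBuilder("digraph markov {\n");
        for (String[] e : edges) {
            String key = e[0] + "->" + e[1];
            dot.append(String.format("  \"%s\" -> \"%s\"%s;%n",
                       e[0], e[1], highlighted.contains(key) ? " [color=blue, penwidth=2]" : ""));
        }
        dot.append("}\n");

        Path out = Files.createTempFile("markov-", ".dot");
        Files.writeString(out, dot.toString());
        System.out.println("GRAPH DUMP: " + out);     // render with: dot -Tpng <file> -o graph.png
    }
}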

