Examples of TableBuilder


Examples of com.splout.db.hadoop.TableBuilder

   
  // ---- Partition field must be of same kind across tables ---- //
 
  @Test(expected=TablespaceBuilderException.class)
  public void testInvalidCoPartition() throws TableBuilderException, TablespaceBuilderException {
    Table table1 = new TableBuilder(SCHEMA_1).addCSVTextFile("foo1.txt").partitionBy("id1").build();
    Table table2 = new TableBuilder(SCHEMA_3).addCSVTextFile("foo2.txt").partitionBy("id3").build();

    TablespaceBuilder builder = new TablespaceBuilder();
    builder.add(table1);
    builder.add(table2);
View Full Code Here

Examples of com.splout.db.hadoop.TableBuilder

    builder.build();
  }
 
  @Test(expected=TablespaceBuilderException.class)
  public void testInvalidCoPartitionMultipleFields() throws TableBuilderException, TablespaceBuilderException {
    Table table1 = new TableBuilder(SCHEMA_1).addCSVTextFile("foo1.txt").partitionBy("value1", "id1").build();
    Table table2 = new TableBuilder(SCHEMA_3).addCSVTextFile("foo2.txt").partitionBy("value3", "id3").build();

    TablespaceBuilder builder = new TablespaceBuilder();
    builder.add(table1);
    builder.add(table2);
View Full Code Here

Examples of com.splout.db.hadoop.TableBuilder

      // define the schema of the input files: projectcode, pagename, pageviews, bytes
      Schema fileSchema = new Schema("pagecountsfile",
          Fields.parse("projectcode:string, pagename:string, pageviews:int, bytes:long"));

      // instantiate a TableBuilder
      TableBuilder tableBuilder = new TableBuilder(tableSchema);

      // for every input file...
      for(FileStatus fileStatus : fileStatuses) {
        String fileName = fileStatus.getPath().getName().toString();
        // strip the date and the hour from the file name
        String fileDate = fileName.split("-")[1];
        String fileHour = fileName.split("-")[2].substring(0, 2);
        // instantiate a custom RecordProcessor to process the records of this file
        PageCountsRecordProcessor recordProcessor = new PageCountsRecordProcessor(tableSchema, fileDate,
            fileHour);
        // use the tableBuilder method for adding each of the files to the mix
        tableBuilder.addCSVTextFile(fileStatus.getPath(), ' ', TupleTextInputFormat.NO_QUOTE_CHARACTER,
            TupleTextInputFormat.NO_ESCAPE_CHARACTER, false, false, TupleTextInputFormat.NO_NULL_STRING,
            fileSchema, recordProcessor);
      }

      // partition the dataset by pagename - which should give a fair even distribution.
      tableBuilder.partitionBy("pagename");
      // create a compound index on pagename, date so that typical queries for the dataset will be fast
      tableBuilder.createIndex("pagename", "date");

      long nonExactPageSize = memoryForIndexing / 32000; // number of pages
      int pageSize = (int) Math.pow(2, (int) Math.round(Math.log(nonExactPageSize) / Math.log(2)));
      Log.info("Pagesize = " + pageSize + " as memory for indexing was [" + memoryForIndexing
          + "] and there are 32000 pages.");

      tableBuilder.initialSQL("pragma page_size=" + pageSize);
      // insertion order is very important for optimizing query speed because it makes data be co-located in disk
      tableBuilder.insertionSortOrder(OrderBy.parse("pagename:asc, date:asc"));

      // instantiate a TablespaceBuilder
      TablespaceBuilder tablespaceBuilder = new TablespaceBuilder();

      // we will partition this dataset in as many partitions as:
      tablespaceBuilder.setNPartitions(nPartitions);
      tablespaceBuilder.add(tableBuilder.build());
      // we turn a specific SQLite pragma on for making autocomplete queries fast
      tablespaceBuilder.initStatements("pragma case_sensitive_like=true;");

      HadoopUtils.deleteIfExists(outFs, outPath);
View Full Code Here

Examples of generator.symbol.TableBuilder

  public static void main( String args[] ){
   
    LexiconAnalyzer lexicon = new LexiconAnalyzer( "codigo.tny" );
    SyntacticAnalyzer syntactic = new SyntacticAnalyzer( lexicon.analyze() );
    SyntaxTree tree = syntactic.analyze();
    TableBuilder builder = new TableBuilder( tree );
    CodeGenerator generator = new CodeGenerator( tree, builder.build() );
   
    generator.generate( "codigo.tm" );
  }
View Full Code Here

Examples of org.apache.isis.viewer.scimpi.dispatcher.view.display.TableBuilder

        addElementProcessor(new ShowDebug());
        addElementProcessor(new SimpleButton());
        addElementProcessor(new Specification());
        addElementProcessor(new TableCell());
        addElementProcessor(new TableView());
        addElementProcessor(new TableBuilder());
        addElementProcessor(new TableEmpty());
        addElementProcessor(new TableRow());
        addElementProcessor(new TableHeader());
        addElementProcessor(new TemplateTag());
        addElementProcessor(new Title());
View Full Code Here

Examples of org.apache.isis.viewer.scimpi.dispatcher.view.display.TableBuilder

        addElementProcessor(new ShowDebug());
        addElementProcessor(new SimpleButton());
        addElementProcessor(new Specification());
        addElementProcessor(new TableCell());
        addElementProcessor(new TableView());
        addElementProcessor(new TableBuilder());
        addElementProcessor(new TableEmpty());
        addElementProcessor(new TableRow());
        addElementProcessor(new TableHeader());
        addElementProcessor(new TemplateTag());
        addElementProcessor(new Title());
View Full Code Here

Examples of org.apache.isis.viewer.scimpi.dispatcher.view.display.TableBuilder

        addElementProcessor(new ShortFormView());
        addElementProcessor(new ShowDebug());
        addElementProcessor(new Specification());
        addElementProcessor(new TableCell());
        addElementProcessor(new TableView());
        addElementProcessor(new TableBuilder());
        addElementProcessor(new TableEmpty());
        addElementProcessor(new TableRow());
        addElementProcessor(new TableHeader());
        addElementProcessor(new TemplateTag());
        addElementProcessor(new Title());
View Full Code Here

Examples of org.grouplens.lenskit.util.table.TableBuilder

    public Table perform() throws TaskExecutionException, InterruptedException {
        try {
            experiments = createExperimentSuite();
            measurements = createMeasurementSuite();
            layout = ExperimentOutputLayout.create(experiments, measurements);
            TableBuilder resultsBuilder = new TableBuilder(layout.getResultsLayout());

            logger.info("Starting evaluation of {} algorithms ({} from LensKit) on {} data sets",
                        Iterables.size(experiments.getAllAlgorithms()),
                        experiments.getAlgorithms().size(),
                        experiments.getDataSets().size());
            Closer closer = Closer.create();

            try {
                outputs = openExperimentOutputs(layout, measurements, resultsBuilder, closer);
                DAGNode<JobGraph.Node,JobGraph.Edge> jobGraph;
                try {
                    jobGraph = makeJobGraph(experiments);
                } catch (RecommenderConfigurationException ex) {
                    throw new TaskExecutionException("Recommender configuration error", ex);
                }
                if (taskGraphFile != null) {
                    logger.info("writing task graph to {}", taskGraphFile);
                    JobGraph.writeGraphDescription(jobGraph, taskGraphFile);
                }
                registerTaskListener(jobGraph);

                // tell all metrics to get started
                runEvaluations(jobGraph);
            } catch (Throwable th) {
                throw closer.rethrow(th, TaskExecutionException.class, InterruptedException.class);
            } finally {
                closer.close();
            }

            logger.info("evaluation {} completed", getName());

            return resultsBuilder.build();
        } catch (IOException e) {
            throw new TaskExecutionException("I/O error", e);
        } finally {
            experiments = null;
            measurements = null;
View Full Code Here

Examples of org.iq80.leveldb.table.TableBuilder

        try {
            InternalKey smallest = null;
            InternalKey largest = null;
            FileChannel channel = new FileOutputStream(file).getChannel();
            try {
                TableBuilder tableBuilder = new TableBuilder(options, channel, new InternalUserComparator(internalKeyComparator));

               
                for (Entry<InternalKey, Slice> entry : data) {
                    // update keys
                    InternalKey key = entry.getKey();
                    if (smallest == null) {
                        smallest = key;
                    }
                    largest = key;

                    tableBuilder.add(key.encode(), entry.getValue());
                }

                tableBuilder.finish();
            } finally {
                try {
                    channel.force(true);
                } finally {
                    channel.close();
View Full Code Here

Examples of org.iq80.leveldb.table.TableBuilder

            compactionState.currentSmallest = null;
            compactionState.currentLargest = null;

            File file = new File(databaseDir, Filename.tableFileName(fileNumber));
            compactionState.outfile = new FileOutputStream(file).getChannel();
            compactionState.builder = new TableBuilder(options, compactionState.outfile, new InternalUserComparator(internalKeyComparator));
        }
        finally {
            mutex.unlock();
        }
    }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.