Package co.cask.cdap.data2.dataset2.lib.table.ordered

Examples of co.cask.cdap.data2.dataset2.lib.table.ordered.Update
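
The snippets below are drawn from CDAP's Guice modules and service bootstrap code. They register the ordered-table dataset modules (HBase, in-memory, and LevelDB variants) together with the metrics-table, core, and ACL-table modules, and show how HBase dependencies are traced and ordered on the classpath before jobs are submitted.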


      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-hbase", new HBaseOrderedTableModule());
        defaultModules.put("metricsTable-hbase", new HBaseMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);
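
The named map bound above can be consumed anywhere else in the same injector through standard Guice @Named injection. Below is a minimal sketch of such a consumer; the DefaultModulesRegistrar class is hypothetical and the CDAP import paths are assumptions for illustration, not code taken from CDAP itself.

import java.util.Map;

import com.google.inject.Inject;
import com.google.inject.name.Named;

import co.cask.cdap.api.dataset.module.DatasetModule;   // package path assumed
import co.cask.cdap.data2.dataset2.DatasetFramework;    // package path assumed

/**
 * Hypothetical consumer of the "defaultDatasetModules" binding shown above.
 * Guice injects the LinkedHashMap built in configure(), preserving the
 * registration order that the NOTE in the snippet relies on.
 */
public class DefaultModulesRegistrar {

  private final Map<String, ? extends DatasetModule> defaultModules;

  @Inject
  DefaultModulesRegistrar(@Named("defaultDatasetModules")
                          Map<String, ? extends DatasetModule> defaultModules) {
    this.defaultModules = defaultModules;
  }

  // Adds each default module to the framework in insertion (dependency) order.
  // The real addModule declares a more specific checked exception; simplified here.
  public void registerWith(DatasetFramework framework) throws Exception {
    for (Map.Entry<String, ? extends DatasetModule> entry : defaultModules.entrySet()) {
      framework.addModule(entry.getKey(), entry.getValue());
    }
  }
}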


    return new PrivateModule() {
      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-hbase", new HBaseOrderedTableModule());
        defaultModules.put("metricsTable-hbase", new HBaseMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);

    DatasetDefinitionRegistryFactory registryFactory = injector.getInstance(DatasetDefinitionRegistryFactory.class);
    DatasetFramework datasetFramework =
      new NamespacedDatasetFramework(new InMemoryDatasetFramework(registryFactory),
                                     new DefaultDatasetNamespace(cConf, Namespace.SYSTEM));
    datasetFramework.addModule("orderedTable", new HBaseOrderedTableModule());
    datasetFramework.addModule("core", new CoreDatasetsModule());

    return datasetFramework;
  }
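
A design note on this snippet: the in-memory framework is wrapped in a NamespacedDatasetFramework backed by DefaultDatasetNamespace(cConf, Namespace.SYSTEM), which qualifies dataset names into the system namespace, and the "orderedTable" module is added before "core" for the reason spelled out in the other snippets: module order matters because of dependencies between modules.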

      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-memory", new InMemoryOrderedTableModule());
        defaultModules.put("metricsTable-memory", new InMemoryMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);

    return new PrivateModule() {
      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-memory", new InMemoryOrderedTableModule());
        defaultModules.put("metricsTable-memory", new InMemoryMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);

      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-leveldb", new LevelDBOrderedTableModule());
        defaultModules.put("metricsTable-leveldb", new LevelDBMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);

    return new PrivateModule() {
      @Override
      protected void configure() {
        // NOTE: order is important due to dependencies between modules
        Map<String, DatasetModule> defaultModules = Maps.newLinkedHashMap();
        defaultModules.put("orderedTable-leveldb", new LevelDBOrderedTableModule());
        defaultModules.put("metricsTable-leveldb", new LevelDBMetricsTableModule());
        defaultModules.put("core", new CoreDatasetsModule());
        defaultModules.put("aclTable", new ACLTableModule());

        bind(new TypeLiteral<Map<String, ? extends DatasetModule>>() { })
          .annotatedWith(Names.named("defaultDatasetModules")).toInstance(defaultModules);

    // (for example, if Hadoop uses a different Java version than CDAP).

    Set<String> bootstrapClassPaths = ExploreServiceUtils.getBoostrapClasses();

    Set<File> hBaseTableDeps = ExploreServiceUtils.traceDependencies(
      new HBaseTableUtilFactory().get().getClass().getCanonicalName(),
      bootstrapClassPaths, null);

    // Note the order of dependency jars is important so that HBase jars come first in the classpath order
    // LinkedHashSet maintains insertion order while removing duplicate entries.
    Set<File> orderedDependencies = new LinkedHashSet<File>();
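
As a small, self-contained illustration of the ordering guarantee that comment relies on: a LinkedHashSet keeps first-insertion order and drops later duplicates, so adding the HBase dependencies first keeps their jars at the front of the eventual classpath. The jar names below are made up for the example.

import java.io.File;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class ClasspathOrderSketch {
  public static void main(String[] args) {
    // Stand-ins for the traced dependency sets; the paths are hypothetical.
    Set<File> hBaseTableDeps = new LinkedHashSet<File>(
      Arrays.asList(new File("hbase-client.jar"), new File("guava.jar")));
    Set<File> otherDeps = new LinkedHashSet<File>(
      Arrays.asList(new File("guava.jar"), new File("explore-service.jar")));

    // Same pattern as the snippet: HBase jars go in first, and duplicates
    // collapse to their earliest position.
    Set<File> orderedDependencies = new LinkedHashSet<File>();
    orderedDependencies.addAll(hBaseTableDeps);
    orderedDependencies.addAll(otherDeps);

    // Prints [hbase-client.jar, guava.jar, explore-service.jar]
    System.out.println(orderedDependencies);
  }
}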

    if (classLoader == null) {
      usingCL = ExploreRuntimeModule.class.getClassLoader();
    }
    Set<String> bootstrapClassPaths = getBoostrapClasses();

    Set<File> hBaseTableDeps = traceDependencies(new HBaseTableUtilFactory().get().getClass().getCanonicalName(),
                                                 bootstrapClassPaths, usingCL);

    // Note the order of dependency jars is important so that HBase jars come first in the classpath order
    // LinkedHashSet maintains insertion order while removing duplicate entries.
    Set<File> orderedDependencies = new LinkedHashSet<File>();

      // We have to add this Hadoop Configuration to the dependency jar so that when the Spark job runs outside
      // CDAP it can create the BasicMapReduceContext to have access to our datasets, transactions etc.
      resources.add(hConfLocation);

      try {
        Class<?> hbaseTableUtilClass = new HBaseTableUtilFactory().get().getClass();
        classes.add(hbaseTableUtilClass);
      } catch (ProvisionException e) {
        LOG.warn("Not including HBaseTableUtil classes in submitted Job Jar since they are not available");
      }
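
A note on the catch block: HBaseTableUtilFactory.get() provisions an HBase-version-specific table util, and when that provisioning fails (the ProvisionException above, for example because HBase classes are not on the classpath), the HBaseTableUtil classes are simply left out of the job jar and a warning is logged rather than failing the job.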
