Examples of JavaSparkContext


Examples of org.apache.spark.api.java.JavaSparkContext

  }

  // Play area

  public static void main(String[] args) {
    final JavaSparkContext sc = new JavaSparkContext("local[1]", "optiq");
    final JavaRDD<String> file = sc.textFile("/usr/share/dict/words");
    System.out.println(
        file.map(
            new Function<String, Object>() {
              @Override
              public Object call(String s) throws Exception {
                return s.substring(0, Math.min(s.length(), 1));
              }
            }).distinct().count());
    file.cache();
    String s =
        file.groupBy(
            new Function<String, String>() {
              @Override
              public String call(String s) throws Exception {
                return s.substring(0, Math.min(s.length(), 1));
              }
            }
            //CHECKSTYLE: IGNORE 1
        ).map(
            new Function<Tuple2<String, List<String>>, Object>() {
              @Override
              public Object call(Tuple2<String, List<String>> pair) {
                return pair._1() + ":" + pair._2().size();
              }
            }).collect().toString();
    System.out.print(s);

    final JavaRDD<Integer> rdd = sc.parallelize(
        new AbstractList<Integer>() {
          final Random random = new Random();
          @Override
          public Integer get(int index) {
            System.out.println("get(" + index + ")");
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

        toMaterialize.put(c, outputTargetsToMaterialize.get(c));
        outputTargetsToMaterialize.remove(c);
      }
    }
    if (sparkContext == null) {
      this.sparkContext = new JavaSparkContext(sparkConnect, getName());
    }
    SparkRuntime runtime = new SparkRuntime(this, sparkContext, getConfiguration(), outputTargets, toMaterialize,
        cachedCollections);
    runtime.execute();
    outputTargets.clear();
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

    }

    @Test
    public void testBasicRead() {
        String input = TestUtils.sampleArtistsDat();
        sc = new JavaSparkContext(cfg);
        JavaRDD<String> data = sc.textFile(input).cache();

        assertThat((int) data.count(), is(greaterThan(300)));

        long radioHead = data.filter(new Function<String, Boolean>() {
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

    @Test
    public void testHadoopOldApiRead() throws Exception {
        cfg.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        //clone.set("spark.kryo.registrator", MyRegistrator.class.getName());

        sc = new JavaSparkContext(cfg);

        String target = "spark-test/hadoop-basic";

        RestUtils.touch("spark-test");
        RestUtils.putData(target, "{\"message\" : \"Hello World\",\"message_date\" : \"2014-05-25\"}".getBytes());
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

  // Shared Spark contexts for this test class; marked transient so they are
  // never dragged into any serialized closure. Initialized once in setup().
  private static transient JavaSparkContext sc = null;
  private static transient JavaSQLContext sqc = null;

  /**
   * Creates the JavaSparkContext and its JavaSQLContext wrapper once before
   * any test in this class runs.
   *
   * <p>NOTE(review): {@code conf} is a SparkConf declared elsewhere in this
   * class (not visible in this snippet) — presumably configured for a local
   * master; verify against the full source. Contexts created here should be
   * stopped in a matching {@code @AfterClass} method to release resources.
   */
  @BeforeClass
  public static void setup() {
    sc = new JavaSparkContext(conf);
    // JavaSQLContext is layered on top of the same SparkContext.
    sqc = new JavaSQLContext(sc);
  }
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

    // SparkConf built once for the whole test class: seeded from the test
    // properties bundle, pinned to an in-process "local" master, and given a
    // fixed app name ("estest"). transient keeps it out of serialized closures.
    private static final transient SparkConf conf = new SparkConf().setAll(propertiesAsScalaMap(TestSettings.TESTING_PROPS)).setMaster("local").setAppName("estest");
    // Shared context for all tests in this class; created in setup().
    private static transient JavaSparkContext sc = null;

    /**
     * Starts the shared local JavaSparkContext once before any test runs.
     *
     * <p>NOTE(review): no matching {@code @AfterClass} teardown is visible in
     * this snippet — confirm the full source stops the context, otherwise the
     * local Spark instance leaks between test classes.
     */
    @BeforeClass
    public static void setup() {
        sc = new JavaSparkContext(conf);
    }
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

  }

  // Play area

  public static void main(String[] args) {
    final JavaSparkContext sc = new JavaSparkContext("local[1]", "optiq");
    final JavaRDD<String> file = sc.textFile("/usr/share/dict/words");
    System.out.println(
        file.map(
            new Function<String, Object>() {
              @Override
              public Object call(String s) throws Exception {
                return s.substring(0, Math.min(s.length(), 1));
              }
            }).distinct().count());
    file.cache();
    String s =
        file.groupBy(
            new Function<String, String>() {
              @Override
              public String call(String s) throws Exception {
                return s.substring(0, Math.min(s.length(), 1));
              }
            }
            //CHECKSTYLE: IGNORE 1
        ).map(
            new Function<Tuple2<String, List<String>>, Object>() {
              @Override
              public Object call(Tuple2<String, List<String>> pair) {
                return pair._1() + ":" + pair._2().size();
              }
            }).collect().toString();
    System.out.print(s);

    final JavaRDD<Integer> rdd = sc.parallelize(
        new AbstractList<Integer>() {
          final Random random = new Random();
          @Override
          public Integer get(int index) {
            System.out.println("get(" + index + ")");
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

        toMaterialize.put(c, outputTargetsToMaterialize.get(c));
        outputTargetsToMaterialize.remove(c);
      }
    }
    if (sparkContext == null) {
      this.sparkContext = new JavaSparkContext(sparkConnect, getName());
      if (jarClass != null) {
        String[] jars = JavaSparkContext.jarOfClass(jarClass);
        if (jars != null && jars.length > 0) {
          for (String jar : jars) {
            sparkContext.addJar(jar);
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

      for (Map.Entry<String, String> e : conf) {
        if (e.getKey().startsWith("spark.")) {
          sparkConf.set(e.getKey(), e.getValue());
        }
      }
      this.sparkContext = new JavaSparkContext(sparkConnect, getName(), sparkConf);
      if (jarClass != null) {
        String[] jars = JavaSparkContext.jarOfClass(jarClass);
        if (jars != null && jars.length > 0) {
          for (String jar : jars) {
            sparkContext.addJar(jar);
View Full Code Here

Examples of org.apache.spark.api.java.JavaSparkContext

    if (args.length < 2) {
      System.err.println("Usage: JavaWordCount <master> <file>");
      System.exit(1);
    }

    JavaSparkContext ctx = new JavaSparkContext(args[0], "JavaWordCount",
        System.getenv("SPARK_HOME"), JavaSparkContext.jarOfClass(JavaWordCount.class));
    JavaRDD<String> lines = ctx.textFile(args[1], 1);

    JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
      @Override
      public Iterable<String> call(String s) {
        return Arrays.asList(SPACE.split(s));
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.