Creating RDDs in Spark with parallelize, and the difference between map and flatMap

Spark offers two commonly used methods for creating an RDD from an in-memory collection: parallelize and parallelizePairs. parallelize produces a regular RDD, while parallelizePairs produces a key-value (kv) format RDD.

package com.debug;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;

public class CreateRDD1 {

	public static void main(String[] args) {
		// Run locally in a single thread
		SparkConf conf = new SparkConf();
		conf.setMaster("local");
		conf.setAppName("createRDD01");

		JavaSparkContext sc = new JavaSparkContext(conf);

		// parallelize builds a regular (non-kv) RDD from an in-memory collection
		List<String> arr = Arrays.asList("a_1", "b_2", "c_3", "d_4", "e_5", "f_6");
		JavaRDD<String> rdd1 = sc.parallelize(arr);

		/*
		 * Equivalent map version: exactly one output element per input element.
		 * JavaRDD<String> res = rdd1.map(new Function<String, String>() {
		 *     public String call(String ch) throws Exception {
		 *         return ch + "*";
		 *     }
		 * });
		 */
		// flatMap: each input element maps to a collection, which is then flattened.
		// Note: this Iterable-returning signature is the Spark 1.x Java API;
		// in Spark 2.x, FlatMapFunction.call returns an Iterator instead.
		JavaRDD<String> res = rdd1.flatMap(new FlatMapFunction<String, String>() {

			public Iterable<String> call(String ch) throws Exception {
				// "a_1" splits into ["a", "1"]
				return Arrays.asList(ch.split("_"));
			}
		});
		// Print each flattened element; in local mode this goes to the driver console
		res.foreach(new VoidFunction<String>() {

			public void call(String content) throws Exception {
				System.out.println(content);
			}
		});

		sc.stop();

	}

}
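
Run locally, the flatMap version should print each fragment on its own line: a, 1, b, 2, c, 3, d, 4, e, 5, f, 6, so the six inputs become twelve outputs.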

Note the difference between map and flatMap here: map transforms each element into exactly one new element, while flatMap maps each input element to a collection and then concatenates those collections into one flattened result. If you picture each RDD element as a piece of fruit, map is like peeling the fruit (the total count stays the same), whereas flatMap is like chopping the fruit into pieces (the count usually grows).
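
To make the difference concrete, here is a minimal side-by-side sketch. It assumes Spark 2.x, where the Java flatMap takes an Iterator-returning function instead of the Iterable-returning one used above, plus Java 8 lambdas; the class name MapVsFlatMap is made up for illustration.

package com.debug;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class MapVsFlatMap {

	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local").setAppName("mapVsFlatMap");
		JavaSparkContext sc = new JavaSparkContext(conf);

		JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("a_1", "b_2", "c_3", "d_4", "e_5", "f_6"));

		// map: one output per input, so the element count stays at 6
		JavaRDD<String> mapped = rdd1.map(ch -> ch + "*");

		// flatMap: each input expands into a collection, then all collections are flattened
		JavaRDD<String> flattened = rdd1.flatMap(ch -> Arrays.asList(ch.split("_")).iterator());

		System.out.println(mapped.count());    // 6
		System.out.println(flattened.count()); // 12

		sc.stop();
	}
}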

Key-value RDDs also come up frequently in practice. As mentioned above, parallelizePairs creates a kv-format RDD; see the following demo.

package com.debug;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;

import scala.Tuple2;

public class CreateRDD2 {

	public static void main(String[] args) {
		
		SparkConf conf = new SparkConf();
		conf.setMaster("local");
		conf.setAppName("CreateRDD2");

		JavaSparkContext sc = new JavaSparkContext(conf);

		// parallelizePairs takes a List of Tuple2 and returns a kv-format JavaPairRDD
		List<Tuple2<String, Double>> arr2 = Arrays.asList(
				new Tuple2<String, Double>("u1", 20.01),
				new Tuple2<String, Double>("u2", 18.95),
				new Tuple2<String, Double>("u3", 20.55),
				new Tuple2<String, Double>("u4", 20.12),
				new Tuple2<String, Double>("u5", 100.11)
		);
		JavaPairRDD<String, Double> rdd2 = sc.parallelizePairs(arr2);

		// Each element of a kv RDD is a Tuple2, printed as (key,value), e.g. (u1,20.01)
		rdd2.foreach(new VoidFunction<Tuple2<String, Double>>() {

			public void call(Tuple2<String, Double> tup) throws Exception {
				System.out.println(tup);
			}
		});

		sc.stop();

	}

}
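
Besides parallelizePairs, in day-to-day code a kv RDD is just as often obtained by transforming a regular RDD with mapToPair. Below is a minimal sketch under the same assumptions as before (Spark 2.x, Java 8 lambdas); the class name and the "u1_20.01" sample encoding are made up for illustration.

package com.debug;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

public class CreateRDD3 {

	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setMaster("local").setAppName("mapToPairDemo");
		JavaSparkContext sc = new JavaSparkContext(conf);

		// Start from a regular RDD of "key_value" strings
		JavaRDD<String> lines = sc.parallelize(Arrays.asList("u1_20.01", "u2_18.95", "u3_20.55"));

		// mapToPair turns each element into a Tuple2, producing a kv-format JavaPairRDD
		JavaPairRDD<String, Double> pairs = lines.mapToPair(line -> {
			String[] parts = line.split("_");
			return new Tuple2<>(parts[0], Double.parseDouble(parts[1]));
		});

		pairs.foreach(tup -> System.out.println(tup)); // (u1,20.01) etc.

		sc.stop();
	}
}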
