| 1 | +package com.sparkTutorial.pairRdd.aggregation.combinebykey; |
| 2 | + |
| 3 | +import com.sparkTutorial.pairRdd.aggregation.reducebykey.housePrice.AvgCount; |
| 4 | +import org.apache.log4j.Level; |
| 5 | +import org.apache.log4j.Logger; |
| 6 | +import org.apache.spark.SparkConf; |
| 7 | +import org.apache.spark.api.java.JavaPairRDD; |
| 8 | +import org.apache.spark.api.java.JavaRDD; |
| 9 | +import org.apache.spark.api.java.JavaSparkContext; |
| 10 | +import org.apache.spark.api.java.function.Function; |
| 11 | +import org.apache.spark.api.java.function.Function2; |
| 12 | +import org.apache.spark.api.java.function.PairFunction; |
| 13 | +import scala.Tuple2; |
| 14 | + |
| 15 | +import java.util.Map; |
| 16 | + |
| 17 | +public class AverageHousePriceSolution { |
| 18 | + |
| 19 | + public static void main(String[] args) throws Exception { |
| 20 | + |
| 21 | + Logger.getLogger("org").setLevel(Level.ERROR); |
| 22 | + SparkConf conf = new SparkConf().setAppName("wordCounts").setMaster("local[3]"); |
| 23 | + JavaSparkContext sc = new JavaSparkContext(conf); |
| 24 | + |
| 25 | + JavaRDD<String> lines = sc.textFile("in/RealEstate.csv"); |
| 26 | + |
| 27 | + JavaRDD<String> cleanedLines = lines.filter(line -> !line.contains("Bedrooms")); |
| 28 | + |
| 29 | + JavaPairRDD<String, Double> housePricePairRdd = cleanedLines.mapToPair( |
| 30 | + (PairFunction<String, String, Double>) line -> |
| 31 | + new Tuple2<>(line.split(",")[3], |
| 32 | + Double.parseDouble(line.split(",")[2]))); |
| 33 | + |
| 34 | + JavaPairRDD<String, AvgCount> housePriceTotal= housePricePairRdd.combineByKey(createCombiner, mergeValue, mergeCombiners); |
| 35 | + |
| 36 | + JavaPairRDD<String, Double> housePriceAvg = housePriceTotal.mapToPair( |
| 37 | + (PairFunction<Tuple2<String, AvgCount>, String, Double>) total -> |
| 38 | + new Tuple2<>(total._1(), total._2().getTotal()/total._2().getCount())); |
| 39 | + |
| 40 | + for (Map.Entry<String, Double> housePriceAvgPair : housePriceAvg.collectAsMap().entrySet()) { |
| 41 | + System.out.println(housePriceAvgPair.getKey() + " : " + housePriceAvgPair.getValue()); |
| 42 | + |
| 43 | + } |
| 44 | + } |
| 45 | + |
| 46 | + static Function<Double, AvgCount> createCombiner = (Function<Double, AvgCount>) x -> new AvgCount(1, x); |
| 47 | + |
| 48 | + static Function2<AvgCount, Double, AvgCount> mergeValue |
| 49 | + = (Function2<AvgCount, Double, AvgCount>) (avgCount, x) -> new AvgCount(avgCount.getCount() + 1, |
| 50 | + avgCount.getTotal() + x); |
| 51 | + static Function2<AvgCount, AvgCount, AvgCount> mergeCombiners |
| 52 | + = (Function2<AvgCount, AvgCount, AvgCount>) (avgCountA, avgCountB) -> new AvgCount(avgCountA.getCount() + avgCountB.getCount(), |
| 53 | + avgCountA.getTotal() + avgCountB.getTotal()); |
| 54 | + |
| 55 | +} |