Scenario 1) Enable file-client debug logging from a Spark Java program
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.JavaRDD;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Level;
import org.apache.spark.SparkConf;
/**
 * Demonstrates enabling MapR file-client debug tracing from a Spark Java program.
 *
 * <p>Creates a Spark context on YARN, raises the executor log level to DEBUG,
 * sets the {@code fs.mapr.trace} property on the Hadoop configuration, then
 * reads a file and counts its lines so the file client actually performs I/O
 * with tracing enabled.
 */
public class Word {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Eclipse").setMaster("yarn");
        // try-with-resources: JavaSparkContext is Closeable, so the context is
        // stopped even if the job throws (the original never stopped it).
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Test 1 - Set debug for all executors.
            jsc.setLogLevel("DEBUG");
            // Test 2 - Set file client debug. Must be set before the file is
            // actually read so the trace level applies to the I/O below.
            jsc.hadoopConfiguration().set("fs.mapr.trace", "debug");
            JavaRDD<String> file = jsc.textFile("/avro/install.log");
            // count() is an action: it triggers the read with tracing enabled.
            // textFile yields one RDD element per line, so this is a LINE count
            // (the original message incorrectly said "words").
            long lineCount = file.count();
            System.out.println("Number of lines in file : " + lineCount);
        }
    }
}
Scenario 2) Enable file-client debug logging from a Spark Scala program
Run the following at the spark-shell prompt:
// spark-shell transcript: enable MapR file-client DEBUG tracing, then force a read.
import org.apache.spark.{SparkConf, SparkContext}
// Define the RDD first; this alone reads nothing (RDD transformations are lazy),
// so setting the trace level on the next line still takes effect before any I/O.
val df = sc.textFile("/avro/install.log")
// Raise the MapR file-client trace level on the shared Hadoop configuration.
sc.hadoopConfiguration.set("fs.mapr.trace","DEBUG")
// Action: triggers the actual file read, producing the debug trace output.
df.count()
No comments:
Post a Comment