Java - Code Snippets for 'Spark' - 5 code snippet(s) found

 Sample 1. Usage of org.apache.spark.SparkConf

// Copy every "spark."-prefixed entry from a Hadoop Configuration into a SparkConf.
// "conf" (the iterable Configuration), "sparkConnect" (the master URL) and
// getName() (the application name) are defined in the enclosing class.
SparkConf sparkConf = new SparkConf();
for (Map.Entry<String, String> e : conf) {
    if (e.getKey().startsWith("spark.")) {
        sparkConf.set(e.getKey(), e.getValue());
    }
}
this.sparkContext = new JavaSparkContext(sparkConnect, getName(), sparkConf);

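For comparison, here is a minimal, self-contained sketch of configuring Spark programmatically. The application name, master URL, and property value are illustrative, not taken from the snippet above:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkConfDemo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("spark-conf-demo")       // illustrative app name
                .setMaster("local[*]")               // run locally with all cores
                .set("spark.executor.memory", "1g"); // any "spark.*" key works with set()

        JavaSparkContext sc = new JavaSparkContext(conf);
        // Settings can be read back from the SparkConf.
        System.out.println("spark.app.name = " + conf.get("spark.app.name"));
        sc.stop();
    }
}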


 Sample 2. Usage of org.apache.spark.SparkEnv

public UnsafeExternalRowSorter(
        StructType schema,
        Ordering<InternalRow> ordering,
        PrefixComparator prefixComparator,
        PrefixComputer prefixComputer,
        long pageSizeBytes) throws IOException {
    this.schema = schema;
    this.prefixComputer = prefixComputer;
    // SparkEnv.get() returns the per-JVM Spark runtime environment; its
    // BlockManager backs the sorter's spill storage.
    final SparkEnv sparkEnv = SparkEnv.get();
    final TaskContext taskContext = TaskContext.get();
    sorter = UnsafeExternalSorter.create(
            taskContext.taskMemoryManager(),
            sparkEnv.blockManager(),
            taskContext,
            new RowComparator(ordering, schema.length()),
            prefixComparator,
            pageSizeBytes);
}


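SparkEnv is an internal class, but the companion TaskContext.get() call shown above is public API that user code can call from inside a running task. A minimal sketch, assuming a local master and illustrative data:

import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.TaskContext;
import org.apache.spark.api.java.JavaSparkContext;

public class TaskContextDemo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("task-context-demo").setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        sc.parallelize(Arrays.asList("a", "b", "c", "d"), 2)
          .foreach(value -> {
              // TaskContext.get() returns non-null only inside a running task.
              TaskContext tc = TaskContext.get();
              System.out.println(value + " processed by partition " + tc.partitionId()
                  + " of stage " + tc.stageId());
          });

        sc.stop();
    }
}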


 Sample 3. Usage of org.apache.spark.SparkConf

// Run a job asynchronously and poll its progress through the status tracker.
// APP_NAME and IdentityWithDelay (a map function that sleeps briefly before
// returning its input) are defined elsewhere in the enclosing class.
import java.util.Arrays;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkJobInfo;
import org.apache.spark.SparkStageInfo;
import org.apache.spark.api.java.JavaFutureAction;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public static void main(String[] args) throws Exception {
    SparkConf sparkConf = new SparkConf().setAppName(APP_NAME);
    final JavaSparkContext sc = new JavaSparkContext(sparkConf);

    JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5), 5)
        .map(new IdentityWithDelay<Integer>());
    // collectAsync() submits the job and returns a future immediately.
    JavaFutureAction<List<Integer>> jobFuture = rdd.collectAsync();
    while (!jobFuture.isDone()) {
        Thread.sleep(1000); // 1 second
        List<Integer> jobIds = jobFuture.jobIds();
        if (jobIds.isEmpty()) {
            continue; // the job has not been scheduled yet
        }
        int currentJobId = jobIds.get(jobIds.size() - 1);
        SparkJobInfo jobInfo = sc.statusTracker().getJobInfo(currentJobId);
        SparkStageInfo stageInfo = sc.statusTracker().getStageInfo(jobInfo.stageIds()[0]);
        System.out.println(stageInfo.numTasks() + " tasks total: " + stageInfo.numActiveTasks()
            + " active, " + stageInfo.numCompletedTasks() + " complete");
    }

    System.out.println("Job results are: " + jobFuture.get());
    sc.stop();
}


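Note that JavaFutureAction extends java.util.concurrent.Future, so jobFuture.get() blocks until the job completes and jobFuture.cancel(true) cancels the running Spark job; the polling loop above is only needed to report intermediate progress.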


 Sample 4. Usage of org.apache.spark.SparkJobInfo

  The snippet for this class is identical to Sample 3 above: SparkJobInfo is obtained from the status tracker via sc.statusTracker().getJobInfo(currentJobId), and its stageIds() array is used to look up the current stage.


 Sample 5. Usage of org.apache.spark.SparkStageInfo

  The snippet for this class is identical to Sample 3 above: SparkStageInfo is obtained via sc.statusTracker().getStageInfo(jobInfo.stageIds()[0]) and reports the stage's task counts (numTasks(), numActiveTasks(), numCompletedTasks()).
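The status tracker can also be queried outside a future's polling loop. A small sketch (the helper method printActiveJobs is ours; statusTracker(), getActiveJobIds(), getJobInfo(), and getStageInfo() are standard JavaSparkStatusTracker API):

import org.apache.spark.SparkJobInfo;
import org.apache.spark.SparkStageInfo;
import org.apache.spark.api.java.JavaSparkContext;

// Print progress for every job currently running on the given context.
public static void printActiveJobs(JavaSparkContext sc) {
    for (int jobId : sc.statusTracker().getActiveJobIds()) {
        SparkJobInfo job = sc.statusTracker().getJobInfo(jobId);
        if (job == null) {
            continue; // job info may already have been garbage-collected
        }
        System.out.println("job " + jobId + " status " + job.status());
        for (int stageId : job.stageIds()) {
            SparkStageInfo stage = sc.statusTracker().getStageInfo(stageId);
            if (stage == null) {
                continue;
            }
            System.out.println("  stage " + stage.stageId() + " (" + stage.name() + "): "
                + stage.numCompletedTasks() + "/" + stage.numTasks() + " tasks complete, "
                + stage.numFailedTasks() + " failed");
        }
    }
}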


