Why does this class, built with Java 8, run ~5x faster on Java 11 than Java 8?
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.HashMap;
public class JMapSpeed {
    /** Number of entries inserted during the timing run. */
    private static final int ITERATIONS = 10_000_000;

    /**
     * Fills a HashMap with ITERATIONS entries and prints the elapsed time
     * in milliseconds plus the resulting map size.
     */
    public static void main(String[] args) {
        HashMap<Integer, Integer> map = new HashMap<>(ITERATIONS);
        LocalDateTime start = LocalDateTime.now();
        fill(map, ITERATIONS);
        LocalDateTime stop = LocalDateTime.now();
        System.out.println("Duration: " + Duration.between(start, stop).toMillis());
        System.out.println("Iterations: " + map.size());
    }

    /**
     * Inserts the keys/values [0, count) into the given map.
     *
     * Fix: the original loop used a boxed {@code Integer} counter, so every
     * iteration unboxed, incremented, and re-boxed it (plus the boxed
     * comparison in the loop condition). A primitive {@code int} counter
     * boxes only once per {@code put}, which is what the benchmark intends
     * to measure.
     */
    static void fill(HashMap<Integer, Integer> map, int count) {
        for (int n = 0; n < count; n++) {
            map.put(n, n);
        }
    }
}
Related
I am trying to use wholeTextFiles API for file processing. I do have lot of .gz files in a folder and want to read them with the wholeTextFiles API.
I have 4 executors with each 1 core with 2GB RAM on each executor.
Only 2 executors are processing the job and the processing is really slow. The other two executors are sitting idle.
How do I spread the job to the other 2 executors to increase the parallelism?
package com.sss.ss.ss.WholeText;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;
// Reads whole text files from args[0] into a Spark RDD, splits file contents
// into lines, filters out short/malformed lines, maps each line to a bean,
// and registers the result as a temp view for Hive SQL inserts.
public class WholeText {
// Simple serializable two-field bean; used as the schema class for
// createDataFrame(...) below.
public static class mySchema implements Serializable {
private String CFIELD1 ;
private String CFIELD2 ;
public String getCFIELD1()
{
return CFIELD1;
}
public void setCFIELD1(String cFIELD1)
{
CFIELD1 = cFIELD1;
}
public String getCFIELD2()
{
return CFIELD2;
}
public void setCFIELD2(String cFIELD2)
{
CFIELD2 = cFIELD2;
}
}
public static void main(String[] args) throws InterruptedException {
SparkConf sparkConf = new SparkConf().setAppName("My app")
.setMaster("mymaster..")
.set("spark.driver.allowMultipleContexts", "true");
// The streaming context is only used here to obtain a SparkContext; the
// 15-second batch duration is never exercised (no jssc.start() anywhere).
JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, Durations.seconds(15));
// NOTE(review): the minPartitions hint (args[3]) cannot split .gz inputs --
// gzip is not a splittable codec, so each .gz file stays a single partition.
// Presumably this is why only some executors get work; verify by checking
// the partition count printed below against the number of input files.
JavaPairRDD<String, String> wholeTextFiles = jssc.sparkContext().wholeTextFiles(args[0],Integer.parseInt(args[3]));
Integer ll = wholeTextFiles.getNumPartitions();
System.out.println("Number of Partitions"+ll);
// (filename, contents) pairs -> file contents only -> individual lines.
JavaRDD<String> stringRDD = wholeTextFiles.
map(
new Function<Tuple2<String, String>, String>() {
private static final long serialVersionUID = -551872585218963131L;
// Keep only the file contents (._2); the path (._1) is discarded.
public String call(Tuple2<String, String> v1) throws Exception
{
return v1._2;
}
}
).
flatMap
(new FlatMapFunction<String, String>()
{
// Split whole-file contents into lines (handles \r\n and \n).
public Iterator<String> call(String t) throws Exception
{
return Arrays.asList(t.split("\\r?\\n")).iterator();
}
}).
filter(new Function<String, Boolean>() {
private static final long serialVersionUID = 1L;
// Keep lines that are non-blank and contain more than 3 semicolons.
// (The variable is named "colons" but it counts ';' characters.)
public Boolean call(String t) throws Exception {
int colons = 0;
String s = t;
if(s == null || s.trim().length() < 1) {
return false;
}
for(int i = 0; i < s.length(); i++) {
if(s.charAt(i) == ';') colons++;
}
// NOTE(review): printing per line from inside a filter runs on the
// executors and is very slow for large inputs.
System.out.println("colons="+colons);
if ((colons <=3)){
return false;
}
return true;
}
});
// Map each surviving line to a mySchema bean using the first two
// semicolon-separated fields (split with -1 keeps trailing empties).
JavaRDD<mySchema> schemaRDD = stringRDD.map(new Function<String, mySchema>()
{
private static final long serialVersionUID = 1L;
public mySchema call(String line) throws Exception
{
String[] parts = line.split(";",-1);
mySchema mySchema = new mySchema();
mySchema.setCFIELD1 (parts[0]);
mySchema.setCFIELD2 (parts[1]);
return mySchema;
}
});
SQLContext hc = new HiveContext(jssc.sparkContext());
Dataset<Row> df = hc.createDataFrame(schemaRDD, mySchema.class);
df.createOrReplaceTempView("myView");
// NOTE(review): the two hc.sql(...) calls below contain unterminated /
// elided string literals ("-----", ".......") and will not compile as
// posted -- the real INSERT statements were redacted by the author.
hc.sql("INSERT INTO -----
"from myView");
hc.sql("INSERT INTO .......
"from myView");
}
}
I'm looking to rewrite my whole .txt file of numbers e.g. 302340372048725280 to 3 0 2 3 4 0 3 7 2 0 4 8 7 2 5 2 8 0. How may I do this?
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Scanner;
public class IntSeparator {
    /**
     * Reads whitespace-separated tokens from src/extremePuzzles.txt and
     * appends a space-separated-digit version of each token to the same file.
     *
     * Fixes over the original:
     * - The PrintWriter was never closed, so its buffered output was usually
     *   never flushed to disk; both it and the Scanner leaked. Both are now
     *   managed by try-with-resources.
     * - The writer appended to the very file still being scanned; we now read
     *   all input first, then open the writer.
     */
    public static void main(String[] args) throws IOException {
        java.util.List<String> puzzles = new java.util.ArrayList<>();
        try (Scanner scanner = new Scanner(new File("src/extremePuzzles.txt"))) {
            while (scanner.hasNext()) {
                puzzles.add(scanner.next());
            }
        } catch (FileNotFoundException e) {
            System.out.println("file not found!");
            return;
        }
        try (PrintWriter spacer = new PrintWriter(new FileWriter("src/extremePuzzles.txt", true))) {
            for (String puzzle : puzzles) {
                System.out.println(puzzle);
                String splitted = separate(puzzle);
                System.out.println(splitted);
                spacer.print(splitted);
            }
        }
    }

    /**
     * Inserts a single space between every character: "302" -> "3 0 2".
     * replace("", " ") puts a space at every character boundary (including
     * both ends); trim() drops the two edge spaces.
     */
    static String separate(String puzzle) {
        return puzzle.replace("", " ").trim();
    }
}
Simple string processing may work, as shown below:
public class Str {
    /** Demo: prints the hard-coded digit string with a space after each digit. */
    public static void main(String args[]) {
        String str = "302340372048725280";
        System.out.println(addSpaces(str));
    }

    /**
     * Appends a space after every character: "302" -> "3 0 2 " (note the
     * trailing space, preserved from the original loop's output).
     *
     * Fix: the original built the result with {@code temp += ...} inside a
     * loop, which is O(n^2) because each += copies the whole string; a
     * StringBuilder makes it linear.
     */
    static String addSpaces(String str) {
        StringBuilder sb = new StringBuilder(str.length() * 2);
        for (int i = 0; i < str.length(); i++) {
            sb.append(str.charAt(i)).append(' ');
        }
        return sb.toString();
    }
}
You can use this code as per your need!
// What packages are needed for these 3 programs?
How do I combine these 3 programs into one program?
How do I run MapReduce using these 3 programs in Eclipse?
Please help me run this program successfully.
os : linux
Exception Faced :
The method TryParseInt(String) is undefined for the type
MaxPYear.MaxPubYearReducer
2.The method setInputFormatClass (Class) in
the type Job is not applicable for the arguments
(Class)
Mapper code :
public static class MaxPubYearMapper extends Mapper<LongWritable , Text, IntWritable,Text>
{
    // Emits every well-formed two-column (tab-separated) input line under the
    // constant key 1, re-joined with ';', so a single reducer call sees all
    // records and can select the maximum.
    public void map(LongWritable key, Text value , Context context)
    throws IOException, InterruptedException
    {
        String[] columns = value.toString().split("\t");
        if (columns.length != 2) {
            return; // malformed line: skip silently, as before
        }
        Text payload = new Text();
        payload.set(columns[0] + ";" + columns[1]);
        context.write(new IntWritable(1), payload);
    }
}
Reducer Code :
public static class MaxPubYearReducer extends Reducer<IntWritable ,Text, Text, IntWritable>
{
    /**
     * Scans all "year;count"-style values and writes the year with the
     * largest count.
     *
     * Fix for the reported compile error "The method TryParseInt(String) is
     * undefined": the original called a TryParseInt helper that was never
     * defined anywhere. A private tolerant parser is supplied below; values
     * that are not valid integers are skipped instead of crashing the task.
     */
    public void reduce(IntWritable key, Iterable<Text> values , Context context) throws IOException, InterruptedException
    {
        int maxiValue = Integer.MIN_VALUE;
        String maxiYear = "";
        for(Text value:values) {
            String token[] = value.toString().split(";");
            if (token.length == 2) {
                Integer count = tryParseInt(token[1]);
                if (count != null && count.intValue() > maxiValue) {
                    maxiValue = count.intValue();
                    maxiYear = token[0];
                }
            }
        }
        context.write(new Text(maxiYear), new IntWritable(maxiValue));
    }

    // Returns the parsed integer, or null when the string is not a valid
    // base-10 integer (matching the apparent intent of "TryParseInt").
    private static Integer tryParseInt(String s) {
        try {
            return Integer.valueOf(s.trim());
        } catch (NumberFormatException e) {
            return null;
        }
    }
}
Driver Code :
// Driver: runs a "Frequency" job (its mapper/reducer are defined elsewhere,
// not in this snippet), then feeds that job's output into the
// max-publication-year job defined above.
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
// NOTE(review): the job name contains paste junk ("`enter code here`").
Job job = new Job(conf , "Frequency`enter code here`");
job.setJarByClass(MaxPubYear.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
// FrequencyMapper / FrequencyReducer are not shown anywhere in this file.
job.setMapperClass(FrequencyMapper.class);
job.setCombinerClass(FrequencyReducer.class);
job.setReducerClass(FrequencyReducer.class);
job.setOutputFormatClass(TextOutputFormat.class);
// NOTE(review): the reported error "setInputFormatClass(Class) ... not
// applicable" occurs when TextInputFormat/TextOutputFormat come from the
// OLD API (org.apache.hadoop.mapred.*); Job expects the new-API classes in
// org.apache.hadoop.mapreduce.lib.input / .output -- fix the imports.
job.setInputFormatClass(TextInputFormat.class);
FileInputFormat.addInputPath(job,new Path(args[0]));
// The first job writes to "<output>_temp", consumed by the second job.
FileOutputFormat.setOutputPath(job,new Path(args[1]+ "_temp"));
int exitCode = job.waitForCompletion(true)?0:1;
if (exitCode == 0 )
{
// Second pass: find the publication year with the maximum count.
Job SecondJob = new Job(conf, "Maximum Publication year");
SecondJob.setJarByClass(MaxPubYear.class);
SecondJob.setOutputKeyClass(Text.class);
SecondJob.setOutputValueClass(IntWritable.class);
SecondJob.setMapOutputKeyClass(IntWritable.class);
SecondJob.setMapOutputValueClass(Text.class);
SecondJob.setMapperClass(MaxPubYearMapper.class);
SecondJob.setReducerClass(MaxPubYearReducer.class);
FileInputFormat.addInputPath(SecondJob,new Path(args[1]+ "_temp"));
FileOutputFormat.setOutputPath(SecondJob,new Path(args[1]));
System.exit(SecondJob.waitForCompletion(true)?0:1);
}
}
Just write them together in one class.
required packages are:
package org.myorg;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import java.io.DataInput;
import java.io.DataOutput;
There might be some extras here, since I copied them from my code.
package org.myorg;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
// Combined single-class version of the three programs.
// NOTE(review): "<your classname as well as filename>" is a placeholder, not
// valid Java -- replace it with a real identifier (e.g. MaxPubYear) matching
// the .java file name. Also note the imports above use the OLD API
// (org.apache.hadoop.mapred.*) while Mapper/Reducer/Job here need the new
// org.apache.hadoop.mapreduce.* API -- that mismatch causes the reported
// setInputFormatClass error.
public class <your classname as well as filename> {
// Emits each well-formed two-column (tab-separated) line under the constant
// key 1, re-joined with ';', so the reducer can pick the maximum.
public static class MaxPubYearMapper extends Mapper<LongWritable , Text, IntWritable,Text>
{
public void map(LongWritable key, Text value , Context context)
throws IOException, InterruptedException
{
String delim = "\t";
Text valtosend = new Text();
String tokens[] = value.toString().split(delim);
if (tokens.length == 2)
{
valtosend.set(tokens[0] + ";"+ tokens[1]);
context.write(new IntWritable(1), valtosend);
}
}
}
// Finds the "year;count" value with the largest count.
public static class MaxPubYearReducer extends Reducer<IntWritable ,Text, Text, IntWritable>
{
public void reduce(IntWritable key, Iterable<Text> values , Context context) throws IOException, InterruptedException
{
int maxiValue = Integer.MIN_VALUE;
String maxiYear = "";
for(Text value:values) {
String token[] = value.toString().split(";");
// NOTE(review): TryParseInt is undefined anywhere in this file (the
// first reported error); define a helper or use Integer.parseInt.
if(token.length == 2 && TryParseInt(token[1]).intValue()> maxiValue)
{
maxiValue = TryParseInt(token[1]);
maxiYear = token[0];
}
}
context.write(new Text(maxiYear), new IntWritable(maxiValue));
}
}
// Driver: "Frequency" job first (FrequencyMapper/FrequencyReducer are not
// defined in this file either), then the max-publication-year job.
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
// NOTE(review): the job name contains paste junk ("`enter code here`").
Job job = new Job(conf , "Frequency`enter code here`");
job.setJarByClass(MaxPubYear.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
job.setMapperClass(FrequencyMapper.class);
job.setCombinerClass(FrequencyReducer.class);
job.setReducerClass(FrequencyReducer.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setInputFormatClass(TextInputFormat.class);
FileInputFormat.addInputPath(job,new Path(args[0]));
// First job writes to "<output>_temp", consumed by the second job below.
FileOutputFormat.setOutputPath(job,new Path(args[1]+ "_temp"));
int exitCode = job.waitForCompletion(true)?0:1;
if (exitCode == 0 )
{
Job SecondJob = new Job(conf, "Maximum Publication year");
SecondJob.setJarByClass(MaxPubYear.class);
SecondJob.setOutputKeyClass(Text.class);
SecondJob.setOutputValueClass(IntWritable.class);
SecondJob.setMapOutputKeyClass(IntWritable.class);
SecondJob.setMapOutputValueClass(Text.class);
SecondJob.setMapperClass(MaxPubYearMapper.class);
SecondJob.setReducerClass(MaxPubYearReducer.class);
FileInputFormat.addInputPath(SecondJob,new Path(args[1]+ "_temp"));
FileOutputFormat.setOutputPath(SecondJob,new Path(args[1]));
System.exit(SecondJob.waitForCompletion(true)?0:1);
}
}
}
Is there any implementation of Date Picker and Time Picker into the default JavaFX 8 package that I can use without using third party solutions?
DatePicker
Yes, Java 8 has a DatePicker:
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.DatePicker;
import javafx.stage.Stage;
import java.time.LocalDate;
public class PickerDemo extends Application {
#Override public void start(Stage stage) {
final DatePicker datePicker = new DatePicker(LocalDate.now());
datePicker.setOnAction(event -> {
LocalDate date = datePicker.getValue();
System.out.println("Selected date: " + date);
});
stage.setScene(
new Scene(datePicker)
);
stage.show();
}
public static void main(String[] args) { launch(args); }
}
TimePicker
No, Java 8 does not have a TimePicker.
There is a TimePicker in jfxtras (source here).
Given that Java 8 already has a DatePicker, the addition of a TimePicker might be an appropriate feature request you could make.
If you do not want to display the calendar in a popup, here is a solution which uses the internal CalendarPickerContent class.
DatePickerSkin skin = new DatePickerSkin(new DatePicker());
Node calendarControl = skin.getPopupContent();
Here is my attempt, based on @javaLearner's answer:
DateTimePicker.java:
package test;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.scene.control.DatePicker;
import javafx.scene.control.Skin;
import javafx.util.StringConverter;
public class DateTimePicker extends DatePicker{
private ObjectProperty<LocalTime> timeValue = new SimpleObjectProperty<>();
private ObjectProperty<ZonedDateTime> dateTimeValue;
public DateTimePicker(){
super();
setValue(LocalDate.now());
setTimeValue(LocalTime.now());
setConverter(new StringConverter<LocalDate>() {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HH:mm:ssZ");
#Override
public String toString ( LocalDate object ) {
return dateTimeValue.get().format(formatter);
}
#Override
public LocalDate fromString ( String string ) {
return LocalDate.parse(string, formatter);
}
});
}
#Override
protected Skin<?> createDefaultSkin () {
return new DateTimePickerSkin(this);
}
public LocalTime getTimeValue(){
return timeValue.get();
}
void setTimeValue(LocalTime timeValue){
this.timeValue.set(timeValue);
}
public ObjectProperty<LocalTime> timeValueProperty(){
return timeValue;
}
public ZonedDateTime getDateTimeValue() {
return dateTimeValueProperty().get();
}
public void setDateTimeValue (ZonedDateTime dateTimeValue) {
dateTimeValueProperty().set(dateTimeValue);
}
public ObjectProperty<ZonedDateTime> dateTimeValueProperty(){
if (dateTimeValue == null){
dateTimeValue = new SimpleObjectProperty<>(ZonedDateTime.of(LocalDateTime.of(this.getValue(), timeValue.get()), ZoneId.systemDefault()));
timeValue.addListener(t -> {
dateTimeValue.set(ZonedDateTime.of(LocalDateTime.of(this.getValue(), timeValue.get()), ZoneId.systemDefault()));
});
valueProperty().addListener(t -> {
dateTimeValue.set(ZonedDateTime.of(LocalDateTime.of(this.getValue(), timeValue.get()), ZoneId.systemDefault()));
});
}
return dateTimeValue;
}
}
DateTimePickerSkin.java:
package test;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.Slider;
import javafx.scene.layout.HBox;
import com.sun.javafx.scene.control.skin.DatePickerContent;
import com.sun.javafx.scene.control.skin.DatePickerSkin;
public class DateTimePickerSkin extends DatePickerSkin {
private DateTimePicker datePicker;
private DatePickerContent ret;
public DateTimePickerSkin(DateTimePicker datePicker){
super(datePicker);
this.datePicker = datePicker;
}
#Override
public Node getPopupContent() {
if (ret == null){
ret = (DatePickerContent) super.getPopupContent();
Slider hours = new Slider(0, 23, (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getMinute() : 0));
Label hoursValue = new Label("Hours: " + (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getHour() : "") + " ");
Slider minutes = new Slider(0, 59, (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getMinute() : 0));
Label minutesValue = new Label("Minutes: " + (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getMinute() : "") + " ");
Slider seconds = new Slider(0, 59, (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getSecond() : 0));
Label secondsValue = new Label("Seconds: " + (datePicker.getTimeValue() != null ? datePicker.getTimeValue().getSecond() : "") + " ");
ret.getChildren().addAll(new HBox(hoursValue, hours), new HBox(minutesValue, minutes), new HBox(secondsValue, seconds));
hours.valueProperty().addListener((observable, oldValue, newValue) -> {
datePicker.setTimeValue(datePicker.getTimeValue().withHour(newValue.intValue()));
hoursValue.setText("Hours: " + String.format("%02d", datePicker.getTimeValue().getHour()) + " ");
});
minutes.valueProperty().addListener((observable, oldValue, newValue) -> {
datePicker.setTimeValue(datePicker.getTimeValue().withMinute(newValue.intValue()));
minutesValue.setText("Minutes: " + String.format("%02d", datePicker.getTimeValue().getMinute()) + " ");
});
seconds.valueProperty().addListener((observable, oldValue, newValue) -> {
datePicker.setTimeValue(datePicker.getTimeValue().withSecond(newValue.intValue()));
secondsValue.setText("Seconds: " + String.format("%02d", datePicker.getTimeValue().getSecond()) + " ");
});
}
return ret;
}
}
usage:
Main.java:
public class Main extends Application{
#Override
public void start ( Stage primaryStage ) {
VBox vBox = new VBox();
Scene s = new Scene(new ScrollPane(vBox), 600, 400);
DateTimePicker d = new DateTimePicker();
// Date only
d.valueProperty().addListener(t -> System.out.println(t));
// Time only
d.timeValueProperty().addListener(t -> System.out.println(t));
// DateAndTime
d.dateTimeValueProperty().addListener(t -> System.out.println(t));
vBox.getChildren().add(d);
primaryStage.setScene(s);
primaryStage.show();
}
public static void main ( String[] args ) {
launch(args);
}
}
there is still the StringConverter job to be done, but it's quite usable even like this. hope it helps someone.
PS: this was tested with jdk8u40, and it uses classes from the com.sun package (DatePickerContent/DatePickerSkin) which are not public API and might change in the future — but come on, even if they do, how hard would it be to adapt the above code? :)
edit: added a StringConverter for iso8601 format and added a ZonedDateTime property for a cleaner usage (can swapped with a LocalDateTime if you don't need the Zone information)
I have the following Callback listening on the selected Cell of a TableView:
// Cell factory for a TableView<MyFTPFile>: renders the cell's String value
// and, on double-click, reloads the table's items from the FTP client.
// NOTE(review): as posted, this snippet is mangled/truncated -- "#Override"
// should be "@Override", and the outer call(...) method is missing its
// trailing "return cell;" plus the closing braces of the anonymous Callback.
Callback<TableColumn<MyFTPFile,String>, TableCell<MyFTPFile,String>> cellFactory =
new Callback<TableColumn<MyFTPFile,String>, TableCell<MyFTPFile,String>>() {
public TableCell<MyFTPFile,String> call(TableColumn<MyFTPFile,String> p) {
TableCell<MyFTPFile,String> cell = new TableCell<MyFTPFile, String>() {
#Override
public void updateItem(String item, boolean empty) {
super.updateItem(item, empty);
// Text-only rendering; clear any graphic node.
setText(empty ? null : getString());
setGraphic(null);
}
private String getString() {
return getItem() == null ? "" : getItem().toString();
}
};
// Double-click (clickCount > 1): refresh the backing observable list.
cell.addEventFilter(MouseEvent.MOUSE_CLICKED, new EventHandler<MouseEvent>() {
#Override
public void handle(MouseEvent event) {
if (event.getClickCount() > 1) {
TableCell<MyFTPFile,String> c = (TableCell<MyFTPFile,String>) event.getSource();
ftpObservablelist = MyFTPClient.getInstance().getFtpObservableList();
ftpTable.setItems(ftpObservablelist);
}
}
});
Now, I would like to get the MyFTPFile object which is referenced by the cell, which is doubleclicked, so that i can pass it to another class and do stuff... Any Idea how to do that???
Thanks in advance.
The MyFTPFile object is associated with the cell's row, so, as the asker pointed out in his comment, it is retrievable via cell.getTableRow().getItem().
At first I thought this should be cell.getItem(), which returns the data value associated with the cell. However, most of the time, the cell data value will be a property of the backing item rather than the object itself (for example a filename field of a MyFTPFile object).
Executable sample for the curious:
import javafx.application.Application;
import javafx.event.EventHandler;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
import javafx.util.Callback;
public class TableClickListener extends Application {
public static void main(String[] args) {
launch(args);
}
class FTPTableCell<S, T> extends TextFieldTableCell<S, T> {
FTPTableCell() {
super();
addEventFilter(MouseEvent.MOUSE_CLICKED, new EventHandler<MouseEvent>() {
#Override
public void handle(MouseEvent event) {
if (event.getClickCount() > 1 && getItem() != null) {
System.out.println("Sending " + getTableRow().getItem() + " to the FTP client");
}
}
});
}
}
final Callback<TableColumn<MyFTPFile, String>, TableCell<MyFTPFile, String>> FTP_TABLE_CELL_FACTORY =
new Callback<TableColumn<MyFTPFile, String>, TableCell<MyFTPFile, String>>() {
public TableCell<MyFTPFile, String> call(TableColumn<MyFTPFile, String> p) {
return new FTPTableCell<>();
}
};
#Override
public void start(final Stage stage) {
final TableView<MyFTPFile> table = new TableView<>();
final TableColumn<MyFTPFile, String> filenameColumn = new TableColumn<>("Filename");
filenameColumn.setCellValueFactory(new PropertyValueFactory<MyFTPFile, String>("filename"));
filenameColumn.setCellFactory(FTP_TABLE_CELL_FACTORY);
filenameColumn.setMinWidth(150);
final TableColumn<MyFTPFile, String> ratingColumn = new TableColumn<>("Rating");
ratingColumn.setCellValueFactory(new PropertyValueFactory<MyFTPFile, String>("rating"));
ratingColumn.setCellFactory(FTP_TABLE_CELL_FACTORY);
ratingColumn.setMinWidth(20);
table.getColumns().setAll(filenameColumn, ratingColumn);
table.getItems().setAll(
new MyFTPFile("xyzzy.txt", 10),
new MyFTPFile("management_report.doc", 1),
new MyFTPFile("flower.png", 7)
);
table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
stage.setScene(new Scene(new Group(table)));
stage.show();
}
public class MyFTPFile {
private final String filename;
private final int rating;
MyFTPFile(String filename, int rating) {
this.filename = filename;
this.rating = rating;
}
public String getFilename() {
return filename;
}
public int getRating() {
return rating;
}
#Override
public String toString() {
return "MyFTPFile{" +
"filename='" + filename + '\'' +
", rating=" + rating +
'}';
}
}
}