Skip to content

Commit 77f2623

Browse files
committed
[MINOR][GRAPHX] Correct typos in the sub-modules: graphx, external, and examples
This PR intends to fix typos in the sub-modules: graphx, external, and examples. Split out per holdenk's comment on #30323. NOTE: The misspellings have been reported at jsoref@706a726#commitcomment-44064356. Misspelled words make it harder to read / understand content. Does this PR introduce any user-facing change? No. How was this patch tested? No testing was performed. Closes #30326 from jsoref/spelling-graphx. Authored-by: Josh Soref <jsoref@users.noreply.github.com> Signed-off-by: Takeshi Yamamuro <yamamuro@apache.org>
1 parent fece4a3 commit 77f2623

22 files changed

Lines changed: 34 additions & 34 deletions

File tree

examples/src/main/java/org/apache/spark/examples/streaming/JavaCustomReceiver.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ public static void main(String[] args) throws Exception {
6767
JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));
6868

6969
// Create an input stream with the custom receiver on target ip:port and count the
70-
// words in input stream of \n delimited text (eg. generated by 'nc')
70+
// words in input stream of \n delimited text (e.g. generated by 'nc')
7171
JavaReceiverInputDStream<String> lines = ssc.receiverStream(
7272
new JavaCustomReceiver(args[0], Integer.parseInt(args[1])));
7373
JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(SPACE.split(x)).iterator());

examples/src/main/java/org/apache/spark/examples/streaming/JavaNetworkWordCount.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ public static void main(String[] args) throws Exception {
5757
JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
5858

5959
// Create a JavaReceiverInputDStream on target ip:port and count the
60-
// words in input stream of \n delimited text (eg. generated by 'nc')
60+
// words in input stream of \n delimited text (e.g. generated by 'nc')
6161
// Note that no duplication in storage level only for running locally.
6262
// Replication necessary in distributed scenario for fault tolerance.
6363
JavaReceiverInputDStream<String> lines = ssc.socketTextStream(

examples/src/main/java/org/apache/spark/examples/streaming/JavaRecoverableNetworkWordCount.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ private static JavaStreamingContext createContext(String ip,
126126
ssc.checkpoint(checkpointDirectory);
127127

128128
// Create a socket stream on target ip:port and count the
129-
// words in input stream of \n delimited text (eg. generated by 'nc')
129+
// words in input stream of \n delimited text (e.g. generated by 'nc')
130130
JavaReceiverInputDStream<String> lines = ssc.socketTextStream(ip, port);
131131
JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(SPACE.split(x)).iterator());
132132
JavaPairDStream<String, Integer> wordCounts = words.mapToPair(s -> new Tuple2<>(s, 1))

examples/src/main/java/org/apache/spark/examples/streaming/JavaSqlNetworkWordCount.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ public static void main(String[] args) throws Exception {
5959
JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
6060

6161
// Create a JavaReceiverInputDStream on target ip:port and count the
62-
// words in input stream of \n delimited text (eg. generated by 'nc')
62+
// words in input stream of \n delimited text (e.g. generated by 'nc')
6363
// Note that no duplication in storage level only for running locally.
6464
// Replication necessary in distributed scenario for fault tolerance.
6565
JavaReceiverInputDStream<String> lines = ssc.socketTextStream(

examples/src/main/python/ml/train_validation_split.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
"""
1919
This example demonstrates applying TrainValidationSplit to split data
20-
and preform model selection.
20+
and perform model selection.
2121
Run with:
2222
2323
bin/spark-submit examples/src/main/python/ml/train_validation_split.py

examples/src/main/python/streaming/recoverable_network_wordcount.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def createContext(host, port, outputPath):
6868
ssc = StreamingContext(sc, 1)
6969

7070
# Create a socket stream on target ip:port and count the
71-
# words in input stream of \n delimited text (eg. generated by 'nc')
71+
# words in input stream of \n delimited text (e.g. generated by 'nc')
7272
lines = ssc.socketTextStream(host, port)
7373
words = lines.flatMap(lambda line: line.split(" "))
7474
wordCounts = words.map(lambda x: (x, 1)).reduceByKey(lambda x, y: x + y)

examples/src/main/python/streaming/sql_network_wordcount.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def getSparkSessionInstance(sparkConf):
5454
ssc = StreamingContext(sc, 1)
5555

5656
# Create a socket stream on target ip:port and count the
57-
# words in input stream of \n delimited text (eg. generated by 'nc')
57+
# words in input stream of \n delimited text (e.g. generated by 'nc')
5858
lines = ssc.socketTextStream(host, int(port))
5959
words = lines.flatMap(lambda line: line.split(" "))
6060

examples/src/main/scala/org/apache/spark/examples/streaming/CustomReceiver.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ object CustomReceiver {
5050
val ssc = new StreamingContext(sparkConf, Seconds(1))
5151

5252
// Create an input stream with the custom receiver on target ip:port and count the
53-
// words in input stream of \n delimited text (eg. generated by 'nc')
53+
// words in input stream of \n delimited text (e.g. generated by 'nc')
5454
val lines = ssc.receiverStream(new CustomReceiver(args(0), args(1).toInt))
5555
val words = lines.flatMap(_.split(" "))
5656
val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)

examples/src/main/scala/org/apache/spark/examples/streaming/NetworkWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ object NetworkWordCount {
4747
val ssc = new StreamingContext(sparkConf, Seconds(1))
4848

4949
// Create a socket stream on target ip:port and count the
50-
// words in input stream of \n delimited text (eg. generated by 'nc')
50+
// words in input stream of \n delimited text (e.g. generated by 'nc')
5151
// Note that no duplication in storage level only for running locally.
5252
// Replication necessary in distributed scenario for fault tolerance.
5353
val lines = ssc.socketTextStream(args(0), args(1).toInt, StorageLevel.MEMORY_AND_DISK_SER)

examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ object RecoverableNetworkWordCount {
112112
ssc.checkpoint(checkpointDirectory)
113113

114114
// Create a socket stream on target ip:port and count the
115-
// words in input stream of \n delimited text (eg. generated by 'nc')
115+
// words in input stream of \n delimited text (e.g. generated by 'nc')
116116
val lines = ssc.socketTextStream(ip, port)
117117
val words = lines.flatMap(_.split(" "))
118118
val wordCounts = words.map((_, 1)).reduceByKey(_ + _)

0 commit comments

Comments
 (0)