From 8d288253d7fbfc796757a681f7a97ac4c8db8421 Mon Sep 17 00:00:00 2001
From: panbingkun
Date: Thu, 18 May 2023 19:57:33 +0800
Subject: [PATCH 1/2] [SPARK-43576][CORE] Remove unused declarations from Core module

---
 .../resources/org/apache/spark/ui/static/executorspage.js  | 1 -
 .../org/apache/spark/deploy/history/ApplicationCache.scala | 1 -
 core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala   | 5 -----
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala   | 5 -----
 core/src/main/scala/org/apache/spark/ui/ToolTips.scala     | 7 -------
 5 files changed, 19 deletions(-)

diff --git a/core/src/main/resources/org/apache/spark/ui/static/executorspage.js b/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
index b7fbe0492b6d..bf6af9b2cc92 100644
--- a/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
+++ b/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
@@ -126,7 +126,6 @@ function totalDurationAlpha(totalGCTime, totalDuration) {
     (Math.min(totalGCTime / totalDuration + 0.5, 1)) : 1;
 }
 
-// When GCTimePercent is edited change ToolTips.TASK_TIME to match
 var GCTimePercent = 0.1;
 
 function totalDurationStyle(totalGCTime, totalDuration) {
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
index 829631a04546..909f5ea937ce 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
@@ -394,7 +394,6 @@ private[history] class ApplicationCacheCheckFilter(
     val httpRequest = request.asInstanceOf[HttpServletRequest]
     val httpResponse = response.asInstanceOf[HttpServletResponse]
     val requestURI = httpRequest.getRequestURI
-    val operation = httpRequest.getMethod
 
     // if the request is for an attempt, check to see if it is in need of delete/refresh
     // and have the cache update the UI if so
diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index 0d905b46953c..12b28532231d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -128,8 +128,6 @@ class HadoopRDD[K, V](
 
   protected val jobConfCacheKey: String = "rdd_%d_job_conf".format(id)
 
-  protected val inputFormatCacheKey: String = "rdd_%d_input_format".format(id)
-
   // used to build JobTracker ID
   private val createTime = new Date()
 
@@ -404,9 +402,6 @@ private[spark] object HadoopRDD extends Logging {
    */
   val CONFIGURATION_INSTANTIATION_LOCK = new Object()
 
-  /** Update the input bytes read metric each time this number of records has been read */
-  val RECORDS_BETWEEN_BYTES_READ_METRIC_UPDATES = 256
-
   /**
    * The three methods below are helpers for accessing the local map, a property of the SparkEnv of
    * the local process.
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index d8119fb94984..9582bdbf5264 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -590,11 +590,6 @@ private class ProxyRedirectHandler(_proxyUri: String) extends HandlerWrapper {
     override def sendRedirect(location: String): Unit = {
       val newTarget = if (location != null) {
         val target = new URI(location)
-        val path = if (target.getPath().startsWith("/")) {
-          target.getPath()
-        } else {
-          req.getRequestURI().stripSuffix("/") + "/" + target.getPath()
-        }
         // The target path should already be encoded, so don't re-encode it, just the
         // proxy address part.
         val proxyBase = UIUtils.uiRoot(req)
diff --git a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
index 587046676ff1..b80fba396b33 100644
--- a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
+++ b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
@@ -35,10 +35,6 @@ private[spark] object ToolTips {
 
   val OUTPUT = "Bytes written to Hadoop."
 
-  val STORAGE_MEMORY =
-    "Memory used / total available memory for storage of data " +
-    "like RDD partitions cached in memory. "
-
   val SHUFFLE_WRITE =
     "Bytes and records written to disk in order to be read by a shuffle in a future stage."
 
@@ -88,9 +84,6 @@ private[spark] object ToolTips {
        also create multiple RDDs internally. Cached RDDs are shown in green.
     """
 
-  val TASK_TIME =
-    "Shaded red when garbage collection (GC) time is over 10% of task time"
-
   val APPLICATION_EXECUTOR_LIMIT =
     """Maximum number of executors that this application will use. This limit is finite only
        when dynamic allocation is enabled. The number of granted executors may exceed the limit

From 206f506618d988908d2bad5b9aedbb3fd439a97a Mon Sep 17 00:00:00 2001
From: panbingkun
Date: Fri, 19 May 2023 09:09:57 +0800
Subject: [PATCH 2/2] [SPARK-43576][CORE] Remove unused declarations from Core module

---
 core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index 12b28532231d..cad107256c58 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -128,6 +128,8 @@ class HadoopRDD[K, V](
 
   protected val jobConfCacheKey: String = "rdd_%d_job_conf".format(id)
 
+  protected val inputFormatCacheKey: String = "rdd_%d_input_format".format(id)
+
   // used to build JobTracker ID
   private val createTime = new Date()