Skip to content

Commit

Permalink
CR from @pwendell - rename configs and add cleanup.enabled
Browse files Browse the repository at this point in the history
  • Loading branch information
Evan Chan committed Apr 3, 2014
1 parent f2f6027 commit 9f10d96
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 6 deletions.
11 changes: 7 additions & 4 deletions core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
Original file line number Diff line number Diff line change
Expand Up @@ -64,10 +64,11 @@ private[spark] class Worker(
val REGISTRATION_TIMEOUT = 20.seconds
val REGISTRATION_RETRIES = 3

val CLEANUP_ENABLED = conf.getBoolean("spark.worker.cleanup.enabled", true)
// How often worker will clean up old app folders
val CLEANUP_INTERVAL_MILLIS = conf.getLong("spark.worker.cleanupInterval", 60 * 30) * 1000
val CLEANUP_INTERVAL_MILLIS = conf.getLong("spark.worker.cleanup.interval", 60 * 30) * 1000
// TTL for app folders/data; after TTL expires it will be cleaned up
val APP_DATA_RETENTION_SECS = conf.getLong("spark.worker.appDataTTL", 7 * 24 * 3600)
val APP_DATA_RETENTION_SECS = conf.getLong("spark.worker.cleanup.appDataTtl", 7 * 24 * 3600)

// Index into masterUrls that we're currently trying to register with.
var masterIndex = 0
Expand Down Expand Up @@ -184,8 +185,10 @@ private[spark] class Worker(
registered = true
changeMaster(masterUrl, masterWebUiUrl)
context.system.scheduler.schedule(0 millis, HEARTBEAT_MILLIS millis, self, SendHeartbeat)
context.system.scheduler.schedule(CLEANUP_INTERVAL_MILLIS millis,
CLEANUP_INTERVAL_MILLIS millis, self, WorkDirCleanup)
if (CLEANUP_ENABLED) {
context.system.scheduler.schedule(CLEANUP_INTERVAL_MILLIS millis,
CLEANUP_INTERVAL_MILLIS millis, self, WorkDirCleanup)
}

case SendHeartbeat =>
masterLock.synchronized {
Expand Down
11 changes: 9 additions & 2 deletions docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -587,15 +587,22 @@ Apart from these, the following properties are also available, and may be useful
</td>
</tr>
<tr>
<td>spark.worker.cleanupInterval</td>
<td>spark.worker.cleanup.enabled</td>
<td>true</td>
<td>
    Enables periodic cleanup of worker and application directories.
</td>
</tr>
<tr>
<td>spark.worker.cleanup.interval</td>
<td>1800 (30 minutes)</td>
<td>
Controls the interval, in seconds, at which the worker cleans up old application work dirs
on the local machine.
</td>
</tr>
<tr>
<td>spark.worker.appDataTTL</td>
<td>spark.worker.cleanup.appDataTtl</td>
<td>7 * 24 * 3600 (7 days)</td>
<td>
The number of seconds to retain application work directories on each worker. This is a Time To Live
Expand Down

0 comments on commit 9f10d96

Please sign in to comment.