Minor comment and wording updates
Andrew Or committed Feb 6, 2015
1 parent b4695e7 commit 9c82a36
Showing 3 changed files with 18 additions and 13 deletions.
@@ -438,7 +438,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
outStream.println("Unknown/unsupported param " + unknownParam)
}
outStream.println(
"""Usage: spark-submit [options] <app jar | python file> [app options]
"""Usage: spark-submit [options] <app jar | python file> [app arguments]
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
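For illustration, here is a minimal Scala sketch of the argument layout described by the usage string above: options first, then the app jar or python file, then the application arguments. It is a simplified stand-in, not Spark's actual SparkSubmitArguments parser, and it assumes every option takes exactly one value (real spark-submit also has value-less flags such as --verbose).

```scala
// Hypothetical sketch only: splits an argument list into (options, primary resource, app arguments)
// following the order in the usage string above. Not the real SparkSubmitArguments logic.
object SubmitArgsSketch {
  def split(args: List[String]): (Map[String, String], String, List[String]) = args match {
    // Assume each --option is followed by exactly one value (a simplification).
    case opt :: value :: rest if opt.startsWith("--") =>
      val (opts, primary, appArgs) = split(rest)
      (opts + (opt -> value), primary, appArgs)
    // The first non-option token is the app jar or python file; everything after it goes to the app.
    case primary :: appArgs =>
      (Map.empty, primary, appArgs)
    case Nil =>
      sys.error("missing <app jar | python file>")
  }

  def main(args: Array[String]): Unit = {
    // e.g. spark-submit --master local[2] --deploy-mode client app.jar arg1 arg2
    println(split(List("--master", "local[2]", "--deploy-mode", "client", "app.jar", "arg1", "arg2")))
  }
}
```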
@@ -46,19 +46,19 @@ private[spark] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
val stateFuture = (master ? RequestMasterState)(timeout).mapTo[MasterStateResponse]
val state = Await.result(stateFuture, timeout)

val workerHeaders = Seq("Id", "Address", "State", "Cores", "Memory")
val workerHeaders = Seq("Worker Id", "Address", "State", "Cores", "Memory")
val workers = state.workers.sortBy(_.id)
val workerTable = UIUtils.listingTable(workerHeaders, workerRow, workers)

val appHeaders = Seq("ID", "Name", "Cores", "Memory per Node", "Submitted Time", "User",
"State", "Duration")
val appHeaders = Seq("Application ID", "Name", "Cores", "Memory per Node", "Submitted Time",
"User", "State", "Duration")
val activeApps = state.activeApps.sortBy(_.startTime).reverse
val activeAppsTable = UIUtils.listingTable(appHeaders, appRow, activeApps)
val completedApps = state.completedApps.sortBy(_.endTime).reverse
val completedAppsTable = UIUtils.listingTable(appHeaders, appRow, completedApps)

val driverHeaders = Seq("ID", "Submitted Time", "Worker", "State", "Cores", "Memory",
"Main Class")
val driverHeaders = Seq("Submission ID", "Submitted Time", "Worker", "State", "Cores",
"Memory", "Main Class")
val activeDrivers = state.activeDrivers.sortBy(_.startTime).reverse
val activeDriversTable = UIUtils.listingTable(driverHeaders, driverRow, activeDrivers)
val completedDrivers = state.completedDrivers.sortBy(_.startTime).reverse
@@ -77,7 +77,7 @@ private[spark] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
state.restUri.map { uri =>
<li>
<strong>REST URL:</strong> {uri}
<span class="rest-uri"> (for standalone cluster mode in Spark 1.3+)</span>
<span class="rest-uri"> (cluster mode)</span>
</li>
}.getOrElse { Seq.empty }
}
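The header Seqs above are paired with row-rendering functions (workerRow, appRow, driverRow) by UIUtils.listingTable. As a rough, self-contained sketch of that pattern — not Spark's actual UIUtils, which builds scala.xml nodes — the hypothetical helper below shows how a header list and a row function produce a table, using the renamed "Worker Id" column as an example.

```scala
// Minimal sketch of the listing-table pattern used by MasterPage (hypothetical helper,
// not Spark's actual UIUtils.listingTable; it renders plain strings instead of XML nodes).
object ListingTableSketch {
  def listingTable[T](headers: Seq[String], row: T => Seq[String], data: Seq[T]): String = {
    val head = headers.mkString("<tr><th>", "</th><th>", "</th></tr>")
    val body = data.map(d => row(d).mkString("<tr><td>", "</td><td>", "</td></tr>")).mkString("\n")
    s"<table>\n$head\n$body\n</table>"
  }

  // Example row type and renderer; field names are made up for illustration.
  case class WorkerInfo(id: String, address: String, state: String, cores: Int, memoryMb: Int)

  def workerRow(w: WorkerInfo): Seq[String] =
    Seq(w.id, w.address, w.state, w.cores.toString, s"${w.memoryMb} MB")

  def main(args: Array[String]): Unit = {
    val workerHeaders = Seq("Worker Id", "Address", "State", "Cores", "Memory")
    val workers = Seq(WorkerInfo("worker-20150206-1", "10.0.0.1:7078", "ALIVE", 4, 8192))
    println(listingTable(workerHeaders, workerRow, workers))
  }
}
```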
@@ -31,19 +31,24 @@ import org.apache.spark.{Logging, SparkConf, SPARK_VERSION => sparkVersion}
* This client is intended to communicate with the [[StandaloneRestServer]] and is
* currently used for cluster mode only.
*
-* The specific request sent to the server depends on the action as follows:
+* In protocol version v1, the REST URL takes the form http://[host:port]/v1/submissions/[action],
+* where [action] can be one of create, kill, or status. Each type of request is represented in
+* an HTTP message sent to the following prefixes:
* (1) submit - POST to /submissions/create
* (2) kill - POST /submissions/kill/[submissionId]
* (3) status - GET /submissions/status/[submissionId]
*
* In the case of (1), parameters are posted in the HTTP body in the form of JSON fields.
* Otherwise, the URL fully specifies the intended action of the client.
*
-* Additionally, the base URL includes the version of the protocol. For instance:
-* http://1.2.3.4:6066/v1/submissions/create. Since the protocol is expected to be stable
-* across Spark versions, existing fields cannot be added or removed. In the rare event that
-* forward or backward compatibility is broken, Spark must introduce a new protocol version
-* (e.g. v2). The client and the server must communicate on the same version of the protocol.
+* Since the protocol is expected to be stable across Spark versions, existing fields cannot be
+* added or removed, though new optional fields can be added. In the rare event that forward or
+* backward compatibility is broken, Spark must introduce a new protocol version (e.g. v2).
+*
+* The client and the server must communicate using the same version of the protocol. If there
+* is a mismatch, the server will respond with the highest protocol version it supports. A future
+* implementation of this client can use that information to retry using the version specified
+* by the server.
*/
private[spark] class StandaloneRestClient extends Logging {
import StandaloneRestClient._
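As a concrete illustration of the v1 endpoints described in the updated comment, the sketch below builds the three request types with plain java.net.HttpURLConnection. It is not the real StandaloneRestClient; the master address, submission id, and JSON body shown are made-up example values.

```scala
// Illustrative sketch of the v1 REST submission endpoints (create / kill / status).
// Not the actual org.apache.spark.deploy.rest.StandaloneRestClient.
import java.io.OutputStreamWriter
import java.net.{HttpURLConnection, URL}
import scala.io.Source

object RestSubmissionSketch {
  // Example base URL; [host:port] would come from the master's REST URL shown on the MasterPage.
  val base = "http://1.2.3.4:6066/v1/submissions"

  // (1) submit - POST to /submissions/create, parameters sent as JSON fields in the body
  def create(json: String): String = post(new URL(s"$base/create"), Some(json))

  // (2) kill - POST to /submissions/kill/[submissionId]
  def kill(submissionId: String): String = post(new URL(s"$base/kill/$submissionId"), None)

  // (3) status - GET /submissions/status/[submissionId]
  def status(submissionId: String): String = {
    val conn = new URL(s"$base/status/$submissionId").openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("GET")
    Source.fromInputStream(conn.getInputStream).mkString
  }

  private def post(url: URL, body: Option[String]): String = {
    val conn = url.openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("POST")
    body.foreach { json =>
      conn.setRequestProperty("Content-Type", "application/json")
      conn.setDoOutput(true)
      val out = new OutputStreamWriter(conn.getOutputStream, "UTF-8")
      out.write(json)
      out.close()
    }
    Source.fromInputStream(conn.getInputStream).mkString
  }

  def main(args: Array[String]): Unit = {
    // Example only: the submission id format is whatever the server returned from create.
    println(status("driver-20150206123456-0000"))
  }
}
```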
