
Merge pull request #4138 from hashicorp/i-hcl-json-endpoint
HCL to JSON api endpoint
nickethier committed Apr 19, 2018
2 parents a4bf901 + 059ea94 commit 95d9eb9
Showing 8 changed files with 229 additions and 0 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -1,6 +1,7 @@
## 0.8.2 (Unreleased)

IMPROVEMENTS:
* api: Add /v1/jobs/parse api endpoint for rendering HCL job files as JSON [[GH-2782](https://github.com/hashicorp/nomad/issues/2782)]
* client: Create new process group on process startup. [[GH-3572](https://github.com/hashicorp/nomad/issues/3572)]

BUG FIXES:
22 changes: 22 additions & 0 deletions api/jobs.go
@@ -36,11 +36,33 @@ type Jobs struct {
client *Client
}

// JobsParseRequest is used for arguments of the /v1/jobs/parse endpoint
type JobsParseRequest struct {
// JobHCL is an HCL jobspec
JobHCL string

// Canonicalize indicates whether the server should return default values
// for unset fields
Canonicalize bool
}

// Jobs returns a handle on the jobs endpoints.
func (c *Client) Jobs() *Jobs {
return &Jobs{client: c}
}

// ParseHCL is used to convert the HCL representation of a Job to JSON server side.
// To parse the HCL client side instead, see package github.com/hashicorp/nomad/jobspec
func (j *Jobs) ParseHCL(jobHCL string, canonicalize bool) (*Job, error) {
var job Job
req := &JobsParseRequest{
JobHCL: jobHCL,
Canonicalize: canonicalize,
}
_, err := j.client.write("/v1/jobs/parse", req, &job, nil)
return &job, err
}

func (j *Jobs) Validate(job *Job, q *WriteOptions) (*JobValidateResponse, *WriteMeta, error) {
var resp JobValidateResponse
req := &JobValidateRequest{Job: job}
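
For context, a minimal sketch of how the new ParseHCL helper could be called from the Go api client; the agent address and the example jobspec below are illustrative assumptions, not part of this commit:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/nomad/api"
)

func main() {
	// Assumes a Nomad agent is reachable at the default address
	// (NOMAD_ADDR or http://127.0.0.1:4646).
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Example jobspec; any valid HCL job definition works here.
	hcl := `job "example" { type = "service" group "cache" {} }`

	// Ask the server to parse the HCL; passing true requests
	// canonicalized (default-filled) values in the returned Job.
	job, err := client.Jobs().ParseHCL(hcl, true)
	if err != nil {
		log.Fatal(err)
	}
	if job.ID != nil {
		fmt.Println("parsed job ID:", *job.ID)
	}
}
```
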
37 changes: 37 additions & 0 deletions api/jobs_test.go
@@ -47,6 +47,43 @@ func TestJobs_Register(t *testing.T) {
}
}

func TestJobs_Parse(t *testing.T) {
t.Parallel()
c, s := makeClient(t, nil, nil)
defer s.Stop()

jobs := c.Jobs()

checkJob := func(job *Job, expectedRegion string) {
if job == nil {
t.Fatal("job should not be nil")
}

region := job.Region

if region == nil {
if expectedRegion != "" {
t.Fatalf("expected job region to be '%s' but was unset", expectedRegion)
}
} else {
if expectedRegion != *region {
t.Fatalf("expected job region '%s', but got '%s'", expectedRegion, *region)
}
}
}
job, err := jobs.ParseHCL(mock.HCL(), true)
if err != nil {
t.Fatalf("err: %s", err)
}
checkJob(job, "global")

job, err = jobs.ParseHCL(mock.HCL(), false)
if err != nil {
t.Fatalf("err: %s", err)
}
checkJob(job, "")
}

func TestJobs_Validate(t *testing.T) {
t.Parallel()
c, s := makeClient(t, nil, nil)
1 change: 1 addition & 0 deletions command/agent/http.go
@@ -143,6 +143,7 @@ func (s *HTTPServer) Shutdown() {
// registerHandlers is used to attach our handlers to the mux
func (s *HTTPServer) registerHandlers(enableDebug bool) {
s.mux.HandleFunc("/v1/jobs", s.wrap(s.JobsRequest))
s.mux.HandleFunc("/v1/jobs/parse", s.wrap(s.JobsParseRequest))
s.mux.HandleFunc("/v1/job/", s.wrap(s.JobSpecificRequest))

s.mux.HandleFunc("/v1/nodes", s.wrap(s.NodesRequest))
27 changes: 27 additions & 0 deletions command/agent/job_endpoint.go
@@ -8,6 +8,7 @@ import (

"github.com/golang/snappy"
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/jobspec"
"github.com/hashicorp/nomad/nomad/structs"
)

@@ -544,6 +545,32 @@ func (s *HTTPServer) jobDispatchRequest(resp http.ResponseWriter, req *http.Requ
return out, nil
}

// JobsParseRequest parses an HCL jobspec and returns an api.Job
func (s *HTTPServer) JobsParseRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) {
if req.Method != http.MethodPut && req.Method != http.MethodPost {
return nil, CodedError(405, ErrInvalidMethod)
}

args := &api.JobsParseRequest{}
if err := decodeBody(req, &args); err != nil {
return nil, CodedError(400, err.Error())
}
if args.JobHCL == "" {
return nil, CodedError(400, "Job spec is empty")
}

jobfile := strings.NewReader(args.JobHCL)
jobStruct, err := jobspec.Parse(jobfile)
if err != nil {
return nil, CodedError(400, err.Error())
}

if args.Canonicalize {
jobStruct.Canonicalize()
}
return jobStruct, nil
}

func ApiJobToStructJob(job *api.Job) *structs.Job {
job.Canonicalize()

32 changes: 32 additions & 0 deletions command/agent/job_endpoint_test.go
@@ -272,6 +272,38 @@ func TestHTTP_JobsRegister_Defaulting(t *testing.T) {
})
}

func TestHTTP_JobsParse(t *testing.T) {
t.Parallel()
httpTest(t, nil, func(s *TestAgent) {
buf := encodeReq(api.JobsParseRequest{JobHCL: mock.HCL()})
req, err := http.NewRequest("POST", "/v1/jobs/parse", buf)
if err != nil {
t.Fatalf("err: %v", err)
}

respW := httptest.NewRecorder()

obj, err := s.Server.JobsParseRequest(respW, req)
if err != nil {
t.Fatalf("err: %v", err)
}
if obj == nil {
t.Fatal("response should not be nil")
}

job := obj.(*api.Job)
expected := mock.Job()
if job.Name == nil || *job.Name != expected.Name {
t.Fatalf("job name is '%s', expected '%s'", *job.Name, expected.Name)
}

if job.Datacenters == nil ||
job.Datacenters[0] != expected.Datacenters[0] {
t.Fatalf("job datacenters is '%s', expected '%s'",
job.Datacenters[0], expected.Datacenters[0])
}
})
}

func TestHTTP_JobQuery(t *testing.T) {
t.Parallel()
httpTest(t, nil, func(s *TestAgent) {
32 changes: 32 additions & 0 deletions nomad/mock/mock.go
@@ -63,6 +63,38 @@ func Node() *structs.Node {
return node
}

func HCL() string {
return `job "my-job" {
datacenters = ["dc1"]
type = "service"
constraint {
attribute = "${attr.kernel.name}"
value = "linux"
}
group "web" {
count = 10
restart {
attempts = 3
interval = "10m"
delay = "1m"
mode = "delay"
}
task "web" {
driver = "exec"
config {
command = "/bin/date"
}
resources {
cpu = 500
memory = 256
}
}
}
}
`
}

func Job() *structs.Job {
job := &structs.Job{
Region: "global",
77 changes: 77 additions & 0 deletions website/source/api/jobs.html.md
@@ -231,6 +231,83 @@ $ curl \
}
```

## Parse Job

This endpoint parses an HCL jobspec and produces the equivalent JSON-encoded
job.

| Method | Path | Produces |
| ------ | ------------------------- | -------------------------- |
| `POST` | `/v1/jobs/parse` | `application/json` |

The table below shows this endpoint's support for
[blocking queries](/api/index.html#blocking-queries) and
[required ACLs](/api/index.html#acls).

| Blocking Queries | ACL Required |
| ---------------- | ------------ |
| `NO` | `none` |

### Parameters

- `JobHCL` `(string: <required>)` - Specifies the HCL definition of the job
encoded in a JSON string.
- `Canonicalize` `(bool: false)` - Flag to enable setting any unset fields to
their default values.

### Sample Payload

```json
{
"JobHCL":"job \"example\" { type = \"service\" group \"cache\" {} }",
"Canonicalize": true
}
```

### Sample Request

```text
$ curl \
--request POST \
--data '{"Canonicalize": true, "JobHCL": "job \"my-job\" {}"}' \
https://localhost:4646/v1/jobs/parse
```

### Sample Response

```json
{
"AllAtOnce": false,
"Constraints": null,
"CreateIndex": 0,
"Datacenters": null,
"ID": "my-job",
"JobModifyIndex": 0,
"Meta": null,
"Migrate": null,
"ModifyIndex": 0,
"Name": "my-job",
"Namespace": "default",
"ParameterizedJob": null,
"ParentID": "",
"Payload": null,
"Periodic": null,
"Priority": 50,
"Region": "global",
"Reschedule": null,
"Stable": false,
"Status": "",
"StatusDescription": "",
"Stop": false,
"SubmitTime": null,
"TaskGroups": null,
"Type": "service",
"Update": null,
"VaultToken": "",
"Version": 0
}
```
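
The same request can also be made programmatically; a minimal Go sketch using only the standard library, with the agent address and jobspec below as placeholders rather than part of the documented API:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Build the same payload shown in the sample request above.
	payload, err := json.Marshal(map[string]interface{}{
		"JobHCL":       `job "my-job" {}`,
		"Canonicalize": true,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Placeholder agent address; adjust for your cluster.
	resp, err := http.Post("http://localhost:4646/v1/jobs/parse",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Decode the JSON job definition returned by the endpoint.
	var job map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&job); err != nil {
		log.Fatal(err)
	}
	fmt.Println("parsed job name:", job["Name"])
}
```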

## Read Job

This endpoint reads information about a single job for its specification and

