Skip to content

Commit

Permalink
Bring json_output into the task runner, adapt thread logic
Browse files Browse the repository at this point in the history
  • Loading branch information
dottorblaster committed Dec 10, 2018
1 parent c275759 commit 0c08dac
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 27 deletions.
69 changes: 48 additions & 21 deletions src/execute.rs
Original file line number Diff line number Diff line change
@@ -1,54 +1,81 @@
extern crate ansi_term;
extern crate serde_json;

use parse_config::Task;
use task_output::TaskOutput;
use task_output::Tasks;
use task_output::SerializableOutput;
use task_output;
use std::sync::{Mutex, Arc};
use std::thread;
use std::process::Command;
use std::process::Output;

use self::ansi_term::Colour::{Red, Green, Yellow, Black};
use self::ansi_term::ANSIString;

fn task_success(task: Task, output: Output) {
let stdout = ANSIString::from(String::from_utf8(output.stdout).unwrap());
println!(
"{} {}\n{}\n",
Black.bold().on(Green).paint(" SUCCESS "),
Yellow.paint(format!("{}", task.name)),
stdout
);
fn task_success(task: Task, output: Output, json: bool) {
if json == false {
let stdout = ANSIString::from(String::from_utf8(output.stdout).unwrap());
println!(
"{} {}\n{}\n",
Black.bold().on(Green).paint(" SUCCESS "),
Yellow.paint(format!("{}", task.name)),
stdout
);
}
}

fn task_failure(task: Task, output: Output) {
let stderr = ANSIString::from(String::from_utf8(output.stderr).unwrap());
println!(
"{} {}\n{}\n",
Black.bold().on(Red).paint(" FAIL "),
Yellow.paint(format!("{}", task.name)),
stderr
);
fn task_failure(task: Task, output: Output, json: bool) {
if json == false {
let stderr = ANSIString::from(String::from_utf8(output.stderr).unwrap());
println!(
"{} {}\n{}\n",
Black.bold().on(Red).paint(" FAIL "),
Yellow.paint(format!("{}", task.name)),
stderr
);
}
}

pub fn run(tasks: Vec<Task>, cwd_path: String) -> bool {
pub fn run(tasks: Vec<Task>, cwd_path: String, json_output: bool) -> bool {
let outputs = Arc::new(Mutex::new(task_output::Tasks::with_capacity(tasks.len())));
let mut handles = Vec::with_capacity(tasks.len());
println!("\n");
for task in &tasks {
let (data, path) = (task.clone(), cwd_path.clone());
let outputs = Arc::clone(&outputs);
let child = thread::spawn(move || {
let local_task = data.clone();
let task_data = data.clone();
let mut iter = local_task.command.split_whitespace();
let output = Command::new(iter.nth(0).unwrap())
let mut list = outputs.lock().unwrap();
let command_output = Command::new(iter.nth(0).unwrap())
.args(iter)
.current_dir(path)
.output()
.expect("command failed");
match output.status.code() {
Some(0) => task_success(data, output),
Some(_) => task_failure(data, output),
let cloned_output = command_output.clone();
list.push(TaskOutput {
outcome: String::from_utf8(cloned_output.stdout).unwrap(),
code: cloned_output.status.code().unwrap().to_string(),
name: task_data.name,
description: task_data.description,
command: task_data.command,
});
match command_output.status.code() {
Some(0) => task_success(data, command_output, json_output),
Some(_) => task_failure(data, command_output, json_output),
None => println!("Process terminated by signal")
}
});
handles.push(child);
}
for handle in handles { handle.join().unwrap(); }
if json_output == true {
let slice = &*outputs.lock().unwrap();
let serializable_output = SerializableOutput { tasks: slice.to_vec() };
println!("{}", serde_json::to_string(&serializable_output).unwrap());
}
true
}
12 changes: 6 additions & 6 deletions src/task_output.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,16 @@ extern crate serde_json;

#[derive(Serialize, Clone)]
pub struct TaskOutput {
outcome: String,
code: String,
name: String,
description: String,
command: String,
pub outcome: String,
pub code: String,
pub name: String,
pub description: String,
pub command: String,
}

/// Collection of per-task results gathered across worker threads.
pub type Tasks = Vec<TaskOutput>;

#[derive(Serialize, Clone)]
pub struct Output {
pub struct SerializableOutput {
pub tasks: Vec<TaskOutput>,
}

0 comments on commit 0c08dac

Please sign in to comment.