
Commit

Merge pull request #11 from Alokit-Innovations/tr/settings
Implement settings for auto assign and comment
tapishr authored Oct 26, 2023
2 parents 4d1a462 + 02ad66a commit 70dbbbe
Showing 6 changed files with 120 additions and 37 deletions.
47 changes: 30 additions & 17 deletions vibi-dpu/src/core/coverage.rs
@@ -2,9 +2,10 @@ use std::collections::{HashMap, HashSet};

use crate::{utils::hunk::{HunkMap, PrHunkItem}, db::user::get_workspace_user_from_db, bitbucket::{comment::add_comment, reviewer::add_reviewers}};
use crate::utils::review::Review;
use crate::utils::repo_config::RepoConfig;
use crate::bitbucket::auth::get_access_token_review;

pub async fn process_coverage(hunkmap: &HunkMap, review: &Review) {
pub async fn process_coverage(hunkmap: &HunkMap, review: &Review, repo_config: &RepoConfig) {
let access_token_opt = get_access_token_review(review).await;
if access_token_opt.is_none() {
eprintln!("Unable to acquire access_token in process_coverage");
@@ -14,24 +15,32 @@ pub async fn process_coverage(hunkmap: &HunkMap, review: &Review) {
for prhunk in hunkmap.prhunkvec() {
// calculate number of hunks for each userid
let coverage_map = calculate_coverage(&hunkmap.repo_owner(), prhunk);
let coverage_cond = !coverage_map.is_empty();
println!("!coverage_map.is_empty() = {:?}", &coverage_cond);
println!("repo_config.comment() = {:?}", repo_config.comment());
println!("repo_config.auto_assign() = {:?}", repo_config.auto_assign());
if !coverage_map.is_empty() {
// get user for each user id
// add reviewers
let mut author_set: HashSet<String> = HashSet::new();
author_set.insert(prhunk.author().to_string());
for blame in prhunk.blamevec() {
if author_set.contains(blame.author()) {
continue;
if repo_config.comment() {
println!("Inserting comment...");
// create comment text
let comment = comment_text(coverage_map, repo_config.auto_assign());
// add comment
add_comment(&comment, review, &access_token).await;
}
if repo_config.auto_assign() {
// add reviewers
println!("Adding reviewers...");
let mut author_set: HashSet<String> = HashSet::new();
author_set.insert(prhunk.author().to_string());
for blame in prhunk.blamevec() {
if author_set.contains(blame.author()) {
continue;
}
author_set.insert(blame.author().to_string());
let author_id = blame.author();
add_reviewers(blame.author(), review, &access_token).await;
}
author_set.insert(blame.author().to_string());
let author_id = blame.author();
add_reviewers(blame.author(), review, &access_token).await;
}
// create comment text
let comment = comment_text(coverage_map);
// add comment
add_comment(&comment, review, &access_token).await;
// TODO - implement settings
}
}
}
@@ -74,14 +83,18 @@ fn calculate_coverage(repo_owner: &str, prhunk: &PrHunkItem) -> HashMap<String,
return coverage_map;
}

fn comment_text(coverage_map: HashMap<String, String>) -> String {
fn comment_text(coverage_map: HashMap<String, String>, auto_assign: bool) -> String {
let mut comment = "Relevant users for this PR:\n\n".to_string(); // Added two newlines
comment += "| Contributor Name/Alias | Code Coverage |\n"; // Added a newline at the end
comment += "| -------------- | --------------- |\n"; // Added a newline at the end

for (key, value) in coverage_map.iter() {
comment += &format!("| {} | {}% |\n", key, value); // Added a newline at the end
}
if auto_assign {
comment += "\n\n";
comment += "Auto assigning to all relevant reviewers";
}
comment += "\n\n";
comment += "Code coverage is calculated based on the git blame information of the PR. To know more, hit us up at contact@vibinex.com.\n\n"; // Added two newlines
comment += "To change comment and auto-assign settings, go to [your Vibinex settings page.](https://vibinex.com/settings)\n"; // Added a newline at the end
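For illustration, a minimal standalone sketch (not part of this commit) of what the generated PR comment looks like for a small coverage map once the auto_assign setting is applied; build_comment and the sample data are hypothetical stand-ins for comment_text:

use std::collections::HashMap;

// Simplified stand-in for comment_text(); the table layout and the
// auto-assign footer mirror the diff above, everything else is assumed.
fn build_comment(coverage_map: &HashMap<String, String>, auto_assign: bool) -> String {
    let mut comment = "Relevant users for this PR:\n\n".to_string();
    comment += "| Contributor Name/Alias | Code Coverage |\n";
    comment += "| -------------- | --------------- |\n";
    for (author, coverage) in coverage_map.iter() {
        comment += &format!("| {} | {}% |\n", author, coverage);
    }
    if auto_assign {
        comment += "\n\nAuto assigning to all relevant reviewers";
    }
    comment
}

fn main() {
    let mut coverage_map = HashMap::new();
    coverage_map.insert("tapishr".to_string(), "62.50".to_string());
    // With auto_assign enabled, the assignment footer is appended after the table.
    println!("{}", build_comment(&coverage_map, true));
}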
50 changes: 32 additions & 18 deletions vibi-dpu/src/core/review.rs
@@ -4,7 +4,8 @@ use serde_json::Value;

use crate::{
utils::{hunk::{HunkMap, PrHunkItem},
review::Review,
review::Review,
repo_config::RepoConfig,
gitops::{commit_exists,
git_pull,
get_excluded_files,
@@ -13,27 +14,29 @@ use crate::{
generate_blame}},
db::{hunk::{get_hunk_from_db, store_hunkmap_to_db},
repo::get_clone_url_clone_dir,
review::{save_review_to_db}},
review::save_review_to_db,
repo_config::save_repo_config_to_db},
bitbucket::config::get_client,
core::coverage::process_coverage};

pub async fn process_review(message_data: &Vec<u8>) {
let review_opt = parse_review(message_data);
if review_opt.is_none() {
eprintln!("Unable to deserialize review message");
eprintln!("Unable to deserialize review message and repo config");
return;
}
let review = review_opt.expect("review_opt is empty");
let (review, repo_config) = review_opt.expect("parse_opt is empty");
println!("deserialized repo_config, review = {:?}, {:?}", &repo_config, &review);
if hunk_already_exists(&review) {
return;
}
println!("Processing PR : {}", &review.id());
commit_check(&review).await;
let hunkmap_opt = process_review_changes(&review).await;
send_hunkmap(&hunkmap_opt, &review).await;
send_hunkmap(&hunkmap_opt, &review, &repo_config).await;
}

async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, review: &Review) {
async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, review: &Review, repo_config: &RepoConfig) {
if hunkmap_opt.is_none() {
eprintln!("Empty hunkmap in send_hunkmap");
return;
@@ -44,7 +47,8 @@ async fn send_hunkmap(hunkmap_opt: &Option<HunkMap>, review: &Review) {
publish_hunkmap(&hunkmap);
let hunkmap_async = hunkmap.clone();
let review_async = review.clone();
process_coverage(&hunkmap_async, &review_async).await;
let repo_config_clone = repo_config.clone();
process_coverage(&hunkmap_async, &review_async, &repo_config_clone).await;
}

fn hunk_already_exists(review: &Review) -> bool {
@@ -94,41 +98,51 @@ async fn commit_check(review: &Review) {
}
}

fn parse_review(message_data: &Vec<u8>) -> Option<Review>{
fn parse_review(message_data: &Vec<u8>) -> Option<(Review, RepoConfig)>{
let data_res = serde_json::from_slice::<Value>(&message_data);
if data_res.is_err() {
let e = data_res.expect_err("No error in data_res");
eprintln!("Incoming message does not contain valid reviews: {:?}", e);
return None;
}
let data = data_res.expect("Uncaught error in deserializing message_data");
println!("data == {:?}", &data["eventPayload"]["repository"]);
let repo_provider = data["repositoryProvider"].to_string().trim_matches('"').to_string();
let repo_name = data["eventPayload"]["repository"]["name"].to_string().trim_matches('"').to_string();
let deserialized_data = data_res.expect("Uncaught error in deserializing message_data");
println!("deserialized_data == {:?}", &deserialized_data["eventPayload"]["repository"]);
let repo_provider = deserialized_data["repositoryProvider"].to_string().trim_matches('"').to_string();
let repo_name = deserialized_data["eventPayload"]["repository"]["name"].to_string().trim_matches('"').to_string();
println!("repo NAME == {}", &repo_name);
let workspace_name = data["eventPayload"]["repository"]["workspace"]["slug"].to_string().trim_matches('"').to_string();
let workspace_name = deserialized_data["eventPayload"]["repository"]["workspace"]["slug"].to_string().trim_matches('"').to_string();
let clone_opt = get_clone_url_clone_dir(&repo_provider, &workspace_name, &repo_name);
if clone_opt.is_none() {
eprintln!("Unable to get clone url and directory");
return None;
}
let (clone_url, clone_dir) = clone_opt.expect("Empty clone_opt");
let pr_id = data["eventPayload"]["pullrequest"]["id"].to_string().trim_matches('"').to_string();
let pr_id = deserialized_data["eventPayload"]["pullrequest"]["id"].to_string().trim_matches('"').to_string();
let review = Review::new(
data["eventPayload"]["pullrequest"]["destination"]["commit"]["hash"].to_string().replace("\"", ""),
data["eventPayload"]["pullrequest"]["source"]["commit"]["hash"].to_string().replace("\"", ""),
deserialized_data["eventPayload"]["pullrequest"]["destination"]["commit"]["hash"].to_string().replace("\"", ""),
deserialized_data["eventPayload"]["pullrequest"]["source"]["commit"]["hash"].to_string().replace("\"", ""),
pr_id.clone(),
repo_name.clone(),
workspace_name.clone(),
repo_provider.clone(),
format!("bitbucket/{}/{}/{}", &workspace_name, &repo_name, &pr_id),
clone_dir,
clone_url,
data["eventPayload"]["pullrequest"]["author"]["uuid"].to_string().replace("\"", ""),
deserialized_data["eventPayload"]["pullrequest"]["author"]["uuid"].to_string().replace("\"", ""),
);
println!("review = {:?}", &review);
save_review_to_db(&review);
return Some(review);
let repo_config_res = serde_json::from_value(deserialized_data["repoConfig"].clone());
if repo_config_res.is_err() {
let e = repo_config_res.expect_err("No error in repo_config_res");
eprintln!("Unable to deserialze repo_config_res: {:?}", e);
let default_config = RepoConfig::default();
return Some((review, default_config));
}
let repo_config = repo_config_res.expect("Uncaught error in repo_config_res");
println!("repo_config = {:?}", &repo_config);
save_repo_config_to_db(&repo_config, &review.repo_name(), &review.repo_owner(), &review.provider());
return Some((review, repo_config));
}

fn publish_hunkmap(hunkmap: &HunkMap) {
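To make the new fallback path in parse_review concrete, here is a small self-contained sketch (payload shapes assumed, not taken from the repository) of how a webhook message with and without a repoConfig field deserializes, dropping back to the defaults when the field is missing or malformed:

use serde::{Serialize, Deserialize};
use serde_json::Value;

#[derive(Debug, Serialize, Deserialize, Clone)]
struct RepoConfig {
    comment: bool,
    auto_assign: bool,
}

fn parse_config(message: &str) -> RepoConfig {
    // A missing or malformed repoConfig falls back to the default
    // (both settings enabled), mirroring parse_review() above.
    serde_json::from_str::<Value>(message)
        .ok()
        .and_then(|data| serde_json::from_value(data["repoConfig"].clone()).ok())
        .unwrap_or(RepoConfig { comment: true, auto_assign: true })
}

fn main() {
    // Hypothetical payloads; only the repoConfig field matters here.
    let with_config = r#"{ "repoConfig": { "comment": true, "auto_assign": false } }"#;
    let without_config = r#"{ "eventPayload": {} }"#;
    println!("{:?}", parse_config(with_config));
    println!("{:?}", parse_config(without_config));
}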
3 changes: 2 additions & 1 deletion vibi-dpu/src/db/mod.rs
@@ -5,4 +5,5 @@ mod config;
pub mod webhook;
pub mod user;
pub mod hunk;
pub mod review;
pub mod review;
pub mod repo_config;
28 changes: 28 additions & 0 deletions vibi-dpu/src/db/repo_config.rs
@@ -0,0 +1,28 @@
use sled::IVec;

use crate::db::config::get_db;
use crate::utils::repo_config::RepoConfig;

pub fn save_repo_config_to_db(repo_config: &RepoConfig,
repo_name: &str, repo_owner: &str, repo_provider: &str) {
let db = get_db();
let config_key = format!("{}/{}/{}/config", repo_provider, repo_owner, repo_name);
println!("config_key = {}", &config_key);

// Serialize repo struct to JSON
let parse_res = serde_json::to_vec(repo_config);
if parse_res.is_err() {
let e = parse_res.expect_err("Empty error in parse_res in save_repo_config_to_db");
eprintln!("Unable to serialize repo in save_repo_config_to_db: {:?}, error: {:?}", &repo_config, e);
return;
}
let config_json = parse_res.expect("Uncaught error in parse_res save_repo_config_to_db");
// Insert JSON into sled DB
let insert_res = db.insert(IVec::from(config_key.as_bytes()), config_json);
if insert_res.is_err() {
let e = insert_res.expect_err("No error in insert_res save_repo_config_to_db");
eprintln!("Failed to upsert repo config into sled DB: {:?}", e);
return;
}
println!("Repo Config succesfully upserted: {:?}", repo_config);
}
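
The commit adds only the write path; for completeness, a standalone round-trip sketch against sled (the DB path and key below are made up) showing how a stored config can be read back with db.get and serde_json::from_slice:

use serde::{Serialize, Deserialize};

#[derive(Debug, Serialize, Deserialize, Clone)]
struct RepoConfig {
    comment: bool,
    auto_assign: bool,
}

fn main() -> sled::Result<()> {
    // Throwaway DB and a key following the provider/owner/name/config
    // scheme used by save_repo_config_to_db above.
    let db = sled::open("/tmp/vibi-dpu-example-db")?;
    let config_key = "bitbucket/Alokit-Innovations/vibi-dpu/config";
    let config = RepoConfig { comment: true, auto_assign: false };
    let config_json = serde_json::to_vec(&config).expect("serializable config");
    db.insert(config_key.as_bytes(), config_json)?;
    // Read path: db.get returns Result<Option<IVec>>; deserialize the bytes back.
    if let Some(bytes) = db.get(config_key.as_bytes())? {
        let read_back: RepoConfig = serde_json::from_slice(&bytes).expect("valid JSON in DB");
        println!("read back: {:?}", read_back);
    }
    Ok(())
}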
3 changes: 2 additions & 1 deletion vibi-dpu/src/utils/mod.rs
@@ -6,4 +6,5 @@ pub mod hunk;
pub mod review;
pub mod gitops;
pub mod user;
pub mod lineitem;
pub mod lineitem;
pub mod repo_config;
26 changes: 26 additions & 0 deletions vibi-dpu/src/utils/repo_config.rs
@@ -0,0 +1,26 @@
use serde::{Serialize, Deserialize};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RepoConfig {
comment: bool,
auto_assign: bool
}

impl RepoConfig {
// Getters
pub fn comment(&self) -> bool {
self.comment
}

pub fn auto_assign(&self) -> bool {
self.auto_assign
}

// Function to create a default RepoConfig
pub fn default() -> Self {
RepoConfig {
comment: true,
auto_assign: true
}
}
}
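
A small design note on the new struct: the inherent default() works, but implementing the standard Default trait instead would keep the same defaults while also enabling generic helpers such as unwrap_or_default(). A hedged alternative sketch, not part of this commit:

use serde::{Serialize, Deserialize};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RepoConfig {
    comment: bool,
    auto_assign: bool,
}

// Trait-based alternative to the inherent default(); same defaults as above.
impl Default for RepoConfig {
    fn default() -> Self {
        RepoConfig { comment: true, auto_assign: true }
    }
}

fn main() {
    // Any parse failure drops back to the default settings.
    let config: RepoConfig = serde_json::from_str("not valid json").unwrap_or_default();
    println!("fallback config: {:?}", config);
}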
