Skip to content

Commit 9e4bd9b

Browse files
authored
Merge pull request #37 from ropensci-review-tools/reviews
change request reviews for #11
2 parents 563ab18 + 22f8dec commit 9e4bd9b

File tree

4 files changed

+114
-2
lines changed

4 files changed

+114
-2
lines changed

DESCRIPTION

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
Package: repometrics
22
Title: Metrics for Your Code Repository
3-
Version: 0.1.1.108
3+
Version: 0.1.1.110
44
Authors@R:
55
person("Mark", "Padgham", , "mark.padgham@email.com", role = c("aut", "cre"),
66
comment = c(ORCID = "0000-0003-2172-5265"))

R/cm-metric-pr-reviews.R

+90
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
#' Change request reviews
#' \url{https://chaoss.community/kb/metric-change-request-reviews/}.
#'
#' The "Overview" at that link says:
#' The Change Request Reviews metric evaluates the level and quality of formal
#' review processes for change requests (e.g., pull requests) within
#' open-source projects. This metric tracks specific data points such as the
#' number of reviews, types of review feedback, and review outcomes (e.g.,
#' accepted, declined) to determine the rigor and quality of reviews. Measuring
#' this helps project maintainers gauge the thoroughness of code evaluation,
#' process efficiency, and software quality. Change request reviews include top
#' level comments about the entire change request, file level comments asking
#' for specific changes, and whether the change request was "accepted", "had
#' changes requested", or the reasoning behind a change request being closed
#' without getting merged. Additionally, this metric can reveal insights into
#' DEI-related aspects, such as the diversity of contributors participating in
#' review processes.
#'
#' This function generates a few outputs which can be used to provide insight
#' into that.
#'
#' @param path Local path to repository.
#' @param end_date Upper date limit of period over which pull requests are
#' assessed.
#' @return A single-row `data.frame` of review statistics: approval and
#' rejection ratios, mean duration of approved PRs, and mean numbers of
#' comments, commenters, and comment iterations for approved, rejected, and
#' other (merged-but-not-approved) PRs.
#' @noRd
cm_metric_pr_reviews <- function (path, end_date = Sys.Date ()) {

    prs <- get_prs_in_period (path, end_date) # in cm-metrics-change-req.R

    prs$created_at <- as.Date (prs$created_at)
    prs$closed_at <- as.Date (prs$closed_at)
    # Recode missing review decisions as the literal string "NA", so the
    # comparisons against "APPROVED" below never themselves yield NA:
    prs$review_decision [which (is.na (prs$review_decision))] <- "NA"

    index_approved <- which (prs$review_decision == "APPROVED")
    index_rejected <-
        which (prs$review_decision != "APPROVED" & !prs$merged & prs$closed)
    index_other <- which (prs$review_decision != "APPROVED" & prs$merged)

    approved_ratio <- rejected_ratio <- 0
    if (nrow (prs) > 0) {
        approved_ratio <- length (index_approved) / nrow (prs)
        rejected_ratio <- length (index_rejected) / nrow (prs)
    }

    # Scalar mean which returns NA rather than NaN for empty input. Plain
    # `if`/`else` is used because `ifelse()` is vectorized and intended for
    # vector conditions, not scalar ones.
    mean_to_na <- function (x) {
        if (length (x) == 0L) NA else mean (x)
    }

    pr_duration <- difftime (prs$closed_at, prs$created_at, units = "days")
    pr_duration <- as.integer (pr_duration)
    approval_duration <- mean_to_na (pr_duration [index_approved])

    n_comments <- prs$total_comments
    n_comments_per_approved <- mean_to_na (n_comments [index_approved])
    n_comments_per_rejected <- mean_to_na (n_comments [index_rejected])
    n_comments_per_other <- mean_to_na (n_comments [index_other])

    # Number of distinct commenters on each PR, excluding the PR creator:
    num_commenters <- vapply (seq_len (nrow (prs)), function (i) {
        cmt_authors <- unique (prs$comments [[i]]$author)
        cmt_authors <- cmt_authors [which (cmt_authors != prs$user_login [i])]
        length (cmt_authors)
    }, integer (1L))
    # Number of comments made by the PR creator, used as a proxy for the
    # number of comment iterations. (`na.rm` guards against NA author
    # entries; empty comment lists sum to 0L.)
    num_comment_iterations <- vapply (seq_len (nrow (prs)), function (i) {
        auts <- prs$comments [[i]]$author
        creator <- prs$user_login [i]
        sum (auts == creator, na.rm = TRUE)
    }, integer (1L))
    n_commenters_per_approved <- mean_to_na (num_commenters [index_approved])
    n_commenters_per_rejected <- mean_to_na (num_commenters [index_rejected])
    n_commenters_per_other <- mean_to_na (num_commenters [index_other])
    n_iterations_per_approved <-
        mean_to_na (num_comment_iterations [index_approved])
    n_iterations_per_rejected <-
        mean_to_na (num_comment_iterations [index_rejected])
    n_iterations_per_other <-
        mean_to_na (num_comment_iterations [index_other])

    ret <- data.frame (
        approved_ratio = approved_ratio,
        rejected_ratio = rejected_ratio,
        approval_duration = approval_duration,
        n_comments_per_approved = n_comments_per_approved,
        n_comments_per_rejected = n_comments_per_rejected,
        n_comments_per_other = n_comments_per_other,
        n_commenters_per_approved = n_commenters_per_approved,
        n_commenters_per_rejected = n_commenters_per_rejected,
        n_commenters_per_other = n_commenters_per_other,
        n_iterations_per_approved = n_iterations_per_approved,
        n_iterations_per_rejected = n_iterations_per_rejected,
        n_iterations_per_other = n_iterations_per_other
    )

    return (ret)
}

codemeta.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
"codeRepository": "https://github.com/ropensci-review-tools/repometrics",
99
"issueTracker": "https://github.com/ropensci-review-tools/repometrics/issues",
1010
"license": "https://spdx.org/licenses/GPL-3.0",
11-
"version": "0.1.1.108",
11+
"version": "0.1.1.110",
1212
"programmingLanguage": {
1313
"@type": "ComputerLanguage",
1414
"name": "R",

tests/testthat/test-cm-metrics.R

+22
Original file line numberDiff line numberDiff line change
@@ -108,3 +108,25 @@ test_that ("cm metrics issues-to-prs", {
108108
expect_length (x, 1L)
109109
expect_true (x > 0)
110110
})
111+
112+
test_that ("cm metrics pr-reviews", {

    Sys.setenv ("REPOMETRICS_TESTS" = "true")
    mock_cm_data ()
    path <- generate_test_pkg ()
    revs <- cm_metric_pr_reviews (path, end_date = end_date)
    fs::dir_delete (path)

    # One row of review statistics, with all twelve expected columns in order:
    expected_names <- c (
        "approved_ratio", "rejected_ratio", "approval_duration",
        "n_comments_per_approved", "n_comments_per_rejected",
        "n_comments_per_other", "n_commenters_per_approved",
        "n_commenters_per_rejected", "n_commenters_per_other",
        "n_iterations_per_approved", "n_iterations_per_rejected",
        "n_iterations_per_other"
    )
    expect_s3_class (revs, "data.frame")
    expect_equal (nrow (revs), 1L)
    expect_equal (ncol (revs), 12L)
    expect_named (revs, expected_names)
})

0 commit comments

Comments
 (0)