#' usefun
#'
#' A collection of useful functions by John
#'
#' For a complete list of functions, use \code{library(help = "usefun")}
#'
#' @aliases usefun-package
#' @name usefun
NULL
|
/scratch/gouwar.j/cran-all/cranData/usefun/R/usefun.R
|
#' @import rlang
#' @import tune
#' @import cli
#' @importFrom purrr map_lgl
#' @importFrom recipes recipe %>% all_predictors
#' @importFrom dplyr select one_of pull
#' @importFrom utils globalVariables
# ------------------------------------------------------------------------------
utils::globalVariables(
c(
# false positives from template code
"all_nominal_predictors", "all_numeric_predictors", "boost_tree",
"comments", "linear_reg", "logistic_reg", "multinom_reg", "set_engine",
"set_mode", "step_dummy", "step_normalize", "step_novel", "step_zv",
"step_string2factor", "colors", "workflow", "add_model", "add_recipe"
)
)
|
/scratch/gouwar.j/cran-all/cranData/usemodels/R/0_imports.R
|
zv_msg <- paste(
"Before centering and scaling the numeric predictors, any predictors with",
"a single unique value are filtered out."
)
dist_msg <-
paste(
"Since distance calculations are used, the predictor",
"variables should be on the same scale."
)
dot_msg <-
paste(
"Since dot product calculations are used, the predictor",
"variables should be on the same scale."
)
reg_msg <-
paste(
"Regularization methods sum up functions of the model slope coefficients.",
"Because of this, the predictor variables should be on the same scale."
)
dummy_msg <-
paste(
"This model requires the predictors to be numeric. The most common method to",
"convert qualitative predictors to numeric is to create binary indicator",
"variables (aka dummy variables) from these predictors."
)
dummy_hot_msg <-
paste(
dummy_msg,
"However, for this model, binary indicator variables can be made for",
"each of the levels of the factors (known as 'one-hot encoding')."
)
string_to_factor_msg <-
paste(
"For modeling, it is preferred to encode qualitative data as factors",
"(instead of character)."
)
|
/scratch/gouwar.j/cran-all/cranData/usemodels/R/messages.R
|
model_mode <- function(rec) {
var_roles <- summary(rec)
y_types <- var_roles$type[var_roles$role == "outcome"]
y_types <- unique(y_types)
if (length(y_types) > 1) {
rlang::abort("outcomes are of different types.")
}
if (all(y_types == "numeric")) {
mod_mode <- "regression"
} else {
mod_mode <- "classification"
}
mod_mode
}
y_lvl <- function(rec) {
mod_mode <- model_mode(rec)
if (mod_mode == "regression") {
return(NULL)
}
var_roles <- summary(rec)
y_cols <- var_roles$variable[var_roles$role == "outcome"]
y_dat <- rec$template %>% dplyr::select(one_of(y_cols)) %>% dplyr::pull(1)
length(levels(y_dat))
}
has_factor_pred <- function(x) {
info <- summary(x)
pred_types <- info$type[info$role == "predictor"]
any(pred_types == "nominal")
}
num_pred_col <- function(x) {
info <- summary(x)
sum(info$role == "predictor")
}
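# Illustrative sketch (not part of the package, never run): how the helpers
# above behave on a simple recipe, assuming summary(<recipe>) reports a single
# type per column, which is the structure this code expects.
if (FALSE) {
  rec <- recipes::recipe(Species ~ ., data = iris)
  model_mode(rec)       # "classification" (factor outcome)
  y_lvl(rec)            # 3, the number of outcome levels
  has_factor_pred(rec)  # FALSE: all predictors are numeric
  num_pred_col(rec)     # 4
}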
# ------------------------------------------------------------------------------
# helper functions
expr_width <- 74L
assign_value <- function(name, value, cr = TRUE) {
value <- rlang::enexpr(value)
value <- rlang::expr_text(value, width = expr_width)
chr_assign(name, value, cr)
}
chr_assign <- function(name, value, cr = TRUE) {
name <- paste(name, "<-")
if (cr) {
res <- c(name, paste0("\n ", value))
} else {
res <- paste(name, value)
}
res
}
pipe_value <- function(base, value) {
# Find last non-comment line, add a `%>%` to the end, then add another line
value <- rlang::enexpr(value)
value <- rlang::expr_text(value, width = expr_width)
clean_base <- gsub("\\n", "", base)
clean_base <- trimws(clean_base, which = "left")
not_comment <- seq_along(base)[!grepl("## ", clean_base)]
n <- max(1, max(not_comment))
base[n] <- paste(base[n], "%>%")
c(base, paste0("\n ", value))
}
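# Illustrative sketch (not run): assign_value() and pipe_value() accumulate the
# generated code as a character vector that is later cat()'d to the console.
if (FALSE) {
  x <- assign_value("example_recipe", recipe(mpg ~ ., data = mtcars))
  x <- pipe_value(x, step_zv(all_predictors()))
  cat(x)
  #> example_recipe <-
  #>   recipe(mpg ~ ., data = mtcars) %>%
  #>   step_zv(all_predictors())
}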
add_comment <- function(base, value, add = TRUE, colors = TRUE) {
if (!add) {
return(base)
}
if (!is.character(value)) {
rlang::abort("`value` must be character.")
}
value <- strwrap(value, width = expr_width, prefix = "## ")
if (colors) {
value <- tune::get_tune_colors()$message$warning(value)
}
res <- c(base, paste0("\n ", value))
res
}
add_steps_dummy_vars <- function(base, hot = FALSE, add = FALSE, colors = TRUE) {
base <- base %>%
pipe_value(step_novel(all_nominal_predictors()))
if (hot) {
base <- base %>%
add_comment(dummy_hot_msg, add, colors = colors) %>%
pipe_value(step_dummy(all_nominal_predictors(), one_hot = TRUE))
} else {
base <- base %>%
add_comment(dummy_msg, add, colors = colors) %>%
pipe_value(step_dummy(all_nominal_predictors()))
}
base
}
add_steps_normalization <- function(base) {
base %>%
pipe_value(step_zv(all_predictors())) %>%
pipe_value(step_normalize(all_numeric_predictors()))
}
factor_check <- function(base, rec, add, colors = TRUE) {
var_roles <- summary(rec)
nominal <- var_roles$variable[var_roles$type == "nominal"]
is_str <-
purrr::map_lgl(rec$template %>% dplyr::select(dplyr::one_of(nominal)),
rlang::is_character)
if (any(is_str)) {
selector <- rlang::expr(one_of(!!!nominal[is_str]))
step_expr <- rlang::expr(step_string2factor(!!selector))
base <-
base %>%
add_comment(string_to_factor_msg, add = add, colors = colors) %>%
pipe_value(!!step_expr)
}
base
}
top_level_comment <- function(..., add = FALSE, colors = TRUE) {
if (!add) {
return(invisible(NULL))
}
value <- paste(...)
value <- strwrap(value, width = expr_width, prefix = "## ")
if (colors) {
value <- tune::get_tune_colors()$message$warning(value)
}
cat(paste0(value, collapse = "\n"))
cat("\n")
}
template_workflow <- function(prefix) {
paste0(prefix, "_workflow") %>%
assign_value(workflow()) %>%
pipe_value(add_recipe(!!rlang::sym(paste0(prefix, "_recipe")))) %>%
pipe_value(add_model(!!rlang::sym(paste0(prefix, "_spec"))))
}
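# Illustrative note: for a prefix such as "glmnet", template_workflow() yields
# code that prints roughly as
#   glmnet_workflow <-
#     workflow() %>%
#     add_recipe(glmnet_recipe) %>%
#     add_model(glmnet_spec)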
template_tune_with_grid <- function(prefix, colors = TRUE) {
tune_expr <-
rlang::call2("tune_grid",
sym(paste0(prefix, "_workflow")),
resamples = expr(stop("add your rsample object")),
grid = sym(paste0(prefix, "_grid")))
res <- assign_value(paste0(prefix, "_tune"), !!tune_expr)
if (colors) {
res <- sub(
"stop(\"add your rsample object\")",
tune::get_tune_colors()$message$danger("stop(\"add your rsample object\")"),
res,
fixed = TRUE
)
}
res
}
template_tune_no_grid <- function(prefix, seed = sample.int(10^5, 1), colors = TRUE) {
tune_expr <-
rlang::call2(
"tune_grid",
sym(paste0(prefix, "_workflow")),
resamples = expr(stop("add your rsample object")),
grid = expr(stop("add number of candidate points"))
)
res <- c(paste0("set.seed(", seed,")\n"),
assign_value(paste0(prefix, "_tune"), !!tune_expr))
if (colors) {
res <- sub(
"stop(\"add your rsample object\")",
tune::get_tune_colors()$message$danger("stop(\"add your rsample object\")"),
res,
fixed = TRUE
)
res <- sub(
"stop(\"add number of candidate points\")",
tune::get_tune_colors()$message$danger("stop(\"add number of candidate points\")"),
res,
fixed = TRUE
)
}
res
}
# Take the call to the template function and turn it into a call to `recipe()`
initial_recipe_call <- function(cl) {
cl$tune <- NULL
cl$verbose <- NULL
cl$colors <- NULL
cl$prefix <- NULL
rec_cl <- cl
rec_cl[[1]] <- rlang::expr(recipe)
rec_cl
}
output_loc <- function(clipboard) {
if (clipboard) {
res <- tempfile(pattern = "usemodels_")
} else {
res <- ""
}
res
}
route <- function(x, path, ...) {
cat(x, "\n\n", file = path, append = path != "", ...)
invisible(NULL)
}
clipboard_output <- function(pth) {
if (pth == "") {
return(invisible(NULL))
}
code <- readLines(pth)
clipr::write_clip(code, object_type = "character")
cli::cli_alert_success("code is on the clipboard.")
invisible(NULL)
}
check_color <- function(cls, clip) {
if (cls && clip) {
cls <- FALSE
}
cls
}
check_clipboard <- function(clipboard) {
if (!clipboard) {
return(invisible(NULL))
}
# from reprex_clipboard
y <- clipr::clipr_available()
if (isFALSE(y)) {
clipr::dr_clipr()
rlang::abort("Please use `clipboard = FALSE`")
}
invisible(NULL)
}
|
/scratch/gouwar.j/cran-all/cranData/usemodels/R/misc.R
|
#' Functions to create boilerplate code for specific models
#'
#' These functions make suggestions for code when using a few common models.
#' They print out code to the console that could be considered minimal syntax
#' for their respective techniques. Each creates a prototype recipe and workflow
#' object that can be edited or updated as the data require.
#'
#' @param formula A simple model formula with no in-line functions. This will
#' be used to template the recipe object as well as to determine which outcome
#' and predictor columns will be used.
#' @param data A data frame with the columns used in the analysis.
#' @param prefix A single character string to use as a prefix for the resulting
#' objects.
#' @param verbose A single logical that determines whether comments are added to
#' the printed code explaining why certain lines are used.
#' @param tune A single logical that controls if code for model tuning should be
#' printed.
#' @param colors A single logical for coloring warnings and code snippets that
#' require the user's attention (ignored when `clipboard = TRUE`).
#' @param clipboard A single logical for whether the code output should be
#' sent to the clipboard or printed in the console.
#' @return Invisible `NULL` but code is printed to the console.
#' @details
#' Based on the columns in `data`, certain recipe steps are printed. For example, if
#' a model requires that qualitative predictors be converted to numeric (say,
#' using dummy variables) then an additional `step_dummy()` is added. Otherwise
#' that recipe step is not included in the output.
#'
#' The syntax is opinionated and should not be considered the exact answer for
#' every data analysis. It has reasonable defaults.
#' @examples
#' library(modeldata)
#' data(ad_data)
#' use_glmnet(Class ~ ., data = ad_data)
#'
#' data(Sacramento)
#' use_glmnet(price ~ ., data = Sacramento, verbose = TRUE, prefix = "sac_homes")
#' @export
#' @rdname templates
use_glmnet <- function(formula, data, prefix = "glmnet", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
if (has_factor_pred(rec)) {
rec_syntax <-
add_steps_dummy_vars(rec_syntax, add = verbose, colors = colors)
}
rec_syntax <-
rec_syntax %>%
add_comment(paste(reg_msg, zv_msg), add = verbose, colors = colors) %>%
add_steps_normalization()
mod_mode <- model_mode(rec)
if (tune) {
prm <- rlang::exprs(penalty = tune(), mixture = tune())
} else {
prm <- NULL
}
if (mod_mode == "classification") {
num_lvl <- y_lvl(rec)
if (num_lvl == 2) {
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("logistic_reg", !!!prm)) %>%
pipe_value(set_mode("classification"))
} else {
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("multinom_reg", !!!prm)) %>%
pipe_value(set_mode("classification"))
}
} else {
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("linear_reg", !!!prm)) %>%
pipe_value(set_mode("regression"))
}
mod_syntax <-
mod_syntax %>%
pipe_value(set_engine("glmnet"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
glmn_grid <- rlang::expr(
glmn_grid <-
tidyr::crossing(
penalty = 10 ^ seq(-6, -1, length.out = 20),
mixture = c(0.05, .2, .4, .6, .8, 1)
)
)
glmn_grid[[2]] <- rlang::sym(paste0(prefix, "_grid"))
route(rlang::expr_text(glmn_grid, width = expr_width), path = pth)
route(template_tune_with_grid(prefix, colors = colors), path = pth)
}
clipboard_output(pth)
invisible(NULL)
}
#' @export
#' @rdname templates
use_xgboost <- function(formula, data, prefix = "xgboost", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipe(formula, data)
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
if (has_factor_pred(rec)) {
rec_syntax <-
add_steps_dummy_vars(rec_syntax,
hot = TRUE,
add = verbose,
colors = colors)
}
rec_syntax <- pipe_value(rec_syntax, step_zv(all_predictors()))
if (tune) {
prm <-
rlang::exprs(
trees = tune(), min_n = tune(), tree_depth = tune(), learn_rate = tune(),
loss_reduction = tune(), sample_size = tune()
)
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("boost_tree", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec))) %>%
pipe_value(set_engine("xgboost"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
# ------------------------------------------------------------------------------
#' @export
#' @rdname templates
use_kknn <- function(formula, data, prefix = "kknn", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
if (has_factor_pred(rec)) {
rec_syntax <-
add_steps_dummy_vars(rec_syntax, add = verbose, colors = colors)
}
rec_syntax <-
rec_syntax %>%
add_comment(paste(dist_msg, zv_msg), add = verbose, colors = colors) %>%
add_steps_normalization()
if (tune) {
prm <- rlang::exprs(neighbors = tune(), weight_func = tune())
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("nearest_neighbor", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec))) %>%
pipe_value(set_engine("kknn"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
# ------------------------------------------------------------------------------
#' @export
#' @rdname templates
use_ranger <- function(formula, data, prefix = "ranger", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
# TODO add a check for the factor levels that are an issue for
if (tune) {
prm <- rlang::exprs(mtry = tune(), min_n = tune(), trees = 1000)
} else {
prm <- rlang::exprs(trees = 1000)
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("rand_forest", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec))) %>%
pipe_value(set_engine("ranger"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
# ------------------------------------------------------------------------------
#' @export
#' @rdname templates
use_earth <- function(formula, data, prefix = "earth", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipe(formula, data)
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
if (has_factor_pred(rec)) {
rec_syntax <-
add_steps_dummy_vars(rec_syntax, add = verbose, colors = colors)
}
rec_syntax <- pipe_value(rec_syntax, step_zv(all_predictors()))
if (tune) {
prm <-
rlang::exprs(
num_terms = tune(), prod_degree = tune(), prune_method = "none"
)
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("mars", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec))) %>%
pipe_value(set_engine("earth"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
# We can only have as many terms as data points but maybe we should
# give some wiggle room for resampling. Also, we will have a sequence of even
# numbered terms so divide by 2 and keep an integer.
term_max <- floor(min(12, floor(nrow(data) * 0.75)) / 2)
mars_grid <- rlang::expr(
mars_grid <-
tidyr::crossing(num_terms = 2 * (1:!!term_max), prod_degree = 1:2)
)
mars_grid[[2]] <- rlang::sym(paste0(prefix, "_grid"))
top_level_comment(
"MARS models can make predictions on many _sub_models_, meaning that we can",
"evaluate many values of `num_terms` without much computational cost.",
"A regular grid is used to exploit this property.",
"The first term is only the intercept, so the grid is a sequence of even",
"numbered values.",
add = verbose,
colors = colors
)
route(rlang::expr_text(mars_grid, width = expr_width), path = pth)
route(template_tune_with_grid(prefix, colors = colors), path = pth)
}
clipboard_output(pth)
invisible(NULL)
}
# ------------------------------------------------------------------------------
#' @export
#' @rdname templates
use_cubist <- function(formula, data, prefix = "cubist", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
if (model_mode(rec) != "regression") {
rlang::abort("Cubist models are only for regression")
}
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
rec_syntax <- pipe_value(rec_syntax, step_zv(all_predictors()))
if (tune) {
prm <- rlang::exprs(committees = tune(), neighbors = tune())
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("cubist_rules", !!!prm)) %>%
pipe_value(set_engine("Cubist"))
route("library(rules)", path = pth, sep = "")
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
cubist_grid <- rlang::expr(
cubist_grid <-
tidyr::crossing(committees = c(1:9, (1:5) * 10), neighbors = c(0, 3, 6, 9))
)
cubist_grid[[2]] <- rlang::sym(paste0(prefix, "_grid"))
route(rlang::expr_text(cubist_grid, width = expr_width), path = pth)
route(template_tune_with_grid(prefix, colors = colors), path = pth)
}
clipboard_output(pth)
invisible(NULL)
}
#' @export
#' @rdname templates
use_kernlab_svm_rbf <- function(formula, data, prefix = "kernlab", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
rec_syntax <-
rec_syntax %>%
add_comment(paste(dot_msg, zv_msg), add = verbose, colors = colors) %>%
add_steps_normalization()
mod_mode <- model_mode(rec)
if (tune) {
prm <- rlang::exprs(cost = tune(), rbf_sigma = tune())
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("svm_rbf", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec)))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
#' @export
#' @rdname templates
use_kernlab_svm_poly <- function(formula, data, prefix = "kernlab", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
rec_syntax <-
rec_syntax %>%
add_comment(paste(dot_msg, zv_msg), add = verbose, colors = colors) %>%
add_steps_normalization()
mod_mode <- model_mode(rec)
if (tune) {
prm <- rlang::exprs(cost = tune(), degree = tune(), scale_factor = tune())
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("svm_poly", !!!prm)) %>%
pipe_value(set_mode(!!model_mode(rec)))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
#' @export
#' @rdname templates
use_C5.0 <- function(formula, data, prefix = "C50", verbose = FALSE,
tune = TRUE, colors = TRUE, clipboard = FALSE) {
check_clipboard(clipboard)
colors <- check_color(colors, clipboard)
pth <- output_loc(clipboard)
on.exit(unlink(pth))
rec_cl <- initial_recipe_call(match.call())
rec_syntax <-
paste0(prefix, "_recipe") %>%
assign_value(!!rec_cl)
rec <- recipes::recipe(formula, data)
if (model_mode(rec) != "classification") {
rlang::abort("C5.0 models are only for classification.")
}
rec_syntax <-
rec_syntax %>%
factor_check(rec, add = verbose, colors = colors)
if (tune) {
prm <- rlang::exprs(trees = tune(), min_n = tune())
} else {
prm <- NULL
}
mod_syntax <-
paste0(prefix, "_spec") %>%
assign_value(!!rlang::call2("boost_tree", !!!prm)) %>%
pipe_value(set_mode("classification")) %>%
pipe_value(set_engine("C5.0"))
route(rec_syntax, path = pth)
route(mod_syntax, path = pth)
route(template_workflow(prefix), path = pth)
if (tune) {
route(template_tune_no_grid(prefix, colors = colors), path = pth, sep = "")
}
clipboard_output(pth)
invisible(NULL)
}
|
/scratch/gouwar.j/cran-all/cranData/usemodels/R/use.R
|
#' Add minimal RStudio Addin binding
#'
#' This function helps you add a minimal
#' [RStudio Addin](https://rstudio.github.io/rstudioaddins/) binding to
#' `inst/rstudio/addins.dcf`.
#'
#' @param addin Name of the addin function, which should be defined in the
#' `R` folder.
#' @inheritParams use_template
#'
#' @export
use_addin <- function(addin = "new_addin", open = rlang::is_interactive()) {
addin_dcf_path <- proj_path("inst", "rstudio", "addins.dcf")
if (!file_exists(addin_dcf_path)) {
create_directory(proj_path("inst", "rstudio"))
file_create(addin_dcf_path)
ui_done("Creating {ui_path(addin_dcf_path)}")
}
addin_info <- render_template("addins.dcf", data = list(addin = addin))
addin_info[length(addin_info) + 1] <- ""
write_utf8(addin_dcf_path, addin_info, append = TRUE)
ui_done("Adding binding to {ui_code(addin)} to addins.dcf.")
if (open) {
edit_file(addin_dcf_path)
}
invisible(TRUE)
}
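# Illustrative sketch (not part of the package): the rendered binding uses the
# standard RStudio addin fields; the exact Name/Description text comes from the
# "addins.dcf" template. For use_addin("new_addin") the appended entry looks
# something like:
#   Name: New Addin Name
#   Description: New Addin Description
#   Binding: new_addin
#   Interactive: false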
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/addin.R
|
#' Add an author to the `Authors@R` field in DESCRIPTION
#'
#' @description
#' `use_author()` adds a person to the `Authors@R` field of the DESCRIPTION
#' file, creating that field if necessary. It will not modify, e.g., the role(s)
#' or email of an existing author (judged using their "Given Family" name). For
#' that we recommend editing DESCRIPTION directly. Or, for programmatic use,
#' consider calling the more specialized functions available in the \pkg{desc}
#' package directly.
#'
#' `use_author()` also surfaces two other situations you might want to address:
#' * Explicit use of the fields `Author` or `Maintainer`. We recommend switching
#' to the more modern `Authors@R` field instead, because it offers richer
#' metadata for various downstream uses. (Note that `Authors@R` is *eventually*
#' processed to create `Author` and `Maintainer` fields, but only when the
#' `tar.gz` is built from package source.)
#' * Presence of the fake author placed by [create_package()] and
#' [use_description()]. This happens when \pkg{usethis} has to create a
#' DESCRIPTION file and the user hasn't given any author information via the
#' `fields` argument or the global option `"usethis.description"`. The
#' placeholder looks something like `First Last <first.last@example.com> [aut,
#' cre] (YOUR-ORCID-ID)` and `use_author()` offers to remove it in interactive
#' sessions.
#'
#' @inheritParams utils::person
#' @inheritDotParams utils::person
#' @export
#' @examples
#' \dontrun{
#' use_author(
#' given = "Lucy",
#' family = "van Pelt",
#' role = c("aut", "cre"),
#' email = "lucy@example.com",
#' comment = c(ORCID = "LUCY-ORCID-ID")
#' )
#'
#' use_author("Charlie", "Brown")
#' }
#'
use_author <- function(given = NULL, family = NULL, ..., role = "ctb") {
check_is_package("use_author()")
maybe_name(given)
maybe_name(family)
check_character(role)
d <- proj_desc()
challenge_legacy_author_fields(d)
# We only need to consider Authors@R
authors_at_r_already <- d$has_fields("Authors@R")
if (authors_at_r_already) {
check_author_is_novel(given, family, d)
}
# This person is not already in Authors@R
author <- utils::person(given = given, family = family, role = role, ...)
aut_fmt <- format(author, style = 'text')
if (authors_at_r_already) {
ui_done("
Adding to {ui_field('Authors@R')} in DESCRIPTION:
{aut_fmt}")
} else {
ui_done("
Creating {ui_field('Authors@R')} field in DESCRIPTION and adding:
{aut_fmt}")
}
d$add_author(given = given, family = family, role = role, ...)
challenge_default_author(d)
d$write()
invisible(TRUE)
}
challenge_legacy_author_fields <- function(d = proj_desc()) {
has_legacy_field <- d$has_fields("Author") || d$has_fields("Maintainer")
if (!has_legacy_field) {
return(invisible())
}
ui_oops("
Found legacy {ui_field('Author')} and/or {ui_field('Maintainer')} field \\
in DESCRIPTION.
usethis only supports modification of the {ui_field('Authors@R')} field.")
ui_info("
We recommend one of these paths forward:
* Delete these fields and rebuild with {ui_code('use_author()')}.
* Convert to {ui_field('Authors@R')} with {ui_code('desc::desc_coerce_authors_at_r()')},
then delete the legacy fields.")
if (ui_yeah("Do you want to cancel this operation and sort that out first?")) {
ui_stop("Cancelling.")
}
invisible()
}
check_author_is_novel <- function(given = NULL, family = NULL, d = proj_desc()) {
authors <- d$get_authors()
authors_given <- purrr::map(authors, "given")
authors_family <- purrr::map(authors, "family")
m <- purrr::map2_lgl(authors_given, authors_family, function(x, y) {
identical(x, given) && identical(y, family)
})
if (any(m)) {
aut_name <- glue("{given %||% ''} {family %||% ''}")
usethis_abort(c(
"{.val {aut_name}} already appears in {.val Authors@R}.",
"Please make the desired change directly in DESCRIPTION or call the \\
desc package directly."
))
}
invisible()
}
challenge_default_author <- function(d = proj_desc()) {
defaults <- usethis_description_defaults()
default_author <- eval(parse(text = defaults[["Authors@R"]]))
authors <- d$get_authors()
m <- map_lgl(
authors,
# the `person` class is pretty weird!
function(x) identical(x, unclass(default_author)[[1]])
)
if (any(m)) {
ui_info("
{ui_field('Authors@R')} appears to include a placeholder author:
{format(default_author, style = 'text')}")
if (is_interactive() && ui_yeah("Would you like to remove it?")) {
# TODO: Do I want to suppress this output?
# Authors removed: First Last, NULL NULL.
do.call(d$del_author, unclass(default_author)[[1]])
}
}
return(invisible())
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/author.R
|
#' README badges
#'
#' These helpers produce the markdown text you need in your README to include
#' badges that report information, such as the CRAN version or test coverage,
#' and link out to relevant external resources. To add badges automatically
#' ensure your badge block starts with a line containing only
#' `<!-- badges: start -->` and ends with a line containing only
#' `<!-- badges: end -->`.
#'
#' @details
#'
#' * `use_badge()`: a general helper used in all badge functions
#' * `use_bioc_badge()`: badge indicates [Bioconductor build
#' status](https://bioconductor.org/developers/)
#' * `use_cran_badge()`: badge indicates what version of your package is
#' available on CRAN, powered by <https://www.r-pkg.org>
#' * `use_lifecycle_badge()`: badge declares the developmental stage of a
#' package according to <https://lifecycle.r-lib.org/articles/stages.html>.
#' * `use_binder_badge()`: badge indicates that your repository can be launched
#' in an executable environment on <https://mybinder.org/>
#' * `use_posit_cloud_badge()`: badge indicates that your repository can be launched
#' in a [Posit Cloud](https://posit.cloud) project
#' * `use_rscloud_badge()`: `r lifecycle::badge("deprecated")`: Use
#' [use_posit_cloud_badge()] instead.
#'
#' @param badge_name Badge name. Used in error message and alt text
#' @param href,src Badge link and image src
#' @param stage Stage of the package lifecycle. One of "experimental",
#' "stable", "superseded", or "deprecated".
#' @seealso Functions that configure continuous integration, such as
#' [use_github_actions()], also create badges.
#'
#' @name badges
#' @examples
#' \dontrun{
#' use_cran_badge()
#' use_lifecycle_badge("stable")
#' }
NULL
#' @rdname badges
#' @export
use_badge <- function(badge_name, href, src) {
path <- find_readme()
if (is.null(path)) {
ui_oops("
Can't find a README for the current project.
See {ui_code('usethis::use_readme_rmd()')} for help creating this file.
Badge link can only be printed to screen.
")
path <- "README"
}
changed <- block_append(
glue("{ui_field(badge_name)} badge"),
glue("[]({href})"),
path = path,
block_start = badge_start,
block_end = badge_end
)
if (changed && path_ext(path) == "Rmd") {
ui_todo("Re-knit {ui_path(path)} with {ui_code('devtools::build_readme()')}")
}
invisible(changed)
}
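# Illustrative sketch (not part of the package): use_badge() maintains a block
# in the README delimited by the badge markers. For example, after
# use_cran_badge() in a package named "foo", the README contains lines like:
#   <!-- badges: start -->
#   [![CRAN status](https://www.r-pkg.org/badges/version/foo)](https://CRAN.R-project.org/package=foo)
#   <!-- badges: end -->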
#' @rdname badges
#' @export
use_cran_badge <- function() {
check_is_package("use_cran_badge()")
pkg <- project_name()
src <- glue("https://www.r-pkg.org/badges/version/{pkg}")
href <- glue("https://CRAN.R-project.org/package={pkg}")
use_badge("CRAN status", href, src)
invisible(TRUE)
}
#' @rdname badges
#' @export
use_bioc_badge <- function() {
check_is_package("use_bioc_badge()")
pkg <- project_name()
src <- glue("http://www.bioconductor.org/shields/build/release/bioc/{pkg}.svg")
href <- glue("https://bioconductor.org/checkResults/release/bioc-LATEST/{pkg}")
use_badge("BioC status", href, src)
invisible(TRUE)
}
#' @rdname badges
#' @export
use_lifecycle_badge <- function(stage) {
check_is_package("use_lifecycle_badge()")
pkg <- project_name()
stage <- tolower(stage)
stage <- arg_match0(stage, names(stages))
colour <- stages[[stage]]
src <- glue("https://img.shields.io/badge/lifecycle-{stage}-{colour}.svg")
href <- glue("https://lifecycle.r-lib.org/articles/stages.html#{stage}")
use_badge(paste0("Lifecycle: ", stage), href, src)
invisible(TRUE)
}
stages <- c(
experimental = "orange",
stable = "brightgreen",
superseded = "blue",
deprecated = "orange"
)
#' @rdname badges
#' @param ref A Git branch, tag, or SHA
#' @param urlpath An optional `urlpath` component to add to the link, e.g.
#' `"rstudio"` to open an RStudio IDE instead of a Jupyter notebook. See the
#' [binder
#' documentation](https://mybinder.readthedocs.io/en/latest/howto/user_interface.html)
#' for additional examples.
#' @export
use_binder_badge <- function(ref = git_default_branch(), urlpath = NULL) {
repo_spec <- target_repo_spec()
if (is.null(urlpath)) {
urlpath <- ""
} else {
urlpath <- glue("?urlpath={urlpath}")
}
url <- glue("https://mybinder.org/v2/gh/{repo_spec}/{ref}{urlpath}")
img <- "https://mybinder.org/badge_logo.svg"
use_badge("Launch binder", url, img)
invisible(TRUE)
}
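# Illustrative note: for a repo spec "OWNER/REPO" on branch "main" with
# urlpath = "rstudio", the badge links to
#   https://mybinder.org/v2/gh/OWNER/REPO/main?urlpath=rstudio
# and uses the image https://mybinder.org/badge_logo.svg.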
#' @rdname badges
#' @param url A link to an existing [Posit Cloud](https://posit.cloud)
#' project. See the [Posit Cloud
#' documentation](https://posit.cloud/learn/guide#project-settings-access)
#' for details on how to set project access and obtain a project link.
#' @export
use_posit_cloud_badge <- function(url) {
check_name(url)
project_url <- "posit[.]cloud/content"
spaces_url <- "posit[.]cloud/spaces"
if (grepl(project_url, url) || grepl(spaces_url, url)) {
# TODO: Get posit logo hosted at https://github.com/simple-icons/simple-icons/
# and add to end of img url as `?logo=posit` (or whatever slug we get)
img <- "https://img.shields.io/badge/launch-posit%20cloud-447099?style=flat"
use_badge("Launch Posit Cloud", url, img)
} else {
usethis_abort("
{.fun usethis::use_posit_cloud_badge} requires a link to an \\
existing Posit Cloud project of the form \\
{.val https://posit.cloud/content/<project-id>} or \\
{.val https://posit.cloud/spaces/<space-id>/content/<project-id>}.")
}
invisible(TRUE)
}
#' @rdname badges
#' @export
use_rscloud_badge <- function(url) {
lifecycle::deprecate_warn(
"2.2.0", "use_rscloud_badge()",
"use_posit_cloud_badge()"
)
use_posit_cloud_badge(url)
}
has_badge <- function(href) {
readme_path <- proj_path("README.md")
if (!file_exists(readme_path)) {
return(FALSE)
}
readme <- read_utf8(readme_path)
any(grepl(href, readme, fixed = TRUE))
}
# Badge data structure ----------------------------------------------------
badge_start <- "<!-- badges: start -->"
badge_end <- "<!-- badges: end -->"
find_readme <- function() {
path_first_existing(proj_path(c("README.Rmd", "README.md")))
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/badge.R
|
block_append <- function(desc, value, path,
block_start = "# <<<",
block_end = "# >>>",
block_prefix = NULL,
block_suffix = NULL,
sort = FALSE) {
if (!is.null(path) && file_exists(path)) {
lines <- read_utf8(path)
if (all(value %in% lines)) {
return(FALSE)
}
block_lines <- block_find(lines, block_start, block_end)
} else {
block_lines <- NULL
}
if (is.null(block_lines)) {
ui_todo("
Copy and paste the following lines into {ui_path(path)}:")
ui_code_block(c(block_prefix, block_start, value, block_end, block_suffix))
return(FALSE)
}
ui_done("Adding {desc} to {ui_path(path)}")
start <- block_lines[[1]]
end <- block_lines[[2]]
block <- lines[seq2(start, end)]
new_lines <- union(block, value)
if (sort) {
new_lines <- sort(new_lines)
}
lines <- c(
lines[seq2(1, start - 1L)],
new_lines,
lines[seq2(end + 1L, length(lines))]
)
write_utf8(path, lines)
TRUE
}
block_replace <- function(desc, value, path,
block_start = "# <<<",
block_end = "# >>>") {
if (!is.null(path) && file_exists(path)) {
lines <- read_utf8(path)
block_lines <- block_find(lines, block_start, block_end)
} else {
block_lines <- NULL
}
if (is.null(block_lines)) {
ui_todo("Copy and paste the following lines into {ui_value(path)}:")
ui_code_block(c(block_start, value, block_end))
return(invisible(FALSE))
}
start <- block_lines[[1]]
end <- block_lines[[2]]
block <- lines[seq2(start, end)]
if (identical(value, block)) {
return(invisible(FALSE))
}
ui_done("Replacing {desc} in {ui_path(path)}")
lines <- c(
lines[seq2(1, start - 1L)],
value,
lines[seq2(end + 1L, length(lines))]
)
write_utf8(path, lines)
}
block_show <- function(path, block_start = "# <<<", block_end = "# >>>") {
lines <- read_utf8(path)
block <- block_find(lines, block_start, block_end)
lines[seq2(block[[1]], block[[2]])]
}
block_find <- function(lines, block_start = "# <<<", block_end = "# >>>") {
# No file
if (is.null(lines)) {
return(NULL)
}
start <- which(lines == block_start)
end <- which(lines == block_end)
# No block
if (length(start) == 0 && length(end) == 0) {
return(NULL)
}
if (!(length(start) == 1 && length(end) == 1 && start < end)) {
ui_stop(
"Invalid block specification.
Must start with {ui_code(block_start)} and end with {ui_code(block_end)}"
)
}
c(start + 1L, end - 1L)
}
block_create <- function(lines = character(), block_start = "# <<<", block_end = "# >>>") {
c(block_start, unique(lines), block_end)
}
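# Illustrative sketch (not run): block_find() returns the line numbers of the
# content between the markers, and block_create() wraps new content in them.
if (FALSE) {
  lines <- c("preamble", "# <<<", "^foo$", "# >>>")
  block_find(lines)                  # c(3L, 3L): just the line "^foo$"
  block_create(c("^foo$", "^bar$"))  # c("# <<<", "^foo$", "^bar$", "# >>>")
}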
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/block.R
|
#' Visit important project-related web pages
#'
#' These functions take you to various web pages associated with a project
#' (often, an R package) and return the target URL(s) invisibly. To form
#' these URLs we consult:
#' * Git remotes configured for the active project that appear to be hosted on
#' a GitHub deployment
#' * DESCRIPTION file for the active project or the specified `package`. The
#' DESCRIPTION file is sought first in the local package library and then
#' on CRAN.
#' * Fixed templates:
#' - Travis CI: `https://travis-ci.{EXT}/{OWNER}/{PACKAGE}`
#' - Circle CI: `https://circleci.com/gh/{OWNER}/{PACKAGE}`
#' - CRAN landing page: `https://cran.r-project.org/package={PACKAGE}`
#' - GitHub mirror of a CRAN package: `https://github.com/cran/{PACKAGE}`
#' Templated URLs aren't checked for existence, so there is no guarantee
#' there will be content at the destination.
#'
#' @details
#' * `browse_package()`: Assembles a list of URLs and lets user choose one to
#' visit in a web browser. In a non-interactive session, returns all
#' discovered URLs.
#' * `browse_project()`: Thin wrapper around `browse_package()` that always
#' targets the active usethis project.
#' * `browse_github()`: Visits a GitHub repository associated with the project.
#' In the case of a fork, you might be asked to specify if you're interested
#' in the source repo or your fork.
#' * `browse_github_issues()`: Visits the GitHub Issues index or one specific
#' issue.
#' * `browse_github_pulls()`: Visits the GitHub Pull Request index or one
#' specific pull request.
#' * `browse_travis()`: Visits the project's page on
#' [Travis CI](https://www.travis-ci.com/).
#' * `browse_circleci()`: Visits the project's page on
#' [Circle CI](https://circleci.com).
#' * `browse_cran()`: Visits the package on CRAN, via the canonical URL.
#'
#' @param package Name of package. If `NULL`, the active project is targeted,
#' regardless of whether it's an R package or not.
#' @param number Optional, to specify an individual GitHub issue or pull
#' request. Can be a number or `"new"`.
#'
#' @examples
#' # works on the active project
#' # browse_project()
#'
#' browse_package("httr")
#' browse_github("gh")
#' browse_github_issues("fs")
#' browse_github_issues("fs", 1)
#' browse_github_pulls("curl")
#' browse_github_pulls("curl", 183)
#' browse_cran("MASS")
#' @name browse-this
NULL
#' @export
#' @rdname browse-this
browse_package <- function(package = NULL) {
maybe_name(package)
if (is.null(package)) {
check_is_project()
}
urls <- character()
details <- list()
if (is.null(package) && uses_git()) {
grl <- github_remote_list(these = NULL)
ord <- c(
which(grl$remote == "origin"),
which(grl$remote == "upstream"),
which(!grl$remote %in% c("origin", "upstream"))
)
grl <- grl[ord, ]
grl <- set_names(grl$url, nm = grl$remote)
parsed <- parse_github_remotes(grl)
urls <- c(urls, glue_data(parsed, "https://{host}/{repo_owner}/{repo_name}"))
details <- c(details, map(parsed$name, ~ glue("{ui_value(.x)} remote")))
}
desc_urls_dat <- desc_urls(package, include_cran = TRUE)
urls <- c(urls, desc_urls_dat$url)
details <- c(
details,
map(
desc_urls_dat$desc_field,
~ if (is.na(.x)) "CRAN" else glue("{ui_field(.x)} field in DESCRIPTION")
)
)
if (length(urls) == 0) {
ui_oops("Can't find any URLs")
return(invisible(character()))
}
if (!is_interactive()) {
return(invisible(urls))
}
prompt <- "Which URL do you want to visit? (0 to exit)"
pretty <- purrr::map2(
format(urls, justify = "left"), details,
~ glue("{.x} ({.y})")
)
choice <- utils::menu(title = prompt, choices = pretty)
if (choice == 0) {
return(invisible(character()))
}
view_url(urls[choice])
}
#' @export
#' @rdname browse-this
browse_project <- function() browse_package(NULL)
#' @export
#' @rdname browse-this
browse_github <- function(package = NULL) {
view_url(github_url(package))
}
#' @export
#' @rdname browse-this
browse_github_issues <- function(package = NULL, number = NULL) {
view_url(github_url(package), "issues", number)
}
#' @export
#' @rdname browse-this
browse_github_pulls <- function(package = NULL, number = NULL) {
pull <- if (is.null(number)) "pulls" else "pull"
view_url(github_url(package), pull, number)
}
#' @export
#' @rdname browse-this
browse_github_actions <- function(package = NULL) {
view_url(github_url(package), "actions")
}
#' @export
#' @rdname browse-this
browse_circleci <- function(package = NULL) {
gh <- github_url(package)
circle_url <- "circleci.com/gh"
view_url(sub("github.com", circle_url, gh))
}
#' @export
#' @rdname browse-this
browse_cran <- function(package = NULL) {
view_url(cran_home(package))
}
# Try to get a GitHub repo spec from these places:
# 1. Remotes associated with GitHub (active project)
# 2. BugReports/URL fields of DESCRIPTION (active project or arbitrary
# installed package)
github_url <- function(package = NULL) {
maybe_name(package)
if (is.null(package)) {
check_is_project()
url <- github_url_from_git_remotes()
if (!is.null(url)) {
return(url)
}
}
desc_urls_dat <- desc_urls(package)
if (is.null(desc_urls_dat)) {
if (is.null(package)) {
ui_stop("
Project {ui_value(project_name())} has no DESCRIPTION file and \\
has no GitHub remotes configured
No way to discover URLs")
} else {
ui_stop("
Can't find DESCRIPTION for package {ui_value(package)} locally \\
or on CRAN
No way to discover URLs")
}
}
desc_urls_dat <- desc_urls_dat[desc_urls_dat$is_github, ]
if (nrow(desc_urls_dat) > 0) {
parsed <- parse_github_remotes(desc_urls_dat$url[[1]])
return(glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}"))
}
if (is.null(package)) {
ui_stop("
Project {ui_value(project_name())} has no GitHub remotes configured \\
and has no GitHub URLs in DESCRIPTION")
}
ui_warn("
Package {ui_value(package)} has no GitHub URLs in DESCRIPTION
Trying the GitHub CRAN mirror")
glue_chr("https://github.com/cran/{package}")
}
cran_home <- function(package = NULL) {
package <- package %||% project_name()
glue_chr("https://cran.r-project.org/package={package}")
}
# returns NULL, if no DESCRIPTION found
# returns 0-row data frame, if DESCRIPTION holds no URLs
# returns data frame, if successful
# include_cran whether to include CRAN landing page, if we consult it
desc_urls <- function(package = NULL, include_cran = FALSE, desc = NULL) {
maybe_desc <- purrr::possibly(desc::desc, otherwise = NULL)
desc_from_cran <- FALSE
if (is.null(desc)) {
if (is.null(package)) {
desc <- maybe_desc(file = proj_get())
if (is.null(desc)) {
return()
}
} else {
desc <- maybe_desc(package = package)
if (is.null(desc)) {
cran_desc_url <-
glue("https://cran.rstudio.com/web/packages/{package}/DESCRIPTION")
suppressWarnings(
desc <- maybe_desc(text = readLines(cran_desc_url))
)
if (is.null(desc)) {
return()
}
desc_from_cran <- TRUE
}
}
}
url <- desc$get_urls()
bug_reports <- desc$get_field("BugReports", default = character())
cran <-
if (include_cran && desc_from_cran) cran_home(package) else character()
dat <- data.frame(
desc_field = c(
rep_len("URL", length.out = length(url)),
rep_len("BugReports", length.out = length(bug_reports)),
rep_len(NA, length.out = length(cran))
),
url = c(url, bug_reports, cran),
stringsAsFactors = FALSE
)
dat <- cbind(dat, re_match(dat$url, github_remote_regex))
# TODO: could have a more sophisticated understanding of GitHub deployments
dat$is_github <- !is.na(dat$.match) & grepl("github", dat$host)
dat[c("url", "desc_field", "is_github")]
}
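# Illustrative note (hypothetical DESCRIPTION): for a package whose DESCRIPTION
# contains
#   URL: https://example.org, https://github.com/OWNER/REPO
#   BugReports: https://github.com/OWNER/REPO/issues
# desc_urls() returns a data frame with one row per URL and the columns `url`,
# `desc_field` ("URL", "BugReports", or NA for the CRAN landing page), and
# `is_github` (TRUE for URLs that match the GitHub remote pattern).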
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/browse.R
|
#' Continuous integration setup and badges
#'
#' @description
#' `r lifecycle::badge("questioning")`
#'
#' These functions are not actively used by the tidyverse team, and may not
#' currently work. Use at your own risk.
#'
#' Sets up third-party continuous integration (CI) services for an R package
#' on GitLab or CircleCI. These functions:
#'
#' * Add service-specific configuration files and add them to `.Rbuildignore`.
#' * Activate a service or give the user a detailed prompt.
#' * Provide the markdown to insert a badge into README.
#'
#' @section `use_gitlab_ci()`:
#' Adds a basic `.gitlab-ci.yml` to the top-level directory of a package. This
#' is a configuration file for the [GitLab
#' CI/CD](https://docs.gitlab.com/ee/ci/) continuous integration service.
#' @export
use_gitlab_ci <- function() {
check_uses_git()
new <- use_template(
"gitlab-ci.yml",
".gitlab-ci.yml",
ignore = TRUE
)
if (!new) {
return(invisible(FALSE))
}
invisible(TRUE)
}
#' @section `use_circleci()`:
#' Adds a basic `.circleci/config.yml` to the top-level directory of a package.
#' This is a configuration file for the [CircleCI](https://circleci.com/)
#' continuous integration service.
#' @param browse Open a browser window to enable automatic builds for the
#' package.
#' @param image The Docker image to use for the build. Must be available on
#' [DockerHub](https://hub.docker.com). The
#' [rocker/verse](https://hub.docker.com/r/rocker/verse) image includes
#' TeXLive, pandoc, and the tidyverse packages. For a minimal image, try
#' [rocker/r-ver](https://hub.docker.com/r/rocker/r-ver). To specify a version
#' of R, change the tag from `latest` to the version you want, e.g.
#' `rocker/r-ver:3.5.3`.
#' @export
#' @rdname use_gitlab_ci
use_circleci <- function(browse = rlang::is_interactive(),
image = "rocker/verse:latest") {
repo_spec <- target_repo_spec()
use_directory(".circleci", ignore = TRUE)
new <- use_template(
"circleci-config.yml",
".circleci/config.yml",
data = list(package = project_name(), image = image),
ignore = TRUE
)
if (!new) {
return(invisible(FALSE))
}
use_circleci_badge(repo_spec)
circleci_activate(spec_owner(repo_spec), browse)
invisible(TRUE)
}
#' @section `use_circleci_badge()`:
#' Only adds the [Circle CI](https://circleci.com/) badge. Use for a project
#' where Circle CI is already configured.
#' @rdname use_gitlab_ci
#' @eval param_repo_spec()
#' @export
use_circleci_badge <- function(repo_spec = NULL) {
repo_spec <- repo_spec %||% target_repo_spec()
url <- glue("https://circleci.com/gh/{repo_spec}")
img <- glue("{url}.svg?style=svg")
use_badge("CircleCI build status", url, img)
}
circleci_activate <- function(owner, browse = is_interactive()) {
url <- glue("https://circleci.com/add-projects/gh/{owner}")
ui_todo("Turn on CircleCI for your repo at {url}")
if (browse) {
utils::browseURL(url)
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/ci.R
|
#' Create a CITATION template
#'
#' Use this if you want to encourage users of your package to cite an
#' article or book.
#'
#' @export
use_citation <- function() {
check_is_package()
use_directory("inst")
use_template(
"citation-template.R",
path("inst", "CITATION"),
open = TRUE
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/citation.R
|
#' Add a code of conduct
#'
#' Adds a `CODE_OF_CONDUCT.md` file to the active project and lists in
#' `.Rbuildignore`, in the case of a package. The goal of a code of conduct is
#' to foster an environment of inclusiveness, and to explicitly discourage
#' inappropriate behaviour. The template comes from
#' <https://www.contributor-covenant.org>, version 2.1:
#' <https://www.contributor-covenant.org/version/2/1/code_of_conduct/>.
#'
#' If your package is going to CRAN, the link to the CoC in your README must
#' be an absolute link to a rendered website as `CODE_OF_CONDUCT.md` is not
#' included in the package sent to CRAN. `use_code_of_conduct()` will
#' automatically generate this link if (1) you use pkgdown and (2) have set the
#' `url` field in `_pkgdown.yml`; otherwise it will link to a copy of the CoC
#' on <https://www.contributor-covenant.org>.
#'
#' @param contact Contact details for making a code of conduct report.
#' Usually an email address.
#' @param path Path of the directory to put `CODE_OF_CONDUCT.md` in, relative to
#' the active project. Passed along to [use_directory()]. Default is to locate
#' at top-level, but `.github/` is also common.
#'
#' @export
use_code_of_conduct <- function(contact, path = NULL) {
if (missing(contact)) {
ui_stop("
{ui_code('use_code_of_conduct()')} requires contact details in \\
first argument")
}
new <- use_coc(contact = contact, path = path)
href <- pkgdown_url(pedantic = TRUE) %||%
"https://contributor-covenant.org/version/2/1"
href <- sub("/$", "", href)
href <- paste0(href, "/CODE_OF_CONDUCT.html")
ui_todo("You may also want to describe the code of conduct in your README:")
ui_code_block("
## Code of Conduct
Please note that the {project_name()} project is released with a \\
[Contributor Code of Conduct]({href}). By contributing to this project, \\
you agree to abide by its terms."
)
invisible(new)
}
use_coc <- function(contact, path = NULL) {
if (!is.null(path)) {
use_directory(path, ignore = is_package())
}
save_as <- path_join(c(path, "CODE_OF_CONDUCT.md"))
use_template(
"CODE_OF_CONDUCT.md",
save_as = save_as,
data = list(contact = contact),
ignore = is_package() && is.null(path)
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/code-of-conduct.R
|
## see end of file for some cURL notes
#' Download and unpack a ZIP file
#'
#' Functions to download and unpack a ZIP file into a local folder of files,
#' with very intentional default behaviour. Useful in pedagogical settings or
#' anytime you need a large audience to download a set of files quickly and
#' actually be able to find them. The underlying helpers are documented in
#' [use_course_details].
#'
#' @param url Link to a ZIP file containing the materials. To reduce the chance
#' of typos in live settings, these shorter forms are accepted:
#'
#' * GitHub repo spec: "OWNER/REPO". Equivalent to
#' `https://github.com/OWNER/REPO/DEFAULT_BRANCH.zip`.
#' * bit.ly or rstd.io shortlinks: "bit.ly/xxx-yyy-zzz" or "rstd.io/foofy".
#' The instructor must then arrange for the shortlink to point to a valid
#' download URL for the target ZIP file. The helper
#' [create_download_url()] helps to create such URLs for GitHub, DropBox,
#' and Google Drive.
#' @param destdir Destination for the new folder. Defaults to the location
#' stored in the global option `usethis.destdir`, if defined, or to the user's
#' Desktop or similarly conspicuous place otherwise.
#' @param cleanup Whether to delete the original ZIP file after unpacking its
#' contents. In an interactive setting, `NA` leads to a menu where user can
#' approve the deletion (or decline).
#'
#' @return Path to the new directory holding the unpacked ZIP file, invisibly.
#' @name zip-utils
#' @examples
#' \dontrun{
#' # download the source of usethis from GitHub, behind a bit.ly shortlink
#' use_course("bit.ly/usethis-shortlink-example")
#' use_course("http://bit.ly/usethis-shortlink-example")
#'
#' # download the source of rematch2 package from CRAN
#' use_course("https://cran.r-project.org/bin/windows/contrib/3.4/rematch2_2.0.1.zip")
#'
#' # download the source of rematch2 package from GitHub, 4 ways
#' use_course("r-lib/rematch2")
#' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/HEAD")
#' use_course("https://api.github.com/repos/r-lib/rematch2/zipball/main")
#' use_course("https://github.com/r-lib/rematch2/archive/main.zip")
#' }
NULL
#' @describeIn zip-utils
#'
#' Designed with live workshops in mind. Includes intentional friction to
#' highlight the download destination. Workflow:
#' * User executes, e.g., `use_course("bit.ly/xxx-yyy-zzz")`.
#' * User is asked to notice and confirm the location of the new folder. Specify
#' `destdir` or configure the `"usethis.destdir"` option to prevent this.
#' * User is asked if they'd like to delete the ZIP file.
#' * If new folder contains an `.Rproj` file, a new instance of RStudio is
#' launched. Otherwise, the folder is opened in the file manager, e.g. Finder
#' or File Explorer.
#' @export
use_course <- function(url, destdir = getOption("usethis.destdir")) {
url <- normalize_url(url)
destdir_not_specified <- is.null(destdir)
destdir <- user_path_prep(destdir %||% conspicuous_place())
check_path_is_directory(destdir)
if (destdir_not_specified && is_interactive()) {
ui_line(c(
"Downloading into {ui_path(destdir)}.",
"Prefer a different location? Cancel, try again, and specify {ui_code('destdir')}"
))
if (ui_nope("OK to proceed?")) {
ui_oops("Cancelling download.")
return(invisible())
}
}
ui_done("Downloading from {ui_value(url)}")
zipfile <- tidy_download(url, destdir)
ui_done("Download stored in {ui_path(zipfile)}")
check_is_zip(attr(zipfile, "content-type"))
tidy_unzip(zipfile, cleanup = NA)
}
#' @describeIn zip-utils
#'
#' More useful in day-to-day work. Downloads into the current working directory
#' by default and allows `cleanup` behaviour to be specified.
#' @export
use_zip <- function(url,
destdir = getwd(),
cleanup = if (rlang::is_interactive()) NA else FALSE) {
url <- normalize_url(url)
check_path_is_directory(destdir)
ui_done("Downloading from {ui_value(url)}")
zipfile <- tidy_download(url, destdir)
ui_done("Download stored in {ui_path(zipfile)}")
check_is_zip(attr(zipfile, "content-type"))
tidy_unzip(zipfile, cleanup)
}
#' Helpers to download and unpack a ZIP file
#'
#' @description
#' Details on the internal and helper functions that power [use_course()] and
#' [use_zip()]. Only `create_download_url()` is exported.
#'
#' @name use_course_details
#' @keywords internal
#' @usage
#' tidy_download(url, destdir = getwd())
#' tidy_unzip(zipfile, cleanup = FALSE)
#'
#' @aliases tidy_download tidy_unzip
#' @param url A GitHub, DropBox, or Google Drive URL.
#' * For `create_download_url()`: A URL copied from a web browser.
#' * For `tidy_download()`: A download link for a ZIP file, possibly behind a
#' shortlink or other redirect. `create_download_url()` can be helpful for
#' creating this URL from typical browser URLs.
#' @param destdir Path to existing local directory where the ZIP file will be
#' stored. Defaults to current working directory, but note that [use_course()]
#' has different default behavior.
#' @param zipfile Path to local ZIP file.
#' @param cleanup Whether to delete the ZIP file after unpacking. In an
#' interactive session, `cleanup = NA` leads to asking the user if they
#' want to delete or keep the ZIP file.
#' @section tidy_download():
#'
#' ```
#' # how it's used inside use_course()
#' tidy_download(
#' # url has been processed with internal helper normalize_url()
#' url,
#' # conspicuous_place() = `getOption('usethis.destdir')` or desktop or home
#' # directory or working directory
#' destdir = destdir %||% conspicuous_place()
#' )
#' ```
#'
#' Special-purpose function to download a ZIP file and automatically determine
#' the file name, which often determines the folder name after unpacking.
#' Developed with DropBox and GitHub as primary targets, possibly via
#' shortlinks. Both platforms offer a way to download an entire folder or repo
#' as a ZIP file, with information about the original folder or repo transmitted
#' in the `Content-Disposition` header. In the absence of this header, a
#' filename is generated from the input URL. In either case, the filename is
#' sanitized. Returns the path to downloaded ZIP file, invisibly.
#'
#' `tidy_download()` is set up to retry after a download failure. In an
#' interactive session, it asks for user's consent. All retries use a longer
#' connect timeout.
#'
#' ## DropBox
#'
#' To make a folder available for ZIP download, create a shared link for it:
#' * <https://help.dropbox.com/share/create-and-share-link>
#'
#' A shared link will have this form:
#' ```
#' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0
#' ```
#' Replace the `dl=0` at the end with `dl=1` to create a download link:
#' ```
#' https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=1
#' ```
#' You can use `create_download_url()` to do this conversion.
#'
#' This download link (or a shortlink that points to it) is suitable as input
#' for `tidy_download()`. After one or more redirections, this link will
#' eventually lead to a download URL. For more details, see
#' <https://help.dropbox.com/share/force-download> and
#' <https://help.dropbox.com/sync/download-entire-folders>.
#'
#' ## GitHub
#'
#' Click on the repo's "Clone or download" button to reveal a "Download ZIP"
#' button. Capture this URL, which will have this form:
#' ```
#' https://github.com/r-lib/usethis/archive/main.zip
#' ```
#' This download link (or a shortlink that points to it) is suitable as input
#' for `tidy_download()`. After one or more redirections, this link will
#' eventually lead to a download URL. Here are other links that also lead to
#' ZIP download, albeit with a different filenaming scheme (REF could be a
#' branch name, a tag, or a SHA):
#' ```
#' https://github.com/r-lib/usethis/zipball/HEAD
#' https://api.github.com/repos/r-lib/rematch2/zipball/REF
#' https://api.github.com/repos/r-lib/rematch2/zipball/HEAD
#' https://api.github.com/repos/r-lib/usethis/zipball/REF
#' ```
#'
#' You can use `create_download_url()` to create the "Download ZIP" URL from
#' a typical GitHub browser URL.
#'
#' ## Google Drive
#'
#' To our knowledge, it is not possible to download a Google Drive folder as a
#' ZIP archive. It is however possible to share a ZIP file stored on Google
#' Drive. To get its URL, click on "Get the shareable link" (within the "Share"
#' menu). This URL doesn't allow for direct download, as it's designed to be
#' processed in a web browser first. Such a sharing link looks like:
#'
#' ```
#' https://drive.google.com/open?id=123456789xxyyyzzz
#' ```
#'
#' To be able to get the URL suitable for direct download, you need to extract
#' the "id" element from the URL and include it in this URL format:
#'
#' ```
#' https://drive.google.com/uc?export=download&id=123456789xxyyyzzz
#' ```
#'
#' Use `create_download_url()` to perform this transformation automatically.
#'
#' @section tidy_unzip():
#'
#' Special-purpose function to unpack a ZIP file and (attempt to) create the
#' directory structure most people want. When unpacking an archive, it is easy
#' to get one more or one less level of nesting than you expected.
#'
#' It's especially important to finesse the directory structure here: we want
#' the same local result when unzipping the same content from either GitHub or
#' DropBox ZIP files, which pack things differently. Here is the intent:
#' * If the ZIP archive `foo.zip` does not contain a single top-level directory,
#' i.e. it is packed as "loose parts", unzip into a directory named `foo`.
#' Typical of DropBox ZIP files.
#' * If the ZIP archive `foo.zip` has a single top-level directory (which, by
#' the way, is not necessarily called "foo"), unpack into said directory.
#' Typical of GitHub ZIP files.
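#'
#' An illustrative sketch of the two cases (hypothetical file listings):
#' ```
#' # loose parts (typical of DropBox):
#' #   foo.zip contains a.R, b.R --> unpacked as foo/a.R, foo/b.R
#' # single top-level directory (typical of GitHub):
#' #   foo.zip contains foo-main/a.R --> unpacked as foo-main/a.R
#' ```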
#'
#' Returns path to the directory holding the unpacked files, invisibly.
#'
#' **DropBox:**
#' The ZIP files produced by DropBox are special. The file list tends to contain
#' a spurious directory `"/"`, which we ignore during unzip. Also, if the
#' directory is a Git repo and/or RStudio Project, we unzip-ignore various
#' hidden files, such as `.RData`, `.Rhistory`, and those below `.git/` and
#' `.Rproj.user`.
#'
#' @examples
#' \dontrun{
#' tidy_download("https://github.com/r-lib/rematch2/archive/main.zip")
#' tidy_unzip("rematch2-main.zip")
#' }
NULL
# 1. downloads from `url`
# 2. calls a retry-capable helper to download the ZIP file
# 3. determines filename from the Content-Disposition header (with fallbacks)
# 4. returned path has content-type and content-disposition as attributes
tidy_download <- function(url, destdir = getwd()) {
check_path_is_directory(destdir)
tmp <- file_temp("tidy-download-")
h <- download_url(url, destfile = tmp)
ui_line()
cd <- content_disposition(h)
base_name <- make_filename(cd, fallback = path_file(url))
full_path <- path(destdir, base_name)
if (!can_overwrite(full_path)) {
ui_stop("Cancelling download, to avoid overwriting {ui_path(full_path)}")
}
attr(full_path, "content-type") <- content_type(h)
attr(full_path, "content-disposition") <- cd
file_move(tmp, full_path)
invisible(full_path)
}
download_url <- function(url,
destfile,
handle = curl::new_handle(),
n_tries = 3,
retry_connecttimeout = 40L) {
handle_options <- list(noprogress = FALSE, progressfunction = progress_fun)
curl::handle_setopt(handle, .list = handle_options)
we_should_retry <- function(i, n_tries, status) {
if (i >= n_tries) {
FALSE
} else if (inherits(status, "error")) {
# TODO: find a way to detect a (connect) timeout more specifically?
# https://github.com/jeroen/curl/issues/154
# https://ec.haxx.se/usingcurl/usingcurl-timeouts
# "Failing to connect within the given time will cause curl to exit with a
# timeout exit code (28)."
# (however, note that all timeouts lead to this same exit code)
# https://ec.haxx.se/usingcurl/usingcurl-returns
# "28. Operation timeout. The specified time-out period was reached
# according to the conditions. curl offers several timeouts, and this exit
# code tells one of those timeout limits were reached."
# https://github.com/curl/curl/blob/272282a05416e42d2cc4a847a31fd457bc6cc827/lib/strerror.c#L143-L144
# "Timeout was reached" <-- actual message we could potentially match
TRUE
} else {
FALSE
}
}
status <- try_download(url, destfile, handle = handle)
if (inherits(status, "error") && is_interactive()) {
ui_oops(status$message)
if (ui_nope("
Download failed :(
See above for everything we know about why it failed.
Shall we try a couple more times, with a longer timeout?
")) {
n_tries <- 1
}
}
i <- 1
# invariant: we have made i download attempts
while (we_should_retry(i, n_tries, status)) {
if (i == 1) {
curl::handle_setopt(
handle,
.list = c(connecttimeout = retry_connecttimeout)
)
}
i <- i + 1
ui_info("Retrying download ... attempt {i}")
status <- try_download(url, destfile, handle = handle)
}
if (inherits(status, "error")) {
stop(status)
}
invisible(handle)
}
try_download <- function(url, destfile, quiet = FALSE, mode = "wb", handle) {
tryCatch(
curl::curl_download(
url = url,
destfile = destfile,
quiet = quiet,
mode = mode,
handle = handle
),
error = function(e) e
)
}
tidy_unzip <- function(zipfile, cleanup = FALSE) {
base_path <- path_dir(zipfile)
filenames <- utils::unzip(zipfile, list = TRUE)[["Name"]]
## deal with DropBox's peculiar habit of including "/" as a file --> drop it
filenames <- filenames[filenames != "/"]
## DropBox ZIP files often include lots of hidden R, RStudio, and Git files
filenames <- filenames[keep_lgl(filenames)]
td <- top_directory(filenames)
loose_parts <- is.na(td)
if (loose_parts) {
target <- path_ext_remove(zipfile)
utils::unzip(zipfile, files = filenames, exdir = target)
} else {
target <- path(base_path, td)
utils::unzip(zipfile, files = filenames, exdir = base_path)
}
ui_done(
"Unpacking ZIP file into {ui_path(target, base_path)} \\
({length(filenames)} files extracted)"
)
if (isNA(cleanup)) {
cleanup <- is_interactive() &&
ui_yeah("Shall we delete the ZIP file ({ui_path(zipfile, base_path)})?")
}
if (isTRUE(cleanup)) {
ui_done("Deleting {ui_path(zipfile, base_path)}")
file_delete(zipfile)
}
if (is_interactive()) {
rproj_path <- rproj_paths(target)
if (length(rproj_path) == 1 && rstudioapi::hasFun("openProject")) {
ui_done("Opening project in RStudio")
rstudioapi::openProject(target, newSession = TRUE)
} else if (!in_rstudio_server()) {
ui_done("Opening {ui_path(target, base_path)} in the file manager")
utils::browseURL(path_real(target))
}
}
invisible(target)
}
#' @rdname use_course_details
#' @examples
#' # GitHub
#' create_download_url("https://github.com/r-lib/usethis")
#' create_download_url("https://github.com/r-lib/usethis/issues")
#'
#' # DropBox
#' create_download_url("https://www.dropbox.com/sh/12345abcde/6789wxyz?dl=0")
#'
#' # Google Drive
#' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz")
#' create_download_url("https://drive.google.com/open?id=123456789xxyyyzzz/view")
#' @export
create_download_url <- function(url) {
check_name(url)
stopifnot(grepl("^http[s]?://", url))
switch(
classify_url(url),
drive = modify_drive_url(url),
dropbox = modify_dropbox_url(url),
github = modify_github_url(url),
hopeless_url(url)
)
}
classify_url <- function(url) {
if (grepl("drive.google.com", url)) {
return("drive")
}
if (grepl("dropbox.com/sh", url)) {
return("dropbox")
}
if (grepl("github.com", url)) {
return("github")
}
"unknown"
}
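# Illustrative calls with hypothetical URLs, based on the patterns matched above:
# classify_url("https://drive.google.com/open?id=123abc")         #> "drive"
# classify_url("https://www.dropbox.com/sh/12345abcde/6789?dl=0") #> "dropbox"
# classify_url("https://github.com/r-lib/usethis")                #> "github"
# classify_url("https://example.com/foo.zip")                     #> "unknown"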
modify_drive_url <- function(url) {
# id-isolating approach taken from the gargle / googleverse
id_loc <- regexpr("/d/([^/])+|/folders/([^/])+|id=([^/])+", url)
if (id_loc == -1) {
return(hopeless_url(url))
}
id <- gsub("/d/|/folders/|id=", "", regmatches(url, id_loc))
glue_chr("https://drive.google.com/uc?export=download&id={id}")
}
modify_dropbox_url <- function(url) {
gsub("dl=0", "dl=1", url)
}
modify_github_url <- function(url) {
# TO CONSIDER: one could use the API for this, which might be more proper and
# would work if auth is needed
# https://docs.github.com/en/free-pro-team@latest/rest/reference/repos#download-a-repository-archive-zip
# https://api.github.com/repos/OWNER/REPO/zipball/
# but then, in big workshop settings, we might see rate limit problems or
# get blocked because of too many token-free requests from same IP
parsed <- parse_github_remotes(url)
glue_data_chr(parsed, "{protocol}://{host}/{repo_owner}/{repo_name}/zipball/HEAD")
}
hopeless_url <- function(url) {
ui_info(
"URL does not match a recognized form for Google Drive or DropBox. \\
No change made."
)
url
}
normalize_url <- function(url) {
check_name(url)
has_scheme <- grepl("^http[s]?://", url)
if (has_scheme) {
return(url)
}
if (!is_shortlink(url)) {
url <- tryCatch(
expand_github(url),
error = function(e) url
)
}
paste0("https://", url)
}
is_shortlink <- function(url) {
shortlink_hosts <- c("rstd\\.io", "bit\\.ly")
any(map_lgl(shortlink_hosts, grepl, x = url))
}
expand_github <- function(url) {
# mostly to handle errors in the spec
repo_spec <- parse_repo_spec(url)
glue_data_chr(repo_spec, "github.com/{owner}/{repo}/zipball/HEAD")
}
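# Illustrative sketch of normalize_url() with hypothetical inputs (the GitHub
# case assumes parse_repo_spec() resolves the "OWNER/REPO" spec as usual):
# normalize_url("https://bit.ly/xyz") # already has a scheme --> returned as is
# normalize_url("bit.ly/xyz")         #> "https://bit.ly/xyz"
# normalize_url("r-lib/usethis")      #> "https://github.com/r-lib/usethis/zipball/HEAD"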
conspicuous_place <- function() {
destdir_opt <- getOption("usethis.destdir")
if (!is.null(destdir_opt)) {
return(path_tidy(destdir_opt))
}
Filter(dir_exists, c(
path_home("Desktop"),
path_home(),
path_home_r(),
path_tidy(getwd())
))[[1]]
}
keep_lgl <- function(file,
                     ignores = c(
                       ".Rproj.user", ".rproj.user", ".Rhistory", ".RData",
                       ".git", "__MACOSX", ".DS_Store"
                     )) {
ignores <- paste0(
"((\\/|\\A)", gsub("\\.", "[.]", ignores), "(\\/|\\Z))",
collapse = "|"
)
!grepl(ignores, file, perl = TRUE)
}
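# Illustrative call with hypothetical paths and the default `ignores`:
# keep_lgl(c("foo/R/code.R", "foo/.git/config", "foo/.DS_Store"))
# #> TRUE FALSE FALSE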
top_directory <- function(filenames) {
in_top <- path_dir(filenames) == "."
unique_top <- unique(filenames[in_top])
is_directory <- grepl("/$", unique_top)
if (length(unique_top) > 1 || !is_directory) {
NA_character_
} else {
unique_top
}
}
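# Illustrative calls with hypothetical ZIP listings:
# top_directory(c("foo-main/", "foo-main/R/code.R")) #> "foo-main/"
# top_directory(c("a.R", "b.R"))                     #> NA ("loose parts")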
content_type <- function(h) {
headers <- curl::parse_headers_list(curl::handle_data(h)$headers)
headers[["content-type"]]
}
content_disposition <- function(h) {
headers <- curl::parse_headers_list(curl::handle_data(h)$headers)
cd <- headers[["content-disposition"]]
if (is.null(cd)) {
return()
}
parse_content_disposition(cd)
}
check_is_zip <- function(ct) {
# "https://www.fueleconomy.gov/feg/epadata/16data.zip" comes with
# MIME type "application/x-zip-compressed"
# see https://github.com/r-lib/usethis/issues/573
allowed <- c("application/zip", "application/x-zip-compressed")
if (!ct %in% allowed) {
ui_stop(c(
"Download does not have MIME type {ui_value('application/zip')}.",
"Instead it's {ui_value(ct)}."
))
}
invisible(ct)
}
## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition
## https://tools.ietf.org/html/rfc6266
## DropBox eg: "attachment; filename=\"foo.zip\"; filename*=UTF-8''foo.zip\"
## GitHub eg: "attachment; filename=foo-main.zip"
# https://stackoverflow.com/questions/30193569/get-content-disposition-parameters
# http://test.greenbytes.de/tech/tc2231/
parse_content_disposition <- function(cd) {
if (!grepl("^attachment;", cd)) {
ui_stop(c(
"{ui_code('Content-Disposition')} header doesn't start with {ui_value('attachment')}.",
"Actual header: {ui_value(cd)}"
))
}
cd <- sub("^attachment;\\s*", "", cd, ignore.case = TRUE)
cd <- strsplit(cd, "\\s*;\\s*")[[1]]
cd <- strsplit(cd, "=")
stats::setNames(
vapply(cd, `[[`, character(1), 2),
vapply(cd, `[[`, character(1), 1)
)
}
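# Illustrative call, using the GitHub-style header quoted above:
# parse_content_disposition("attachment; filename=foo-main.zip")
# #> filename
# #> "foo-main.zip"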
progress_fun <- function(down, up) {
total <- down[[1]]
now <- down[[2]]
pct <- if (length(total) && total > 0) {
paste0("(", round(now / total * 100), "%)")
} else {
""
}
if (now > 10000) {
cat("\rDownloaded:", sprintf("%.2f", now / 2^20), "MB ", pct)
}
TRUE
}
make_filename <- function(cd,
fallback = path_file(file_temp())) {
## TO DO(jennybc): the element named 'filename*' is preferred but I'm not
## sure how to parse it yet, so targeting 'filename' for now
## https://tools.ietf.org/html/rfc6266
cd <- cd[["filename"]]
if (is.null(cd) || is.na(cd)) {
check_name(fallback)
return(path_sanitize(fallback))
}
## I know I could use regex and lookahead but this is easier for me to
## maintain
cd <- sub("^\"(.+)\"$", "\\1", cd)
path_sanitize(cd)
}
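# Illustrative calls with hypothetical inputs:
# make_filename(c(filename = '"foo.zip"'))            #> "foo.zip"
# make_filename(NULL, fallback = "usethis-main.zip")  #> "usethis-main.zip"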
## https://stackoverflow.com/questions/21322614/use-curl-to-download-a-dropbox-folder-via-shared-link-not-public-link
## lesson: if using cURL, you'd want these options
## -L, --location (follow redirects)
## -O, --remote-name (name local file like the file part of remote name)
## -J, --remote-header-name (tells -O option to consult Content-Disposition
## instead of the URL)
## https://curl.haxx.se/docs/manpage.html#OPTIONS
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/course.R
|
#' Test coverage
#'
#' Adds test coverage reporting to a package, using either Codecov
#' (`https://codecov.io`) or Coveralls (`https://coveralls.io`).
#'
#' @param type Which web service to use.
#' @eval param_repo_spec()
#' @export
use_coverage <- function(type = c("codecov", "coveralls"), repo_spec = NULL) {
repo_spec <- repo_spec %||% target_repo_spec()
type <- match.arg(type)
if (type == "codecov") {
new <- use_template("codecov.yml", ignore = TRUE)
if (!new) {
return(invisible(FALSE))
}
} else if (type == "coveralls") {
ui_todo("Turn on coveralls for this repo at https://coveralls.io/repos/new")
}
switch(
type,
codecov = use_codecov_badge(repo_spec),
coveralls = use_coveralls_badge(repo_spec)
)
ui_todo("
Call {ui_code('use_github_action(\"test-coverage\")')} to continuously \\
monitor test coverage.")
invisible(TRUE)
}
#' @export
#' @rdname use_coverage
#' @param files Character vector of file globs.
use_covr_ignore <- function(files) {
use_build_ignore(".covrignore")
write_union(proj_path(".covrignore"), files)
}
use_codecov_badge <- function(repo_spec) {
default_branch <- git_default_branch()
url <- glue("https://app.codecov.io/gh/{repo_spec}?branch={default_branch}")
img <- glue("https://codecov.io/gh/{repo_spec}/branch/{default_branch}/graph/badge.svg")
use_badge("Codecov test coverage", url, img)
}
use_coveralls_badge <- function(repo_spec) {
default_branch <- git_default_branch()
url <- glue("https://coveralls.io/r/{repo_spec}?branch={default_branch}")
img <- glue("https://coveralls.io/repos/github/{repo_spec}/badge.svg")
use_badge("Coveralls test coverage", url, img)
}
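# Illustrative: for a hypothetical repo_spec "OWNER/REPO" with default branch
# "main", the Codecov badge links to
# https://app.codecov.io/gh/OWNER/REPO?branch=main and the Coveralls badge
# links to https://coveralls.io/r/OWNER/REPO?branch=main.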
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/coverage.R
|
#' Use C++ via the cpp11 package
#'
#' Adds infrastructure needed to use the [cpp11](https://cpp11.r-lib.org)
#' package, a header-only R package that helps R package developers handle R
#' objects with C++ code:
#' * Creates `src/`
#' * Adds cpp11 to `DESCRIPTION`
#' * Creates `src/code.cpp`, an initial placeholder `.cpp` file
#'
#' @export
use_cpp11 <- function() {
check_installed("cpp11")
check_is_package("use_cpp11()")
check_uses_roxygen("use_cpp11()")
use_src()
use_dependency("cpp11", "LinkingTo")
use_template(
"code-cpp11.cpp",
path("src", "code.cpp"),
open = is_interactive()
)
check_cpp_register_deps()
invisible()
}
get_cpp_register_deps <- function() {
desc <- desc::desc(package = "cpp11")
desc$get_list("Config/Needs/cpp11/cpp_register")[[1]]
}
check_cpp_register_deps <- function() {
cpp_register_deps <- get_cpp_register_deps()
installed <- map_lgl(cpp_register_deps, is_installed)
if (!all(installed)) {
ui_todo("Now install {ui_value(cpp_register_deps[!installed])} to use cpp11.")
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/cpp11.R
|
#' CRAN submission comments
#'
#' Creates `cran-comments.md`, a template for your communications with CRAN when
#' submitting a package. The goal is to clearly communicate the steps you have
#' taken to check your package on a wide range of operating systems. If you are
#' submitting an update to a package that is used by other packages, you also
#' need to summarize the results of your [reverse dependency
#' checks][use_revdep].
#'
#' @export
#' @inheritParams use_template
use_cran_comments <- function(open = rlang::is_interactive()) {
check_is_package("use_cran_comments()")
use_template(
"cran-comments.md",
ignore = TRUE,
open = open
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/cran.R
|
#' Create a package or project
#'
#' @description
#' These functions create an R project:
#' * `create_package()` creates an R package
#' * `create_project()` creates a non-package project, i.e. a data analysis
#' project
#'
#' Both functions can be called on an existing project; you will be asked before
#' any existing files are changed.
#'
#' @inheritParams use_description
#' @param path A path. If it exists, it is used. If it does not exist, it is
#' created, provided that the parent path exists.
#' @param roxygen Do you plan to use roxygen2 to document your package?
#' @param rstudio If `TRUE`, calls [use_rstudio()] to make the new package or
#' project into an [RStudio
#' Project](https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects).
#' If `FALSE` and a non-package project, a sentinel `.here` file is placed so
#' that the directory can be recognized as a project by the
#' [here](https://here.r-lib.org) or
#' [rprojroot](https://rprojroot.r-lib.org) packages.
#' @param open If `TRUE`, [activates][proj_activate()] the new project:
#'
#' * If using RStudio desktop, the package is opened in a new session.
#' * If on RStudio server, the current RStudio project is activated.
#'   * Otherwise, the working directory and active project are changed.
#'
#' @return Path to the newly created project or package, invisibly.
#' @seealso [create_tidy_package()] is a convenience function that extends
#' `create_package()` by immediately applying as many of the tidyverse
#' development conventions as possible.
#' @export
create_package <- function(path,
fields = list(),
rstudio = rstudioapi::isAvailable(),
roxygen = TRUE,
check_name = TRUE,
open = rlang::is_interactive()) {
path <- user_path_prep(path)
check_path_is_directory(path_dir(path))
name <- path_file(path_abs(path))
if (check_name) {
check_package_name(name)
}
challenge_nested_project(path_dir(path), name)
challenge_home_directory(path)
create_directory(path)
local_project(path, force = TRUE)
use_directory("R")
proj_desc_create(name, fields, roxygen)
use_namespace(roxygen = roxygen)
if (rstudio) {
use_rstudio()
}
if (open) {
if (proj_activate(proj_get())) {
# working directory/active project already set; clear the scheduled
# restoration of the original project
withr::deferred_clear()
}
}
invisible(proj_get())
}
#' @export
#' @rdname create_package
create_project <- function(path,
rstudio = rstudioapi::isAvailable(),
open = rlang::is_interactive()) {
path <- user_path_prep(path)
name <- path_file(path_abs(path))
challenge_nested_project(path_dir(path), name)
challenge_home_directory(path)
create_directory(path)
local_project(path, force = TRUE)
use_directory("R")
if (rstudio) {
use_rstudio()
} else {
ui_done("Writing a sentinel file {ui_path('.here')}")
ui_todo("Build robust paths within your project via {ui_code('here::here()')}")
ui_todo("Learn more at <https://here.r-lib.org>")
file_create(proj_path(".here"))
}
if (open) {
if (proj_activate(proj_get())) {
# working directory/active project already set; clear the scheduled
# restoration of the original project
withr::deferred_clear()
}
}
invisible(proj_get())
}
#' Create a project from a GitHub repo
#'
#' @description
#' Creates a new local project and Git repository from a repo on GitHub, by
#' either cloning or
#' [fork-and-cloning](https://docs.github.com/en/get-started/quickstart/fork-a-repo).
#' In the fork-and-clone case, `create_from_github()` also does additional
#' remote and branch setup, leaving you in the perfect position to make a pull
#' request with [pr_init()], one of several [functions for working with pull
#' requests][pull-requests].
#'
#' `create_from_github()` works best when your GitHub credentials are
#' discoverable. See below for more about authentication.
#'
#' @template double-auth
#'
#' @seealso
#' * [use_github()] to go the opposite direction, i.e. create a GitHub repo
#' from your local repo
#' * [git_protocol()] for background on `protocol` (HTTPS vs SSH)
#' * [use_course()] to download a snapshot of all files in a GitHub repo,
#' without the need for any local or remote Git operations
#'
#' @inheritParams create_package
#' @param repo_spec A string identifying the GitHub repo in one of these forms:
#' * Plain `OWNER/REPO` spec
#' * Browser URL, such as `"https://github.com/OWNER/REPO"`
#' * HTTPS Git URL, such as `"https://github.com/OWNER/REPO.git"`
#' * SSH Git URL, such as `"[email protected]:OWNER/REPO.git"`
#' @param destdir Destination for the new folder, which will be named according
#' to the `REPO` extracted from `repo_spec`. Defaults to the location stored
#' in the global option `usethis.destdir`, if defined, or to the user's
#' Desktop or similarly conspicuous place otherwise.
#' @param fork If `FALSE`, we clone `repo_spec`. If `TRUE`, we fork
#' `repo_spec`, clone that fork, and do additional setup favorable for
#' future pull requests:
#' * The source repo, `repo_spec`, is configured as the `upstream` remote,
#' using the indicated `protocol`.
#' * The local `DEFAULT` branch is set to track `upstream/DEFAULT`, where
#' `DEFAULT` is typically `main` or `master`. It is also immediately pulled,
#' to cover the case of a pre-existing, out-of-date fork.
#'
#' If `fork = NA` (the default), we check your permissions on `repo_spec`. If
#'   you can push, we set `fork = FALSE`; if you cannot, we set `fork = TRUE`.
#' @param host GitHub host to target, passed to the `.api_url` argument of
#' [gh::gh()]. If `repo_spec` is a URL, `host` is extracted from that.
#'
#' If unspecified, gh defaults to "https://api.github.com", although gh's
#' default can be customised by setting the GITHUB_API_URL environment
#' variable.
#'
#' For a hypothetical GitHub Enterprise instance, either
#' "https://github.acme.com/api/v3" or "https://github.acme.com" is
#' acceptable.
#' @param rstudio Initiate an [RStudio
#' Project](https://r-pkgs.org/workflow101.html#sec-workflow101-rstudio-projects)?
#' Defaults to `TRUE` if in an RStudio session and project has no
#' pre-existing `.Rproj` file. Defaults to `FALSE` otherwise (but note that
#' the cloned repo may already be an RStudio Project, i.e. may already have a
#' `.Rproj` file).
#' @inheritParams use_github
#'
#' @export
#' @examples
#' \dontrun{
#' create_from_github("r-lib/usethis")
#'
#' # repo_spec can be a URL
#' create_from_github("https://github.com/r-lib/usethis")
#'
#' # a URL repo_spec also specifies the host (e.g. GitHub Enterprise instance)
#' create_from_github("https://github.acme.com/OWNER/REPO")
#' }
create_from_github <- function(repo_spec,
destdir = NULL,
fork = NA,
rstudio = NULL,
open = rlang::is_interactive(),
protocol = git_protocol(),
host = NULL,
auth_token = deprecated(),
credentials = deprecated()) {
if (lifecycle::is_present(auth_token)) {
deprecate_warn_auth_token("create_from_github")
}
if (lifecycle::is_present(credentials)) {
deprecate_warn_credentials("create_from_github")
}
check_protocol(protocol)
parsed_repo_spec <- parse_repo_url(repo_spec)
if (!is.null(parsed_repo_spec$host)) {
repo_spec <- parsed_repo_spec$repo_spec
host <- parsed_repo_spec$host
}
whoami <- suppressMessages(gh::gh_whoami(.api_url = host))
no_auth <- is.null(whoami)
user <- if (no_auth) NULL else whoami$login
hint <- code_hint_with_host("gh_token_help", host)
if (no_auth && is.na(fork)) {
ui_stop("
Unable to discover a GitHub personal access token
Therefore, can't determine your permissions on {ui_value(repo_spec)}
Therefore, can't decide if `fork` should be `TRUE` or `FALSE`
You have two choices:
1. Make your token available (if in doubt, DO THIS):
- Call {ui_code(hint)} for directions
2. Call {ui_code('create_from_github()')} again, but with \\
{ui_code('fork = FALSE')}
- Only do this if you are absolutely sure you don't want to fork
- Note you will NOT be in a position to make a pull request")
}
if (no_auth && isTRUE(fork)) {
ui_stop("
Unable to discover a GitHub personal access token
A token is required in order to fork {ui_value(repo_spec)}
Call {ui_code(hint)} for help configuring a token")
}
# one of these is true:
# - gh is discovering a token for `host`
# - gh is NOT discovering a token, but `fork = FALSE`, so that's OK
source_owner <- spec_owner(repo_spec)
repo_name <- spec_repo(repo_spec)
gh <- gh_tr(list(repo_owner = source_owner, repo_name = repo_name, api_url = host))
repo_info <- gh("GET /repos/{owner}/{repo}")
# 2023-01-28 We're seeing the GitHub bug again around default branch in a
# fresh fork. If I create a fork, the POST payload *sometimes* mis-reports the
# default branch. I.e. it reports `main`, even though the actual default
# branch is `master`. Therefore we're reverting to consulting the source repo
# for this info
default_branch <- repo_info$default_branch
if (is.na(fork)) {
fork <- !isTRUE(repo_info$permissions$push)
fork_status <- glue("fork = {fork}")
ui_done("Setting {ui_code(fork_status)}")
}
# fork is either TRUE or FALSE
if (fork && identical(user, repo_info$owner$login)) {
ui_stop("
Can't fork, because the authenticated user {ui_value(user)} \\
already owns the source repo {ui_value(repo_info$full_name)}")
}
destdir <- user_path_prep(destdir %||% conspicuous_place())
check_path_is_directory(destdir)
challenge_nested_project(destdir, repo_name)
repo_path <- path(destdir, repo_name)
create_directory(repo_path)
check_directory_is_empty(repo_path)
if (fork) {
## https://developer.github.com/v3/repos/forks/#create-a-fork
ui_done("Forking {ui_value(repo_info$full_name)}")
upstream_url <- switch(
protocol,
https = repo_info$clone_url,
ssh = repo_info$ssh_url
)
repo_info <- gh("POST /repos/{owner}/{repo}/forks")
ui_done("Waiting for the fork to finalize before cloning")
Sys.sleep(3)
}
origin_url <- switch(
protocol,
https = repo_info$clone_url,
ssh = repo_info$ssh_url
)
ui_done("Cloning repo from {ui_value(origin_url)} into {ui_value(repo_path)}")
gert::git_clone(origin_url, repo_path, verbose = FALSE)
proj_path <- find_rstudio_root(repo_path)
local_project(proj_path, force = TRUE) # schedule restoration of project
# 2023-01-28 again, it would be more natural to trust the default branch of
# the fork, but that cannot always be trusted. For now, we're still using
# the default branch learned from the source repo.
ui_info("Default branch is {ui_value(default_branch)}")
if (fork) {
ui_done("Adding {ui_value('upstream')} remote: {ui_value(upstream_url)}")
use_git_remote("upstream", upstream_url)
pr_merge_main()
upstream_remref <- glue("upstream/{default_branch}")
ui_done("
Setting remote tracking branch for local {ui_value(default_branch)} \\
branch to {ui_value(upstream_remref)}")
gert::git_branch_set_upstream(upstream_remref, repo = git_repo())
config_key <- glue("remote.upstream.created-by")
gert::git_config_set(config_key, "usethis::create_from_github", repo = git_repo())
}
rstudio <- rstudio %||% rstudio_available()
rstudio <- rstudio && !is_rstudio_project()
if (rstudio) {
use_rstudio(reformat = FALSE)
}
if (open) {
if (proj_activate(proj_get())) {
# Working directory/active project changed; so don't undo on exit
withr::deferred_clear()
}
}
invisible(proj_get())
}
# If there's a single directory containing an .Rproj file, use it.
# Otherwise work in the repo root
find_rstudio_root <- function(path) {
rproj <- rproj_paths(path, recurse = TRUE)
if (length(rproj) == 1) {
path_dir(rproj)
} else {
path
}
}
challenge_nested_project <- function(path, name) {
if (!possibly_in_proj(path)) {
return(invisible())
}
# creates an undocumented backdoor we can exploit when the interactive
# approval is impractical, e.g. in tests
if (isTRUE(getOption("usethis.allow_nested_project", FALSE))) {
return(invisible())
}
ui_line(
"New project {ui_value(name)} is nested inside an existing project \\
{ui_path(path)}, which is rarely a good idea.
If this is unexpected, the here package has a function, \\
{ui_code('here::dr_here()')} that reveals why {ui_path(path)} \\
is regarded as a project."
)
if (ui_nope("Do you want to create anyway?")) {
ui_stop("Cancelling project creation.")
}
invisible()
}
challenge_home_directory <- function(path) {
homes <- unique(c(path_home(), path_home_r()))
if (!path %in% homes) {
return(invisible())
}
qualification <- if (is_windows()) {
glue("a special directory, i.e. some applications regard it as ")
} else {
""
}
ui_line("
{ui_path(path)} is {qualification}your home directory.
It is generally a bad idea to create a new project here.
You should probably create your new project in a subdirectory.")
if (ui_nope("Do you want to create anyway?")) {
ui_stop("Good move! Cancelling project creation.")
}
invisible()
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/create.R
|
#' Prepare for importing data.table
#'
#' `use_data_table()` imports the `data.table()` function from the data.table
#' package, as well as several important symbols: `:=`, `.SD`, `.BY`, `.N`,
#' `.I`, `.GRP`, `.NGRP`, `.EACHI`. This is a minimal setup and you can learn
#' much more in the "Importing data.table" vignette:
#' `https://rdatatable.gitlab.io/data.table/articles/datatable-importing.html`.
#' In addition to importing these functions, `use_data_table()` also blocks the
#' usage of data.table in the `Depends` field of the `DESCRIPTION` file;
#' `data.table` should be used as an _imported_ or _suggested_ package only. See
#' this [discussion](https://github.com/Rdatatable/data.table/issues/3076).
#'
#' @export
use_data_table <- function() {
check_is_package("use_data_table()")
check_installed("data.table")
check_uses_roxygen("use_data_table()")
desc <- proj_desc()
deps <- desc$get_deps()
if (any(deps$type == "Depends" & deps$package == "data.table")) {
ui_warn("data.table should be in Imports or Suggests, not Depends")
ui_done("Deleting data.table from {ui_field('Depends')}")
desc$del_dep("data.table", "Depends")
desc$write()
}
use_import_from(
"data.table",
c("data.table", ":=", ".SD", ".BY", ".N", ".I", ".GRP", ".NGRP", ".EACHI")
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/data-table.R
|
#' Create package data
#'
#' `use_data()` makes it easy to save package data in the correct format. I
#' recommend you save scripts that generate package data in `data-raw`: use
#' `use_data_raw()` to set it up. You also need to document exported datasets.
#'
#' @param ... Unquoted names of existing objects to save.
#' @param internal If `FALSE`, saves each object in its own `.rda`
#' file in the `data/` directory. These data files bypass the usual
#' export mechanism and are available whenever the package is loaded
#' (or via [data()] if `LazyData` is not true).
#'
#' If `TRUE`, stores all objects in a single `R/sysdata.rda` file.
#' Objects in this file follow the usual export rules. Note that this means
#' they will be exported if you are using the common `exportPattern()`
#' rule which exports all objects except for those that start with `.`.
#' @param overwrite By default, `use_data()` will not overwrite existing
#' files. If you really want to do so, set this to `TRUE`.
#' @param compress Choose the type of compression used by [save()].
#' Should be one of "gzip", "bzip2", or "xz".
#' @param version The serialization format version to use. The default, 2, was
#' the default format from R 1.4.0 to 3.5.3. Version 3 became the default from
#' R 3.6.0 and can only be read by R versions 3.5.0 and higher.
#' @inheritParams base::save
#'
#' @seealso The [data chapter](https://r-pkgs.org/data.html) of [R
#' Packages](https://r-pkgs.org).
#' @export
#' @examples
#' \dontrun{
#' x <- 1:10
#' y <- 1:100
#'
#' use_data(x, y) # For external use
#' use_data(x, y, internal = TRUE) # For internal use
#' }
use_data <- function(...,
internal = FALSE,
overwrite = FALSE,
compress = "bzip2",
version = 2,
ascii = FALSE) {
check_is_package("use_data()")
objs <- get_objs_from_dots(dots(...))
if (version < 3) {
use_dependency("R", "depends", "2.10")
} else {
use_dependency("R", "depends", "3.5")
}
if (internal) {
use_directory("R")
paths <- path("R", "sysdata.rda")
objs <- list(objs)
} else {
use_directory("data")
paths <- path("data", objs, ext = "rda")
desc <- proj_desc()
if (!desc$has_fields("LazyData")) {
ui_done("Setting {ui_field('LazyData')} to \\
{ui_value('true')} in {ui_path('DESCRIPTION')}")
desc$set(LazyData = "true")
desc$write()
}
}
check_files_absent(proj_path(paths), overwrite = overwrite)
ui_done("Saving {ui_value(unlist(objs))} to {ui_value(paths)}")
if (!internal) ui_todo("Document your data (see {ui_value('https://r-pkgs.org/data.html')})")
envir <- parent.frame()
mapply(
save,
list = objs,
file = proj_path(paths),
MoreArgs = list(envir = envir, compress = compress, version = version, ascii = ascii)
)
invisible()
}
get_objs_from_dots <- function(.dots) {
if (length(.dots) == 0L) {
ui_stop("Nothing to save.")
}
is_name <- vapply(.dots, is.symbol, logical(1))
if (any(!is_name)) {
ui_stop("Can only save existing named objects.")
}
objs <- vapply(.dots, as.character, character(1))
duplicated_objs <- which(stats::setNames(duplicated(objs), objs))
if (length(duplicated_objs) > 0L) {
objs <- unique(objs)
ui_warn("Saving duplicates only once: {ui_value(names(duplicated_objs))}")
}
objs
}
check_files_absent <- function(paths, overwrite) {
if (overwrite) {
return()
}
ok <- !file_exists(paths)
if (all(ok)) {
return()
}
ui_stop(
"
    {ui_path(paths[!ok])} already exist.
Use {ui_code('overwrite = TRUE')} to overwrite.
"
)
}
#' @param name Name of the dataset to be prepared for inclusion in the package.
#' @inheritParams use_template
#' @rdname use_data
#' @export
#' @examples
#' \dontrun{
#' use_data_raw("daisy")
#' }
use_data_raw <- function(name = "DATASET", open = rlang::is_interactive()) {
check_name(name)
r_path <- path("data-raw", asciify(name), ext = "R")
use_directory("data-raw", ignore = TRUE)
use_template(
"packagename-data-prep.R",
save_as = r_path,
data = list(name = name),
ignore = FALSE,
open = open
)
ui_todo("Finish the data preparation script in {ui_value(r_path)}")
ui_todo("Use {ui_code('usethis::use_data()')} to add prepared data to package")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/data.R
|
#' Create or modify a DESCRIPTION file
#'
#' @description
#'
#' `use_description()` creates a `DESCRIPTION` file. Although mostly associated
#' with R packages, a `DESCRIPTION` file can also be used to declare
#' dependencies for a non-package project. Within such a project,
#' `devtools::install_deps()` can then be used to install all the required
#' packages. Note that, by default, `use_decription()` checks for a
#' CRAN-compliant package name. You can turn this off with `check_name = FALSE`.
#'
#' usethis consults the following sources, in this order, to set `DESCRIPTION`
#' fields:
#' * `fields` argument of [create_package()] or [use_description()]
#' * `getOption("usethis.description")`
#' * Defaults built into usethis
#'
#' The fields discovered via options or the usethis package can be viewed with
#' `use_description_defaults()`.
#'
#' If you create a lot of packages, consider storing personalized defaults as a
#' named list in an option named `"usethis.description"`. Here's an example of
#' code to include in `.Rprofile`, which can be opened via [edit_r_profile()]:
#'
#' ```
#' options(
#' usethis.description = list(
#' "Authors@R" = utils::person(
#' "Jane", "Doe",
#' email = "[email protected]",
#' role = c("aut", "cre"),
#' comment = c(ORCID = "YOUR-ORCID-ID")
#' ),
#' Language = "es",
#' License = "MIT + file LICENSE"
#' )
#' )
#' ```
#'
#' Prior to usethis v2.0.0, `getOption("devtools.desc")` was consulted for
#' backwards compatibility, but now only the `"usethis.description"` option is
#' supported.
#'
#' @param fields A named list of fields to add to `DESCRIPTION`, potentially
#' overriding default values. See [use_description()] for how you can set
#' personalized defaults using package options.
#' @param check_name Whether to check if the name is valid for CRAN and throw an
#' error if not.
#' @param roxygen If `TRUE`, sets `RoxygenNote` to current roxygen2 version
#' @seealso The [description chapter](https://r-pkgs.org/description.html)
#' of [R Packages](https://r-pkgs.org)
#' @export
#' @examples
#' \dontrun{
#' use_description()
#'
#' use_description(fields = list(Language = "es"))
#'
#' use_description_defaults()
#' }
use_description <- function(fields = list(),
check_name = TRUE,
roxygen = TRUE) {
name <- project_name()
if (check_name) {
check_package_name(name)
}
proj_desc_create(name = name, fields = fields, roxygen = roxygen)
}
#' @rdname use_description
#' @param package Package name
#' @export
use_description_defaults <- function(package = NULL,
roxygen = TRUE,
fields = list()) {
fields <- fields %||% list()
check_is_named_list(fields)
usethis <- usethis_description_defaults(package)
if (roxygen) {
if (is_installed("roxygen2")) {
roxygen_note <- utils::packageVersion("roxygen2")
} else {
roxygen_note <- "7.0.0" # version doesn't really matter
}
usethis$Roxygen <- "list(markdown = TRUE)"
usethis$RoxygenNote <- roxygen_note
}
options <- getOption("usethis.description") %||% list()
# A `person` object in Authors@R is not patched in by modifyList()
modify_this <- function(orig, patch) {
out <- utils::modifyList(orig, patch)
if (inherits(patch$`Authors@R`, "person")) {
#if (has_name(patch, "Authors@R")) {
out$`Authors@R` <- patch$`Authors@R`
}
out
}
defaults <- modify_this(usethis, options)
defaults <- modify_this(defaults, fields)
# Ensure each element is a single string
if (inherits(defaults$`Authors@R`, "person")) {
defaults$`Authors@R` <- format(defaults$`Authors@R`, style = "R")
defaults$`Authors@R` <- paste0(defaults$`Authors@R`, collapse = "\n")
}
defaults <- lapply(defaults, paste, collapse = "")
compact(defaults)
}
usethis_description_defaults <- function(package = NULL) {
list(
Package = package %||% "valid.package.name.goes.here",
Version = "0.0.0.9000",
Title = "What the Package Does (One Line, Title Case)",
Description = "What the package does (one paragraph).",
"Authors@R" = 'person("First", "Last", email = "[email protected]", role = c("aut", "cre"), comment = c(ORCID = "YOUR-ORCID-ID"))',
License = "`use_mit_license()`, `use_gpl3_license()` or friends to pick a license",
Encoding = "UTF-8"
)
}
check_package_name <- function(name) {
if (!valid_package_name(name)) {
ui_stop(c(
"{ui_value(name)} is not a valid package name. To be allowed on CRAN, it should:",
"* Contain only ASCII letters, numbers, and '.'",
"* Have at least two characters",
"* Start with a letter",
"* Not end with '.'"
))
}
}
valid_package_name <- function(x) {
grepl("^[a-zA-Z][a-zA-Z0-9.]+$", x) && !grepl("\\.$", x)
}
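# Illustrative calls with hypothetical names:
# valid_package_name("usethis") #> TRUE
# valid_package_name("2pkg")    #> FALSE (must start with a letter)
# valid_package_name("pkg.")    #> FALSE (must not end with '.')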
tidy_desc <- function(desc) {
desc$set("Encoding" = "UTF-8")
# Normalize all fields (includes reordering)
# Wrap in a try() so it always succeeds, even if user options are malformed
try(desc$normalize(), silent = TRUE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/description.R
|
#' Use a directory
#'
#' `use_directory()` creates a directory (if it does not already exist) in the
#' project's top-level directory. This function powers many of the other `use_`
#' functions such as [use_data()] and [use_vignette()].
#'
#' @param path Path of the directory to create, relative to the project.
#' @inheritParams use_template
#'
#' @export
#' @examples
#' \dontrun{
#' use_directory("inst")
#' }
use_directory <- function(path,
ignore = FALSE) {
create_directory(proj_path(path))
if (ignore) {
use_build_ignore(path)
}
invisible(TRUE)
}
create_directory <- function(path) {
if (dir_exists(path)) {
return(invisible(FALSE))
} else if (file_exists(path)) {
ui_stop("{ui_path(path)} exists but is not a directory.")
}
dir_create(path, recurse = TRUE)
ui_done("Creating {ui_path(path)}")
invisible(TRUE)
}
check_path_is_directory <- function(path) {
if (!file_exists(path)) {
ui_stop("Directory {ui_path(path)} does not exist.")
}
if (is_link(path)) {
path <- link_path(path)
}
if (!is_dir(path)) {
ui_stop("{ui_path(path)} is not a directory.")
}
}
count_directory_files <- function(x) {
length(dir_ls(x))
}
directory_has_files <- function(x) {
count_directory_files(x) >= 1
}
check_directory_is_empty <- function(x) {
if (directory_has_files(x)) {
ui_stop("{ui_path(x)} exists and is not an empty directory.")
}
invisible(x)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/directory.R
|
#' Package-level documentation
#'
#' Adds a dummy `.R` file that will cause roxygen2 to generate basic
#' package-level documentation. If your package is named "foo", this will make
#' help available to the user via `?foo` or `package?foo`. Once you call
#' `devtools::document()`, roxygen2 will flesh out the `.Rd` file using data
#' from the `DESCRIPTION`. That ensures you don't need to repeat (and remember
#' to update!) the same information in multiple places. This `.R` file is also a
#' good place for roxygen directives that apply to the whole package (vs. a
#' specific function), such as global namespace tags like `@importFrom`.
#'
#' @seealso The [documentation chapter](https://r-pkgs.org/man.html) of [R
#' Packages](https://r-pkgs.org)
#' @inheritParams use_template
#' @export
use_package_doc <- function(open = rlang::is_interactive()) {
check_is_package("use_package_doc()")
use_template(
"packagename-package.R",
package_doc_path(),
open = open
)
}
package_doc_path <- function() {
path("R", paste0(project_name(), "-package"), ext = "R")
}
has_package_doc <- function() {
file_exists(proj_path(package_doc_path()))
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/documentation.R
|
#' Open file for editing
#'
#' Opens a file for editing in RStudio, if that is the active environment, or
#' via [utils::file.edit()] otherwise. If the file does not exist, it is
#' created. If the parent directory does not exist, it is also created.
#' `edit_template()` specifically opens templates in `inst/templates` for use
#' with [use_template()].
#'
#' @param path Path to target file.
#' @param open Whether to open the file for interactive editing.
#' @return Target path, invisibly.
#' @export
#' @keywords internal
#'
#' @examples
#' \dontrun{
#' edit_file("DESCRIPTION")
#' edit_file("~/.gitconfig")
#' }
edit_file <- function(path, open = rlang::is_interactive()) {
open <- open && is_interactive()
path <- user_path_prep(path)
create_directory(path_dir(path))
file_create(path)
if (!open) {
ui_todo("Edit {ui_path(path)}")
return(invisible(path))
}
ui_todo("Modify {ui_path(path)}")
if (rstudio_available() && rstudioapi::hasFun("navigateToFile")) {
rstudioapi::navigateToFile(path)
} else {
utils::file.edit(path)
}
invisible(path)
}
#' @param template The target template file. If not specified, existing template
#' files are offered for interactive selection.
#' @export
#' @rdname edit_file
edit_template <- function(template = NULL, open = rlang::is_interactive()) {
check_is_package("edit_template()")
if (is.null(template)) {
ui_info("No template specified... checking {ui_path('inst/templates')}")
template <- choose_template()
}
if (is_empty(template)) {
return(invisible())
}
path <- proj_path("inst", "templates", template)
edit_file(path, open)
}
choose_template <- function() {
if (!is_interactive()) {
return(character())
}
templates <- path_file(dir_ls(proj_path("inst", "templates"), type = "file"))
if (is_empty(templates)) {
return(character())
}
choice <- utils::menu(
choices = templates,
title = "Which template do you want to edit? (0 to exit)"
)
templates[choice]
}
#' Open configuration files
#'
#' * `edit_r_profile()` opens `.Rprofile`
#' * `edit_r_environ()` opens `.Renviron`
#' * `edit_r_makevars()` opens `.R/Makevars`
#' * `edit_git_config()` opens `.gitconfig` or `.git/config`
#' * `edit_git_ignore()` opens global (user-level) gitignore file and ensures
#' its path is declared in your global Git config.
#' * `edit_pkgdown_config()` opens the pkgdown YAML configuration file for the
#' current Project.
#' * `edit_rstudio_snippets()` opens RStudio's snippet config for the given type.
#' * `edit_rstudio_prefs()` opens RStudio's preference file.
#'
#' The `edit_r_*()` functions consult R's notion of the user's home directory.
#' The `edit_git_*()` functions (and \pkg{usethis} in general) inherit home
#' directory behaviour from the \pkg{fs} package, which differs from R itself
#' on Windows. The \pkg{fs} default is more conventional in terms of the
#' location of user-level Git config files. See [fs::path_home()] for more
#' details.
#'
#' Files created by `edit_rstudio_snippets()` will *mask*, not supplement,
#' the built-in default snippets. If you like the built-in snippets, copy them
#' and include them with your custom snippets.
#'
#' @return Path to the file, invisibly.
#'
#' @param scope Edit globally for the current __user__, or locally for the
#' current __project__
#' @name edit
NULL
#' @export
#' @rdname edit
edit_r_profile <- function(scope = c("user", "project")) {
path <- scoped_path_r(scope, ".Rprofile", envvar = "R_PROFILE_USER")
edit_file(path)
ui_todo("Restart R for changes to take effect")
invisible(path)
}
#' @export
#' @rdname edit
edit_r_environ <- function(scope = c("user", "project")) {
path <- scoped_path_r(scope, ".Renviron", envvar = "R_ENVIRON_USER")
edit_file(path)
ui_todo("Restart R for changes to take effect")
invisible(path)
}
#' @export
#' @rdname edit
edit_r_buildignore <- function() {
check_is_package("edit_r_buildignore()")
edit_file(proj_path(".Rbuildignore"))
}
#' @export
#' @rdname edit
edit_r_makevars <- function(scope = c("user", "project")) {
path <- scoped_path_r(scope, ".R", "Makevars")
edit_file(path)
}
#' @export
#' @rdname edit
#' @param type Snippet type (case insensitive text).
edit_rstudio_snippets <- function(type = c(
"r", "markdown", "c_cpp", "css",
"html", "java", "javascript", "python", "sql", "stan", "tex"
)) {
type <- tolower(type)
type <- match.arg(type)
file <- path_ext_set(type, "snippets")
# Snippet location changed in 1.3:
# https://blog.rstudio.com/2020/02/18/rstudio-1-3-preview-configuration/
new_rstudio <- !rstudioapi::isAvailable() || rstudioapi::getVersion() >= "1.3.0"
old_path <- path_home_r(".R", "snippets", file)
new_path <- rstudio_config_path("snippets", file)
# Mimic RStudio behaviour: copy to new location if you edit
if (new_rstudio && file_exists(old_path) && !file_exists(new_path)) {
create_directory(path_dir(new_path))
file_copy(old_path, new_path)
ui_done("Copying snippets file to {ui_path(new_path)}")
}
path <- if (new_rstudio) new_path else old_path
if (!file_exists(path)) {
ui_done("New snippet file at {ui_path(path)}")
ui_info(c(
"This masks the default snippets for {ui_field(type)}.",
"Delete this file and restart RStudio to restore the default snippets."
))
}
edit_file(path)
}
#' @export
#' @rdname edit
edit_rstudio_prefs <- function() {
path <- rstudio_config_path("rstudio-prefs.json")
edit_file(path)
ui_todo("Restart RStudio for changes to take effect")
invisible(path)
}
scoped_path_r <- function(scope = c("user", "project"), ..., envvar = NULL) {
scope <- match.arg(scope)
# Try environment variable in user scopes
if (scope == "user" && !is.null(envvar)) {
env <- Sys.getenv(envvar, unset = "")
if (!identical(env, "")) {
return(user_path_prep(env))
}
}
root <- switch(scope,
user = path_home_r(),
project = proj_get()
)
path(root, ...)
}
# git paths ---------------------------------------------------------------
# Note that on windows R's definition of ~ is in a nonstandard place,
# so it is important to use path_home(), not path_home_r()
#' @export
#' @rdname edit
edit_git_config <- function(scope = c("user", "project")) {
scope <- match.arg(scope)
path <- switch(
scope,
user = path_home(".gitconfig"),
project = proj_path(".git", "config")
)
invisible(edit_file(path))
}
#' @export
#' @rdname edit
edit_git_ignore <- function(scope = c("user", "project")) {
scope <- match.arg(scope)
if (scope == "user") {
ensure_core_excludesFile()
}
file <- git_ignore_path(scope)
if (scope == "user" && !file_exists(file)) {
git_vaccinate()
}
invisible(edit_file(file))
}
git_ignore_path <- function(scope = c("user", "project")) {
scope <- match.arg(scope)
switch(
scope,
user = git_cfg_get("core.excludesFile", where = "global"),
project = proj_path(".gitignore")
)
}
# pkgdown ---------------------------------------------------------------
#' @export
#' @rdname edit
edit_pkgdown_config <- function() {
path <- pkgdown_config_path()
if (is.null(path)) {
ui_oops("No pkgdown config file found in current Project.")
} else {
invisible(edit_file(path))
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/edit.R
|
#' Get or set the default Git branch
#'
#' @description
#' The `git_default_branch*()` functions put some structure around the somewhat
#' fuzzy (but definitely real) concept of the default branch. In particular,
#' they support new conventions around the Git default branch name, globally or
#' in a specific project / Git repository.
#'
#' @section Background on the default branch:
#'
#' Technically, Git has no official concept of the default branch. But in
#' reality, almost all Git repos have an *effective default branch*. If there's
#' only one branch, this is it! It is the branch that most bug fixes and
#' features get merged in to. It is the branch you see when you first visit a
#' repo on a site such as GitHub. On a Git remote, it is the branch that `HEAD`
#' points to.
#'
#' Historically, `master` has been the most common name for the default branch,
#' but `main` is an increasingly popular choice.
#'
#' @section `git_default_branch_configure()`:
#' This configures `init.defaultBranch` at the global (a.k.a user) level. This
#' setting determines the name of the branch that gets created when you make the
#' first commit in a new Git repo. `init.defaultBranch` only affects the local
#' Git repos you create in the future.
#'
#' @section `git_default_branch()`:
#' This figures out the default branch of the current Git repo, integrating
#' information from the local repo and, if applicable, the `upstream` or
#' `origin` remote. If there is a local vs. remote mismatch,
#' `git_default_branch()` throws an error with advice to call
#' `git_default_branch_rediscover()` to repair the situation.
#'
#' For a remote repo, the default branch is the branch that `HEAD` points to.
#'
#' For the local repo, if there is only one branch, that must be the default!
#' Otherwise we try to identify the relevant local branch by looking for
#' specific branch names, in this order:
#' * whatever the default branch of `upstream` or `origin` is, if applicable
#' * `main`
#' * `master`
#' * the value of the Git option `init.defaultBranch`, with the usual deal where
#' a local value, if present, takes precedence over a global (a.k.a.
#' user-level) value
#'
#' @section `git_default_branch_rediscover()`:
#' This consults an external authority -- specifically, the remote **source
#' repo** on GitHub -- to learn the default branch of the current project /
#' repo. If that doesn't match the apparent local default branch (for example,
#' the project switched from `master` to `main`), we do the corresponding branch
#' renaming in your local repo and, if relevant, in your fork.
#'
#' See <https://happygitwithr.com/common-remote-setups.html> for more about
#' GitHub remote configurations and, e.g., what we mean by the source repo. This
#' function works for the configurations `"ours"`, `"fork"`, and `"theirs"`.
#' @section `git_default_branch_rename()`:
#' Note: this only works for a repo that you effectively own. In terms of
#' GitHub, you must own the **source repo** personally or, if
#' organization-owned, you must have `admin` permission on the **source repo**.
#'
#' This renames the default branch in the **source repo** on GitHub and then
#' calls `git_default_branch_rediscover()`, to make any necessary changes in the
#' local repo and, if relevant, in your personal fork.
#'
#' See <https://happygitwithr.com/common-remote-setups.html> for more about
#' GitHub remote configurations and, e.g., what we mean by the source repo. This
#' function works for the configurations `"ours"`, `"fork"`, and `"no_github"`.
#'
#' Regarding `"no_github"`: Of course, this function does what you expect for a
#' local repo with no GitHub remotes, but that is not the primary use case.
#' @return Name of the default branch.
#' @name git-default-branch
NULL
#' @export
#' @rdname git-default-branch
#' @examples
#' \dontrun{
#' git_default_branch()
#' }
git_default_branch <- function() {
repo <- git_repo()
# TODO: often when we call git_default_branch(), we already have a GitHub
# configuration or target repo, as produced by github_remote_config() or
# target_repo(). In that case, we don't need to start from scratch as we do
# here. But I'm not sure it's worth adding complexity to allow passing this
# data in.
# TODO: this critique feels somewhat mis-placed, i.e. it brings up a general
# concern about a repo's config (or the user's permissions and creds)
# related to whether github_remotes() should be as silent as it is about
# 404s
critique_remote <- function(remote) {
if (remote$is_configured && is.na(remote$default_branch)) {
ui_oops("
The {ui_value(remote$name)} remote is configured, but we can't \\
determine its default branch.
Possible reasons:
- The remote repo no longer exists, suggesting the local remote should
be deleted.
- We are offline or that specific Git server is down.
- You don't have the necessary permission or something is wrong with
your credentials.")
}
}
upstream <- git_default_branch_remote("upstream")
if (is.na(upstream$default_branch)) {
critique_remote(upstream)
origin <- git_default_branch_remote("origin")
if (is.na(origin$default_branch)) {
critique_remote(origin)
db_source <- list()
} else {
db_source <- origin
}
} else {
db_source <- upstream
}
db_local_with_source <- tryCatch(
guess_local_default_branch(db_source$default_branch),
error = function(e) NA_character_
)
# these error sub-classes and error data are for the benefit of git_sitrep()
  if (is.na(db_local_with_source)) {
if (length(db_source)) {
usethis_abort(c(
"Default branch mismatch between local repo and remote.",
"The default branch of the {.val {db_source$name}} remote is
{.val {db_source$default_branch}}.",
"But the local repo has no branch named
{.val {db_source$default_branch}}.",
"Call {.code git_default_branch_rediscover()} to resolve this."
),
class = "error_default_branch",
db_source = db_source
)
} else {
usethis_abort(
"Can't determine the local repo's default branch.",
class = "error_default_branch"
)
}
}
# we learned a default branch from the local repo
if (is.null(db_source$default_branch) ||
is.na(db_source$default_branch) ||
identical(db_local_with_source, db_source$default_branch)) {
return(db_local_with_source)
}
# we learned a default branch from the source repo and it doesn't match
# the local default branch
usethis_abort(c(
"Default branch mismatch between local repo and remote.",
"The default branch of the {.val {db_source$name}} remote is
{.val {db_source$default_branch}}.",
"But the default branch of the local repo appears to be
{.val {db_local_with_source}}.",
"Call {.code git_default_branch_rediscover()} to resolve this."
),
class = "error_default_branch",
db_source = db_source, db_local = db_local_with_source
)
}
# returns a whole data structure, because the caller needs the surrounding
# context to produce a helpful error message
git_default_branch_remote <- function(remote = "origin") {
repo <- git_repo()
out <- list(
name = remote,
is_configured = NA,
url = NA_character_,
repo_spec = NA_character_,
default_branch = NA_character_
)
url <- git_remotes()[[remote]]
if (length(url) == 0) {
out$is_configured <- FALSE
return(out)
}
out$is_configured <- TRUE
out$url <- url
# TODO: generalize here for GHE hosts that don't include 'github'
parsed <- parse_github_remotes(url)
# if the protocol is ssh, I suppose we can't assume a PAT, i.e. it's better
# to use the Git approach vs. the GitHub API approach
if (grepl("github", parsed$host) && parsed$protocol == "https") {
remote_dat <- github_remotes(remote, github_get = NA)
out$repo_spec <- remote_dat$repo_spec
out$default_branch <- remote_dat$default_branch
return(out)
}
out$default_branch <- tryCatch(
{
gert::git_fetch(remote = remote, repo = repo, verbose = FALSE)
res <- gert::git_remote_ls(remote = remote, verbose = FALSE, repo = repo)
path_file(res$symref[res$ref == "HEAD"])
},
error = function(e) NA_character_
)
out
}
default_branch_candidates <- function() {
c(
"main",
"master",
# we use `where = "de_facto"` so that one can configure init.defaultBranch
# *locally* (which is unusual, but possible) in a repo that uses an
# unconventional default branch name
git_cfg_get("init.defaultBranch", where = "de_facto")
)
}
# `prefer` is available if you want to inject external information, such as
# the default branch of a remote
guess_local_default_branch <- function(prefer = NULL, verbose = FALSE) {
repo <- git_repo()
gb <- gert::git_branch_list(local = TRUE, repo = repo)[["name"]]
if (length(gb) == 0) {
ui_stop("
Can't find any local branches.
Do you need to make your first commit?")
}
candidates <- c(prefer, default_branch_candidates())
first_matched <- function(x, table) table[min(match(x, table), na.rm = TRUE)]
if (length(gb) == 1) {
db <- gb
} else if (any(gb %in% candidates)) {
db <- first_matched(gb, candidates)
} else {
# TODO: perhaps this should be classed, so I can catch it and distinguish
# from the ui_stop() above, where there are no local branches.
ui_stop("
Unable to guess which existing local branch plays the role of the default.")
}
if (verbose) {
ui_info("
Local branch {ui_value(db)} appears to play the role of \\
the default branch.")
}
db
}
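# Illustrative behaviour (a sketch, not a test): with local branches
# c("main", "feature-x") and no `prefer`, "main" is among the candidates and is
# returned; with a single local branch, that branch wins outright.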
#' @export
#' @rdname git-default-branch
#' @param name Default name for the initial branch in new Git repositories.
#' @examples
#' \dontrun{
#' git_default_branch_configure()
#' }
git_default_branch_configure <- function(name = "main") {
check_string(name)
ui_done("Configuring {ui_field('init.defaultBranch')} as {ui_value(name)}.")
ui_info("Remember: this only affects repos you create in the future.")
use_git_config(scope = "user", `init.defaultBranch` = name)
invisible(name)
}
#' @export
#' @rdname git-default-branch
#' @param current_local_default Name of the local branch that is currently
#' functioning as the default branch. If unspecified, this can often be
#' inferred.
#' @examples
#' \dontrun{
#' git_default_branch_rediscover()
#'
#' # you can always explicitly specify the local branch that's been playing the
#' # role of the default
#' git_default_branch_rediscover("unconventional_default_branch_name")
#' }
git_default_branch_rediscover <- function(current_local_default = NULL) {
rediscover_default_branch(old_name = current_local_default)
}
#' @export
#' @rdname git-default-branch
#' @param from Name of the branch that is currently functioning as the default
#' branch.
#' @param to New name for the default branch.
#' @examples
#' \dontrun{
#' git_default_branch_rename()
#'
#' # you can always explicitly specify one or both branch names
#' git_default_branch_rename(from = "this", to = "that")
#' }
git_default_branch_rename <- function(from = NULL, to = "main") {
repo <- git_repo()
maybe_name(from)
check_name(to)
if (!is.null(from) &&
!gert::git_branch_exists(from, local = TRUE, repo = repo)) {
ui_stop("Can't find existing branch named {ui_value(from)}.")
}
cfg <- github_remote_config(github_get = TRUE)
check_for_config(cfg, ok_configs = c("ours", "fork", "no_github"))
if (cfg$type == "no_github") {
from <- from %||% guess_local_default_branch(verbose = TRUE)
if (from == to) {
ui_info("Local repo already has {ui_value(from)} as its default branch.")
} else {
ui_done("Moving local {ui_value(from)} branch to {ui_value(to)}.")
gert::git_branch_move(branch = from, new_branch = to, repo = repo)
rstudio_git_tickle()
report_fishy_files(old_name = from, new_name = to)
}
return(invisible(to))
}
# cfg is now either fork or ours
tr <- target_repo(cfg, role = "source", ask = FALSE)
old_source_db <- tr$default_branch
if (!isTRUE(tr$can_admin)) {
ui_stop("
You don't seem to have {ui_field('admin')} permissions for the source \\
repo {ui_value(tr$repo_spec)}, which is required to rename the default \\
branch.")
}
old_local_db <- from %||%
guess_local_default_branch(old_source_db, verbose = FALSE)
if (old_local_db != old_source_db) {
ui_oops("
It's weird that the current default branch for your local repo and \\
the source repo are different:
{ui_value(old_local_db)} (local) != {ui_value(old_source_db)} (source)")
if (ui_nope(
"Are you sure you want to proceed?",
yes = "yes", no = "no", shuffle = FALSE)) {
ui_oops("Cancelling.")
return(invisible())
}
}
source_update <- old_source_db != to
if (source_update) {
gh <- gh_tr(tr)
gh(
"POST /repos/{owner}/{repo}/branches/{from}/rename",
from = old_source_db,
new_name = to
)
}
if (source_update) {
ui_done("
Default branch of the source repo {ui_value(tr$repo_spec)} has moved: \\
{ui_value(old_source_db)} --> {ui_value(to)}")
} else {
ui_done("
Default branch of source repo {ui_value(tr$repo_spec)} is \\
{ui_value(to)}. Nothing to be done.")
}
report_fishy_files(old_name = old_local_db, new_name = to)
rediscover_default_branch(old_name = old_local_db, report_on_source = FALSE)
}
rediscover_default_branch <- function(old_name = NULL, report_on_source = TRUE) {
maybe_name(old_name)
# GitHub's official TODOs re: manually updating local environments
# after a source repo renames the default branch:
# git branch -m OLD-BRANCH-NAME NEW-BRANCH-NAME
# git fetch origin
# git branch -u origin/NEW-BRANCH-NAME NEW-BRANCH-NAME
# git remote set-head origin -a
# optionally
# git remote prune origin
# Note: they are assuming the relevant repo is known as origin, but it could
# just as easily be, e.g., upstream.
repo <- git_repo()
if (!is.null(old_name) &&
!gert::git_branch_exists(old_name, local = TRUE, repo = repo)) {
ui_stop("Can't find existing local branch named {ui_value(old_name)}.")
}
cfg <- github_remote_config(github_get = TRUE)
check_for_config(cfg)
tr <- target_repo(cfg, role = "source", ask = FALSE)
db <- tr$default_branch
# goal, in Git-speak: git remote set-head <remote> -a
# goal, for humans: learn and record the default branch (i.e. the target of
# the symbolic-ref refs/remotes/<remote>/HEAD) for the named remote
# https://git-scm.com/docs/git-remote#Documentation/git-remote.txt-emset-headem
# for very stale repos, a fetch is a necessary pre-requisite
# I provide `refspec = db` to avoid fetching all refs, which can be VERY slow
# for a repo like ggplot2 (several minutes, with no progress reporting)
gert::git_fetch(remote = tr$name, refspec = db, verbose = FALSE, repo = repo)
gert::git_remote_ls(remote = tr$name, verbose = FALSE, repo = repo)
old_name <- old_name %||% guess_local_default_branch(db, verbose = FALSE)
local_update <- old_name != db
if (local_update) {
# goal, in Git-speak: git branch -m <old_name> <db>
gert::git_branch_move(branch = old_name, new_branch = db, repo = repo)
rstudio_git_tickle()
}
# goal, in Git-speak: git branch -u <remote>/<db> <db>
gert::git_branch_set_upstream(
branch = db,
upstream = glue("{tr$name}/{db}"),
repo = repo
)
# goal: get rid of old remote tracking branch, e.g. origin/master
# goal, in Git-speak: git remote prune origin
# I provide a refspec to avoid fetching all refs, which can be VERY slow
# for a repo like ggplot2 (several minutes, with no progress reporting)
gert::git_fetch(
remote = tr$name,
refspec = glue("refs/heads/{old_name}:refs/remotes/{tr$name}/{old_name}"),
verbose = FALSE, repo = repo, prune = TRUE
)
# for "ours" and "theirs", the source repo is the only remote on our radar and
# we're done ingesting the default branch from the source repo
# but for "fork", we also need to update
# the fork = the user's primary repo = the remote known as origin
if (cfg$type == "fork") {
old_name_fork <- cfg$origin$default_branch
fork_update <- old_name_fork != db
if (fork_update) {
gh <- gh_tr(cfg$origin)
gh(
"POST /repos/{owner}/{repo}/branches/{from}/rename",
from = old_name_fork,
new_name = db
)
gert::git_fetch(remote = "origin", refspec = db, verbose = FALSE, repo = repo)
gert::git_remote_ls(remote = "origin", verbose = FALSE, repo = repo)
gert::git_fetch(
remote = "origin",
refspec = glue("refs/heads/{old_name}:refs/remotes/origin/{old_name}"),
verbose = FALSE, repo = repo, prune = TRUE
)
}
}
if (report_on_source) {
ui_info("
Default branch of the source repo {ui_value(tr$repo_spec)}: {ui_value(db)}")
}
if (local_update) {
ui_done("
Default branch of local repo has moved: \\
{ui_value(old_name)} --> {ui_value(db)}")
} else {
ui_done("
Default branch of local repo is {ui_value(db)}. Nothing to be done.")
}
if (cfg$type == "fork") {
if (fork_update) {
ui_done("
Default branch of your fork has moved: \\
{ui_value(old_name_fork)} --> {ui_value(db)}")
} else {
ui_done("
Default branch of your fork is {ui_value(db)}. Nothing to be done.")
}
}
invisible(db)
}
challenge_non_default_branch <- function(details = "Are you sure you want to proceed?",
default_branch = NULL) {
actual <- git_branch()
default_branch <- default_branch %||% git_default_branch()
if (nzchar(details)) {
details <- paste0("\n", details)
}
if (actual != default_branch) {
if (ui_nope("
Current branch ({ui_value(actual)}) is not repo's default \\
branch ({ui_value(default_branch)}).{details}")) {
ui_stop("Cancelling. Not on desired branch.")
}
}
invisible()
}
report_fishy_files <- function(old_name = "master", new_name = "main") {
ui_todo("
Be sure to update files that refer to the default branch by name.
Consider searching within your project for {ui_value(old_name)}.")
# I don't want failure of a fishy file check to EVER cause
# git_default_branch_rename() to fail and prevent the call to
# git_default_branch_rediscover()
# using a simple try() wrapper because these hints are just "nice to have"
try(fishy_github_actions(new_name = new_name), silent = TRUE)
try(fishy_badges(old_name = old_name), silent = TRUE)
try(fishy_bookdown_config(old_name = old_name), silent = TRUE)
}
# good test cases: downlit, purrr, pkgbuild, zeallot, glue, bench,
# textshaping, scales
fishy_github_actions <- function(new_name = "main") {
if (!uses_github_actions()) {
return(invisible(character()))
}
workflow_dir <- proj_path(".github", "workflows")
workflows <- dir_ls(workflow_dir, regexp = "[.]ya?ml$")
f <- function(pth, new_name) {
x <- yaml::read_yaml(pth)
x_unlisted <- unlist(x)
locs <- grep("branches", re_match(names(x_unlisted), "[^//.]+$")$.match)
branches <- x_unlisted[locs]
length(branches) == 0 || new_name %in% branches
}
includes_branch_name <- map_lgl(workflows, f, new_name = new_name)
paths <- proj_rel_path(workflows[!includes_branch_name])
if (length(paths) == 0) {
return(invisible(character()))
}
paths <- sort(paths)
ui_paths <- map_chr(paths, ui_path)
ui_oops(c(
"These GitHub Action files don't mention the new default branch {ui_value(new_name)}:",
paste0("- ", ui_paths)
))
invisible(paths)
}
fishy_badges <- function(old_name = "master") {
path <- find_readme()
if (is.null(path)) {
return(invisible(character()))
}
readme_lines <- read_utf8(path)
badge_lines_range <- block_find(
readme_lines,
block_start = badge_start,
block_end = badge_end
)
if (length(badge_lines_range) != 2) {
return(invisible(character()))
}
badge_lines <- readme_lines[badge_lines_range[1]:badge_lines_range[2]]
if (!any(grepl(old_name, badge_lines))) {
return(invisible(character()))
}
ui_path <- ui_path(proj_rel_path(path))
ui_oops(c(
"Some badges may refer to the old default branch {ui_value(old_name)}:",
paste0("- ", ui_path)
))
invisible(path)
}
fishy_bookdown_config <- function(old_name = "master") {
# https://github.com/dncamp/shift/blob/a12a3fb0cd30ae864525f7a9f1f907a05f15f9a3/_bookdown.yml
# https://github.com/Jattan08/Wonderland/blob/b9e7ddc694871d1d13a2a02abe2d3b4a944c4653/_bookdown.yml
# edit: https://github.com/dncamp/shift/edit/master/%s
# view: https://github.com/dncamp/shift/blob/master/%s
# history: https://github.com/YOUR GITHUB USERNAME/YOUR REPO NAME/commits/master/%s
bookdown_config <- dir_ls(
proj_get(),
regexp = "_bookdown[.]ya?ml$",
recurse = TRUE
)
if (length(bookdown_config) == 0) {
return(invisible(character()))
}
# I am (very weakly) worried about more than 1 match, hence the [[1]]
bookdown_config <- purrr::discard(bookdown_config, ~ grepl("revdep", .x))[[1]]
bookdown_config_lines <- read_utf8(bookdown_config)
linky_lines <- grep("^(edit|view|history)", bookdown_config_lines, value = TRUE)
if (!any(grepl(old_name, linky_lines))) {
return(invisible(character()))
}
ui_path <- ui_path(proj_rel_path(bookdown_config))
ui_oops(c(
"The bookdown configuration file may refer to the old default branch {ui_value(old_name)}:",
paste0("- ", ui_path)
))
  invisible(bookdown_config)
}
# ==== end of usethis/R/git-default-branch.R ====

#' Initialise a git repository
#'
#' `use_git()` initialises a Git repository and adds important files to
#' `.gitignore`. If user consents, it also makes an initial commit.
#'
#' @param message Message to use for first commit.
#' @family git helpers
#' @export
#' @examples
#' \dontrun{
#' use_git()
#' }
use_git <- function(message = "Initial commit") {
needs_init <- !uses_git()
if (needs_init) {
ui_done("Initialising Git repo")
git_init()
}
use_git_ignore(git_ignore_lines)
if (git_uncommitted(untracked = TRUE)) {
git_ask_commit(message, untracked = TRUE)
}
if (needs_init) {
restart_rstudio("A restart of RStudio is required to activate the Git pane")
}
invisible(TRUE)
}
#' Add a git hook
#'
#' Sets up a git hook using the specified script. Creates the hook directory if
#' needed, and sets the correct permissions on the hook.
#'
#' @param hook Hook name. One of "pre-commit", "prepare-commit-msg",
#' "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch",
#' "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout",
#' "post-merge", "pre-push", "pre-auto-gc".
#' @param script Text of script to run
#' @family git helpers
#' @export
use_git_hook <- function(hook, script) {
check_uses_git()
hook_path <- proj_path(".git", "hooks", hook)
create_directory(path_dir(hook_path))
write_over(hook_path, script)
file_chmod(hook_path, "0744")
invisible()
}
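# Usage sketch (assumes an active project that is a Git repo; the hook script
# below is purely illustrative):
# use_git_hook(
#   "pre-commit",
#   script = "#!/bin/sh\nRscript -e 'devtools::document()'"
# )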
#' Tell Git to ignore files
#'
#' @param ignores Character vector of ignores, specified as file globs.
#' @param directory Directory, relative to the active project, in which to set
#'   the ignores.
#' @family git helpers
#' @export
use_git_ignore <- function(ignores, directory = ".") {
write_union(proj_path(directory, ".gitignore"), ignores)
rstudio_git_tickle()
}
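# Usage sketch (paths and globs are illustrative; they are passed verbatim to
# the relevant .gitignore via write_union() above):
# use_git_ignore(c("*.log", "secrets.yaml"))
# use_git_ignore("cache/", directory = "data-raw")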
#' Configure Git
#'
#' Sets Git options, for either the user or the project ("global" or "local", in
#' Git terminology). Wraps [gert::git_config_set()] and
#' [gert::git_config_global_set()]. To inspect Git config, see
#' [gert::git_config()].
#'
#' @param ... Name-value pairs, processed as
#' <[`dynamic-dots`][rlang::dyn-dots]>.
#'
#' @return Invisibly, the previous values of the modified components, as a named
#' list.
#' @inheritParams edit
#'
#' @family git helpers
#' @export
#' @examples
#' \dontrun{
#' # set the user's global user.name and user.email
#' use_git_config(user.name = "Jane", user.email = "[email protected]")
#'
#' # set the user.name and user.email locally, i.e. for current repo/project
#' use_git_config(
#' scope = "project",
#' user.name = "Jane",
#' user.email = "[email protected]"
#' )
#' }
use_git_config <- function(scope = c("user", "project"), ...) {
scope <- match.arg(scope)
dots <- list2(...)
stopifnot(is_dictionaryish(dots))
orig <- stats::setNames(
vector(mode = "list", length = length(dots)),
names(dots)
)
for (i in seq_along(dots)) {
nm <- names(dots)[[i]]
vl <- dots[[i]]
if (scope == "user") {
orig[nm] <- git_cfg_get(nm, "global") %||% list(NULL)
gert::git_config_global_set(nm, vl)
} else {
check_uses_git()
orig[nm] <- git_cfg_get(nm, "local") %||% list(NULL)
gert::git_config_set(nm, vl, repo = git_repo())
}
}
invisible(orig)
}
#' See or set the default Git protocol
#'
#' @description
#' Git operations that address a remote use a so-called "transport protocol".
#' usethis supports HTTPS and SSH. The protocol dictates the Git URL format used
#' when usethis needs to configure the first GitHub remote for a repo:
#' * `protocol = "https"` implies `https://github.com/<OWNER>/<REPO>.git`
#' * `protocol = "ssh"` implies `git@@github.com:<OWNER>/<REPO>.git`
#'
#' Two helper functions are available:
#' * `git_protocol()` reveals the protocol "in force". As of usethis v2.0.0,
#' this defaults to "https". You can change this for the duration of the
#' R session with `use_git_protocol()`. Change the default for all R
#' sessions with code like this in your `.Rprofile` (easily editable via
#' [edit_r_profile()]):
#' ```
#' options(usethis.protocol = "ssh")
#' ```
#' * `use_git_protocol()` sets the Git protocol for the current R session
#'
#' This protocol only affects the Git URL for newly configured remotes. All
#' existing Git remote URLs are always respected, whether HTTPS or SSH.
#'
#' @param protocol One of "https" or "ssh"
#'
#' @return The protocol, either "https" or "ssh"
#' @export
#'
#' @examples
#' \dontrun{
#' git_protocol()
#'
#' use_git_protocol("ssh")
#' git_protocol()
#'
#' use_git_protocol("https")
#' git_protocol()
#' }
git_protocol <- function() {
protocol <- tolower(getOption("usethis.protocol", "unset"))
if (identical(protocol, "unset")) {
ui_info("Defaulting to {ui_value('https')} Git protocol")
protocol <- "https"
} else {
check_protocol(protocol)
}
options("usethis.protocol" = protocol)
getOption("usethis.protocol")
}
#' @rdname git_protocol
#' @export
use_git_protocol <- function(protocol) {
options("usethis.protocol" = protocol)
invisible(git_protocol())
}
check_protocol <- function(protocol) {
if (!is_string(protocol) ||
!(tolower(protocol) %in% c("https", "ssh"))) {
options(usethis.protocol = NULL)
ui_stop("
{ui_code('protocol')} must be either {ui_value('https')} or \\
{ui_value('ssh')}")
}
invisible()
}
#' Configure and report Git remotes
#'
#' Two helpers are available:
#' * `use_git_remote()` sets the remote associated with `name` to `url`.
#' * `git_remotes()` reports the configured remotes, similar to
#' `git remote -v`.
#'
#' @param name A string giving the short name of a remote.
#' @param url A string giving the url of a remote.
#' @param overwrite Logical. Controls whether an existing remote can be
#' modified.
#'
#' @return Named list of Git remotes.
#' @export
#'
#' @examples
#' \dontrun{
#' # see current remotes
#' git_remotes()
#'
#' # add new remote named 'foo', a la `git remote add <name> <url>`
#' use_git_remote(name = "foo", url = "https://github.com/<OWNER>/<REPO>.git")
#'
#' # remove existing 'foo' remote, a la `git remote remove <name>`
#' use_git_remote(name = "foo", url = NULL, overwrite = TRUE)
#'
#' # change URL of remote 'foo', a la `git remote set-url <name> <newurl>`
#' use_git_remote(
#' name = "foo",
#' url = "https://github.com/<OWNER>/<REPO>.git",
#' overwrite = TRUE
#' )
#'
#' # Scenario: Fix remotes when you cloned someone's repo, but you should
#' # have fork-and-cloned (in order to make a pull request).
#'
#' # Store origin = main repo's URL, e.g., "[email protected]:<OWNER>/<REPO>.git"
#' upstream_url <- git_remotes()[["origin"]]
#'
#' # IN THE BROWSER: fork the main GitHub repo and get your fork's remote URL
#' my_url <- "[email protected]:<ME>/<REPO>.git"
#'
#' # Rotate the remotes
#' use_git_remote(name = "origin", url = my_url)
#' use_git_remote(name = "upstream", url = upstream_url)
#' git_remotes()
#'
#' # Scenario: Add upstream remote to a repo that you fork-and-cloned, so you
#' # can pull upstream changes.
#' # Note: If you fork-and-clone via `usethis::create_from_github()`, this is
#' # done automatically!
#'
#' # Get URL of main GitHub repo, probably in the browser
#' upstream_url <- "[email protected]:<OWNER>/<REPO>.git"
#' use_git_remote(name = "upstream", url = upstream_url)
#' }
use_git_remote <- function(name = "origin", url, overwrite = FALSE) {
check_name(name)
maybe_name(url)
check_bool(overwrite)
remotes <- git_remotes()
repo <- git_repo()
if (name %in% names(remotes) && !overwrite) {
ui_stop("
Remote {ui_value(name)} already exists. Use \\
{ui_code('overwrite = TRUE')} to edit it anyway.")
}
if (name %in% names(remotes)) {
if (is.null(url)) {
gert::git_remote_remove(remote = name, repo = repo)
} else {
gert::git_remote_set_url(url = url, remote = name, repo = repo)
}
} else if (!is.null(url)) {
gert::git_remote_add(url = url, name = name, repo = repo)
}
invisible(git_remotes())
}
#' @rdname use_git_remote
#' @export
git_remotes <- function() {
x <- gert::git_remote_list(repo = git_repo())
if (nrow(x) == 0) {
return(NULL)
}
stats::setNames(as.list(x$url), x$name)
}
# unexported function to improve my personal quality of life
git_clean <- function() {
if (!is_interactive() || !uses_git()) {
return(invisible())
}
st <- gert::git_status(staged = FALSE, repo = git_repo())
paths <- st[st$status == "new", ][["file"]]
n <- length(paths)
if (n == 0) {
ui_info("Found no untracked files")
return(invisible())
}
paths <- sort(paths)
ui_paths <- map_chr(paths, ui_path)
if (n > 10) {
ui_paths <- c(ui_paths[1:10], "...")
}
if (n == 1) {
file_hint <- "There is 1 untracked file:"
} else {
file_hint <- "There are {n} untracked files:"
}
ui_line(c(
file_hint,
paste0("* ", ui_paths)
))
if (ui_yeah("
Do you want to remove {if (n == 1) 'it' else 'them'}?",
yes = "yes", no = "no", shuffle = FALSE)) {
file_delete(paths)
ui_done("{n} file(s) deleted")
}
rstudio_git_tickle()
invisible()
}
#' Git/GitHub sitrep
#'
#' Get a situation report on your current Git/GitHub status. Useful for
#' diagnosing problems. The default is to report all values; provide values
#' for `tool` or `scope` to be more specific.
#'
#' @param tool Report for __git__ or __github__
#' @param scope Report globally for the current __user__, or locally for the
#' current __project__
#'
#' @export
#' @examples
#' \dontrun{
#' # report all
#' git_sitrep()
#'
#' # report git for current user
#' git_sitrep("git", "user")
#' }
git_sitrep <- function(tool = c("git", "github"),
scope = c("user", "project")) {
tool <- rlang::arg_match(tool, multiple = TRUE)
scope <- rlang::arg_match(scope, multiple = TRUE)
ui_silence(try(proj_get(), silent = TRUE))
# git (global / user) --------------------------------------------------------
init_default_branch <- git_cfg_get("init.defaultBranch", where = "global")
if ("git" %in% tool && "user" %in% scope) {
cli::cli_h3("Git global (user)")
git_user_sitrep("user")
kv_line("Global (user-level) gitignore file", git_ignore_path("user"))
vaccinated <- git_vaccinated()
kv_line("Vaccinated", vaccinated)
if (!vaccinated) {
ui_info("See {ui_code('?git_vaccinate')} to learn more")
}
kv_line("Default Git protocol", git_protocol())
kv_line("Default initial branch name", init_default_branch)
}
# github (global / user) -----------------------------------------------------
default_gh_host <- get_hosturl(default_api_url())
if ("github" %in% tool && "user" %in% scope) {
cli::cli_h3("GitHub user")
kv_line("Default GitHub host", default_gh_host)
pat_sitrep(default_gh_host, scope = "user")
}
# git and github for active project ------------------------------------------
if (!"project" %in% scope) {
return(invisible())
}
if (!proj_active()) {
ui_info("No active usethis project")
return(invisible())
}
cli::cli_h2(glue("Active usethis project: {ui_value(proj_get())}"))
if (!uses_git()) {
ui_info("Active project is not a Git repo")
return(invisible())
}
# current branch -------------------------------------------------------------
branch <- tryCatch(git_branch(), error = function(e) NULL)
tracking_branch <- if (is.null(branch)) NA_character_ else git_branch_tracking()
# TODO: can't really express with kv_line() helper
branch <- if (is.null(branch)) "<unset>" else branch
tracking_branch <- if (is.na(tracking_branch)) "<unset>" else tracking_branch
# local git config -----------------------------------------------------------
if ("git" %in% tool) {
cli::cli_h3("Git local (project)")
git_user_sitrep("project")
# default branch -------------------------------------------------------------
default_branch_sitrep()
# vertical alignment would make this nicer, but probably not worth it
ui_bullet(glue("
Current local branch -> remote tracking branch:
{ui_value(branch)} -> {ui_value(tracking_branch)}"))
}
# GitHub remote config -------------------------------------------------------
if ("github" %in% tool) {
cli::cli_h3("GitHub project")
cfg <- github_remote_config()
if (cfg$type == "no_github") {
ui_info("Project does not use GitHub")
return(invisible())
}
repo_host <- cfg$host_url
if (!is.na(repo_host) && repo_host != default_gh_host) {
cli::cli_text("Host:")
kv_line("Non-default GitHub host", repo_host)
pat_sitrep(repo_host, scope = "project", scold_for_renviron = FALSE)
cli::cli_text("Project:")
}
purrr::walk(format(cfg), ui_bullet)
}
invisible()
}
git_user_sitrep <- function(scope = c("user", "project")) {
scope <- rlang::arg_match(scope)
where <- where_from_scope(scope)
user <- git_user_get(where)
user_local <- git_user_get("local")
if (scope == "project" && !all(map_lgl(user_local, is.null))) {
ui_info("This repo has a locally configured user")
}
kv_line("Name", user$name)
kv_line("Email", user$email)
git_user_check(user)
invisible(NULL)
}
git_user_check <- function(user) {
if (all(map_lgl(user, is.null))) {
hint <-
'use_git_config(user.name = "<your name>", user.email = "<your email>")'
ui_oops(
"Git user's name and email are not set. Configure using {ui_code(hint)}."
)
return(invisible(NULL))
}
if (is.null(user$name)) {
hint <- 'use_git_config(user.name = "<your name>")'
ui_oops("Git user's name is not set. Configure using {ui_code(hint)}.")
}
if (is.null(user$email)) {
hint <- 'use_git_config(user.email = "<your email>")'
ui_oops("Git user's email is not set. Configure using {ui_code(hint)}.")
}
}
# TODO: when I really overhaul the UI, determine if I can just re-use the
# git_default_branch() error messages in the sitrep
# the main point is converting an error to an "oops" type of message
default_branch_sitrep <- function() {
tryCatch(
kv_line("Default branch", git_default_branch()),
error_default_branch = function(e) {
if (has_name(e, "db_local")) {
# FYI existence of db_local implies existence of db_source
ui_oops("
Default branch mismatch between local repo and remote!
{ui_value(e$db_source$name)} remote default branch: \\
{ui_value(e$db_source$default_branch)}
Local default branch: {ui_value(e$db_local)}
Call {ui_code('git_default_branch_rediscover()')} to resolve this.")
} else if (has_name(e, "db_source")) {
ui_oops("
Default branch mismatch between local repo and remote!
{ui_value(e$db_source$name)} remote default branch: \\
{ui_value(e$db_source$default_branch)}
Local repo has no branch by that name nor any other obvious candidates.
Call {ui_code('git_default_branch_rediscover()')} to resolve this.")
} else {
ui_oops("Default branch cannot be determined.")
}
}
)
}
# Vaccination -------------------------------------------------------------
#' Vaccinate your global gitignore file
#'
#' Adds `.Rproj.user`, `.Rhistory`, `.Rdata`, `.httr-oauth`, `.DS_Store`, and `.quarto` to
#' your global (a.k.a. user-level) `.gitignore`. This is good practice as it
#' decreases the chance that you will accidentally leak credentials to GitHub.
#' `git_vaccinate()` also tries to detect and fix the situation where you have a
#' global gitignore file, but it's missing from your global Git config.
#'
#' @export
git_vaccinate <- function() {
ensure_core_excludesFile()
path <- git_ignore_path(scope = "user")
if (!file_exists(path)) {
ui_done("Creating the global (user-level) gitignore: {ui_path(path)}")
}
write_union(path, git_ignore_lines)
}
git_vaccinated <- function() {
path <- git_ignore_path("user")
if (is.null(path) || !file_exists(path)) {
return(FALSE)
}
# on Windows, if ~/ is present, take care to expand it the fs way
lines <- read_utf8(user_path_prep(path))
all(git_ignore_lines %in% lines)
}
git_ignore_lines <- c(
".Rproj.user",
".Rhistory",
".Rdata",
".httr-oauth",
".DS_Store",
".quarto"
)
# ==== end of usethis/R/git.R ====

#' Set up a GitHub Actions workflow
#'
#' @description
#' Sets up continuous integration (CI) for an R package that is developed on
#' GitHub using [GitHub Actions](https://github.com/features/actions). CI can be
#' used to trigger various operations for each push or pull request, e.g.
#' running `R CMD check` or building and deploying a pkgdown site.
#'
#' ## Workflows
#'
#' There are four particularly important workflows that are used by many
#' packages:
#'
#' * `check-standard`: Run `R CMD check` using R-latest on Linux, Mac, and
#' Windows, and using R-devel and R-oldrel on Linux. This is a good baseline
#' if you plan on submitting your package to CRAN.
#' * `test-coverage`: Compute test coverage and report to
#' <https://about.codecov.io> by calling [covr::codecov()].
#' * `pkgdown`: Automatically build and publish a pkgdown website.
#' But we recommend instead calling [use_pkgdown_github_pages()] which
#' performs other important set up.
#' * `pr-commands`: Enables the use of two R-specific commands in pull request
#' issue comments: `/document` to run `roxygen2::roxygenise()` and
#' `/style` to run `styler::style_pkg()`. Both will update the PR with any
#' changes once they're done.
#'
#' If you call `use_github_action()` without arguments, you'll be prompted to
#' pick from one of these. Otherwise you can see a complete list of
#' possibilities provided by r-lib at
#' <https://github.com/r-lib/actions/tree/v2/examples>, or you can supply
#' your own `url` to use any other workflow.
#'
#' @param name For `use_github_action()`: Name of one of the example workflows
#'   from <https://github.com/r-lib/actions/tree/v2/examples> (with or
#'   without extension), e.g. `"pkgdown"`, `"check-standard.yaml"`.
#'
#'   If the `name` starts with `check-`, `save_as` will default to
#'   `R-CMD-check.yaml` and `badge` will default to `TRUE`.
#' @param ref Desired Git reference, usually the name of a tag (`"v2"`) or
#' branch (`"main"`). Other possibilities include a commit SHA (`"d1c516d"`)
#' or `"HEAD"` (meaning "tip of remote's default branch"). If not specified,
#' defaults to the latest published release of `r-lib/actions`
#' (<https://github.com/r-lib/actions/releases>).
#' @param url The full URL to a `.yaml` file on GitHub. See more details in
#' [use_github_file()].
#' @param save_as Name of the local workflow file. Defaults to `name` or
#' `fs::path_file(url)` for `use_github_action()`. Do not specify any other
#' part of the path; the parent directory will always be `.github/workflows`,
#' within the active project.
#' @param readme The full URL to a `README` file that provides more details
#' about the workflow. Ignored when `url` is `NULL`.
#' @param badge Should we add a badge to the `README`?
#' @inheritParams use_template
#'
#' @examples
#' \dontrun{
#' use_github_action()
#'
#' use_github_action_check_standard()
#'
#' use_github_action("pkgdown")
#' }
#' @export
use_github_action <- function(name = NULL,
ref = NULL,
url = NULL,
save_as = NULL,
readme = NULL,
ignore = TRUE,
open = FALSE,
badge = NULL) {
maybe_name(name)
maybe_name(ref)
maybe_name(url)
maybe_name(save_as)
maybe_name(readme)
check_bool(ignore)
check_bool(open)
check_bool(badge, allow_null = TRUE)
if (is.null(url)) {
name <- name %||% choose_gha_workflow()
if (path_ext(name) == "") {
name <- path_ext_set(name, "yaml")
}
ref <- ref %||% latest_release()
url <- glue(
"https://raw.githubusercontent.com/r-lib/actions/{ref}/examples/{name}"
)
readme <- glue(
"https://github.com/r-lib/actions/blob/{ref}/examples/README.md"
)
}
withr::defer(rstudio_git_tickle())
use_dot_github(ignore = ignore)
if (is.null(save_as)) {
if (is_check_action(url)) {
save_as <- "R-CMD-check.yaml"
} else {
save_as <- path_file(url)
}
}
save_as <- path(".github", "workflows", save_as)
create_directory(path_dir(proj_path(save_as)))
if (grepl("^http", url)) {
# `ignore = FALSE` because we took care of this at directory level, above
new <- use_github_file(url, save_as = save_as, ignore = FALSE, open = open)
} else {
# local file case, https://github.com/r-lib/usethis/issues/1548
contents <- read_utf8(url)
new <- write_over(proj_path(save_as), contents)
}
if (!is.null(readme)) {
ui_todo("Learn more at <{readme}>.")
}
badge <- badge %||% is_check_action(url)
if (badge) {
use_github_actions_badge(path_file(save_as))
}
invisible(new)
}
choose_gha_workflow <- function(error_call = caller_env()) {
if (!is_interactive()) {
cli::cli_abort(
"{.arg name} is absent and must be supplied",
call = error_call
)
}
prompt <- cli::format_inline(
"Which action do you want to add? (0 to exit)\n",
"(See {.url https://github.com/r-lib/actions/tree/v2/examples} for other options)"
)
# Any changes here also need to be reflected in documentation
workflows <- c(
"check-standard" = "Run `R CMD check` on Linux, macOS, and Windows",
"test-coverage" = "Compute test coverage and report to https://about.codecov.io",
"pr-commands" = "Add /document and /style commands for pull requests"
)
options <- paste0(cli::style_bold(names(workflows)), ": ", workflows)
choice <- utils::menu(
title = prompt,
choices = options
)
if (choice == 0) {
cli::cli_abort("Selection terminated", call = error_call)
}
names(workflows)[choice]
}
is_check_action <- function(url) {
grepl("^check-", path_file(url))
}
#' Generates a GitHub Actions badge
#'
#' Generates a GitHub Actions badge and that's all. This exists primarily for
#' internal use.
#'
#' @keywords internal
#' @param name Name of the workflow's YAML configuration file (with or without
#' extension), e.g. `"R-CMD-check"`, `"R-CMD-check.yaml"`.
#' @inheritParams use_github_action
#' @export
use_github_actions_badge <- function(name = "R-CMD-check.yaml",
repo_spec = NULL) {
if (path_ext(name) == "") {
name <- path_ext_set(name, "yaml")
}
repo_spec <- repo_spec %||% target_repo_spec()
enc_name <- utils::URLencode(name)
img <- glue("https://github.com/{repo_spec}/actions/workflows/{enc_name}/badge.svg")
url <- glue("https://github.com/{repo_spec}/actions/workflows/{enc_name}")
use_badge(path_ext_remove(name), url, img)
}
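# Usage sketch ("OWNER/REPO" is an assumption; normally the spec is inferred
# from the project's GitHub remotes via target_repo_spec()):
# use_github_actions_badge("R-CMD-check.yaml", repo_spec = "OWNER/REPO")
# which links a badge image of the form
# https://github.com/OWNER/REPO/actions/workflows/R-CMD-check.yaml/badge.svg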
# tidyverse GHA setup ----------------------------------------------------------
#' @details
#' * `use_tidy_github_actions()`: Sets up the following workflows using [GitHub
#' Actions](https://github.com/features/actions):
#' - Run `R CMD check` on the current release, devel, and four previous
#' versions of R. The build matrix also ensures `R CMD check` is run at
#' least once on each of the three major operating systems (Linux, macOS,
#' and Windows).
#' - Report test coverage.
#' - Build and deploy a pkgdown site.
#' - Provide two commands to be used in pull requests: `/document` to run
#' `roxygen2::roxygenise()` and update the PR, and `/style` to run
#' `styler::style_pkg()` and update the PR.
#'
#' This is how the tidyverse team checks its packages, but it is overkill
#' for less widely used packages. Consider using the more streamlined
#' workflows set up by [use_github_actions()] or
#' [use_github_action_check_standard()].
#' @export
#' @rdname tidyverse
#' @inheritParams use_github_action
use_tidy_github_actions <- function(ref = NULL) {
repo_spec <- target_repo_spec()
use_github_action("check-full.yaml", ref = ref, badge = TRUE)
use_github_action("pr-commands", ref = ref)
use_github_action("pkgdown", ref = ref)
use_coverage(repo_spec = repo_spec)
use_github_action("test-coverage", ref = ref)
old_configs <- proj_path(c(".travis.yml", "appveyor.yml"))
has_appveyor_travis <- file_exists(old_configs)
if (any(has_appveyor_travis)) {
if (ui_yeah(
"Remove existing {ui_path('.travis.yml')} and {ui_path('appveyor.yml')}?"
)) {
file_delete(old_configs[has_appveyor_travis])
ui_todo("Remove old badges from README")
}
}
invisible(TRUE)
}
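# Usage sketch ("v2" is just an example tag; by default the latest release of
# r-lib/actions is used, via latest_release() below):
# use_tidy_github_actions(ref = "v2")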
# GHA helpers ------------------------------------------------------------------
uses_github_actions <- function() {
path <- proj_path(".github", "workflows")
file_exists(path)
}
check_uses_github_actions <- function() {
if (uses_github_actions()) {
return(invisible())
}
ui_stop("
Cannot detect that package {ui_value(project_name())} already \\
uses GitHub Actions.
Do you need to run {ui_code('use_github_actions()')}?")
}
latest_release <- function(repo_spec = "https://github.com/r-lib/actions") {
parsed <- parse_repo_url(repo_spec)
# https://docs.github.com/en/rest/reference/releases#list-releases
raw_releases <- gh::gh(
"/repos/{owner}/{repo}/releases",
owner = spec_owner(parsed$repo_spec),
repo = spec_repo(parsed$repo_spec),
.api_url = parsed$host,
.limit = Inf
)
tag_names <- purrr::discard(
map_chr(raw_releases, "tag_name"),
map_lgl(raw_releases, "prerelease")
)
pick_tag(tag_names)
}
# 1) filter to releases in the latest major version series
# 2) return the max, according to R's numeric_version logic
pick_tag <- function(nm) {
dat <- data.frame(nm = nm, stringsAsFactors = FALSE)
dat$version <- numeric_version(sub("^[^0-9]*", "", dat$nm))
dat <- dat[dat$version == max(dat$version), ]
dat$nm[1]
}
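# Worked example (illustrative): given tag names c("v2", "v2.3.1", "v1.0.3"),
# the parsed versions are 2, 2.3.1 and 1.0.3; the maximum is 2.3.1, so
# pick_tag() returns "v2.3.1".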
# ==== end of usethis/R/github-actions.R ====

#' Manage GitHub issue labels
#'
#' @description
#' `use_github_labels()` can create new labels, update colours and descriptions,
#' and optionally delete GitHub's default labels (if `delete_default = TRUE`).
#' It will never delete labels that have associated issues.
#'
#' `use_tidy_github_labels()` calls `use_github_labels()` with tidyverse
#' conventions powered by `tidy_labels()`, `tidy_labels_rename()`,
#' `tidy_label_colours()` and `tidy_label_descriptions()`.
#'
#' ## tidyverse label usage
#' Labels are used as part of the issue-triage process, designed to minimise the
#' time spent re-reading issues. The absence of a label indicates that an issue
#' is new, and has yet to be triaged.
#'
#' There are four mutually exclusive labels that indicate the overall "type" of
#' issue:
#'
#' * `bug`: an unexpected problem or unintended behavior.
#' * `documentation`: requires changes to the docs.
#' * `feature`: feature requests and enhancements.
#' * `upkeep`: general package maintenance work that makes future development
#' easier.
#'
#' Then there are five labels that are needed in most repositories:
#'
#' * `breaking change`: issue/PR requires a breaking change, so it should
#'   not be included in patch releases.
#' * `reprex` indicates that an issue does not have a minimal reproducible
#' example, and that a reply has been sent requesting one from the user.
#' * `good first issue` indicates a good issue for first-time contributors.
#' * `help wanted` indicates that a maintainer wants help on an issue.
#' * `wip` indicates that someone is working on it or has promised to.
#'
#' Finally most larger repos will accumulate their own labels for specific
#' areas of functionality. For example, usethis has labels like "description",
#' "paths", "readme", because time has shown these to be common sources of
#' problems. These labels are helpful for grouping issues so that you can
#' tackle related problems at the same time.
#'
#' Repo-specific labels should have a grey background (`#eeeeee`) and an emoji.
#' This keeps the issue page visually harmonious while still giving enough
#' variation to easily distinguish different types of label.
#'
#' @param repo_spec,host,auth_token `r lifecycle::badge("deprecated")`: These
#' arguments are now deprecated and will be removed in the future. Any input
#' provided via these arguments is not used. The target repo, host, and auth
#' token are all now determined from the current project's Git remotes.
#' @param labels A character vector giving labels to add.
#' @param rename A named vector with names giving old names and values giving
#' new names.
#' @param colours,descriptions Named character vectors giving hexadecimal
#' colours (like `e02a2a`) and longer descriptions. The names should match
#' label names, and anything unmatched will be left unchanged. If you create a
#' new label, and don't supply colours, it will be given a random colour.
#' @param delete_default If `TRUE`, removes GitHub default labels that do not
#' appear in the `labels` vector and that do not have associated issues.
#'
#' @export
#' @examples
#' \dontrun{
#' # typical use in, e.g., a new tidyverse project
#' use_github_labels(delete_default = TRUE)
#'
#' # create labels without changing colours/descriptions
#' use_github_labels(
#' labels = c("foofy", "foofier", "foofiest"),
#' colours = NULL,
#' descriptions = NULL
#' )
#'
#' # change descriptions without changing names/colours
#' use_github_labels(
#' labels = NULL,
#' colours = NULL,
#' descriptions = c("foofiest" = "the foofiest issue you ever saw")
#' )
#' }
use_github_labels <- function(repo_spec = deprecated(),
labels = character(),
rename = character(),
colours = character(),
descriptions = character(),
delete_default = FALSE,
host = deprecated(),
auth_token = deprecated()) {
if (lifecycle::is_present(repo_spec)) {
deprecate_warn_repo_spec("use_github_labels")
}
if (lifecycle::is_present(host)) {
deprecate_warn_host("use_github_labels")
}
if (lifecycle::is_present(auth_token)) {
deprecate_warn_auth_token("use_github_labels")
}
tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork"))
check_can_push(tr = tr, "to modify labels")
gh <- gh_tr(tr)
cur_labels <- gh("GET /repos/{owner}/{repo}/labels")
label_attr <- function(x, l, mapper = map_chr) {
mapper(l, x, .default = NA)
}
# Rename existing labels
cur_label_names <- label_attr("name", cur_labels)
to_rename <- intersect(cur_label_names, names(rename))
if (length(to_rename) > 0) {
delta <- purrr::map2_chr(
to_rename, rename[to_rename],
~ paste0(ui_value(.x), " -> ", ui_value(.y))
)
ui_done("Renaming labels: {paste0(delta, collapse = '\n')}")
# Can't do this at label level, i.e. "old_label_name --> new_label_name"
# Fails if "new_label_name" already exists
# https://github.com/r-lib/usethis/issues/551
# Must first PATCH issues, then sort out labels
issues <- map(
to_rename,
~ gh("GET /repos/{owner}/{repo}/issues", labels = .x)
)
issues <- purrr::flatten(issues)
number <- map_int(issues, "number")
old_labels <- map(issues, "labels")
df <- data.frame(
number = rep.int(number, lengths(old_labels))
)
df$labels <- purrr::flatten(old_labels)
df$labels <- map_chr(df$labels, "name")
# enact relabelling
m <- match(df$labels, names(rename))
df$labels[!is.na(m)] <- rename[m[!is.na(m)]]
df <- df[!duplicated(df), ]
new_labels <- split(df$labels, df$number)
purrr::iwalk(
new_labels,
~ gh(
"PATCH /repos/{owner}/{repo}/issues/{issue_number}",
issue_number = .y,
labels = I(.x)
)
)
# issues have correct labels now; safe to edit labels themselves
purrr::walk(
to_rename,
~ gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = .x)
)
labels <- union(labels, setdiff(rename, cur_label_names))
} else {
ui_info("No labels need renaming")
}
cur_labels <- gh("GET /repos/{owner}/{repo}/labels")
cur_label_names <- label_attr("name", cur_labels)
# Add missing labels
if (all(labels %in% cur_label_names)) {
ui_info("No new labels needed")
} else {
to_add <- setdiff(labels, cur_label_names)
ui_done("Adding missing labels: {ui_value(to_add)}")
for (label in to_add) {
gh(
"POST /repos/{owner}/{repo}/labels",
name = label,
color = purrr::pluck(colours, label, .default = random_colour()),
description = purrr::pluck(descriptions, label, .default = "")
)
}
}
cur_labels <- gh("GET /repos/{owner}/{repo}/labels")
cur_label_names <- label_attr("name", cur_labels)
# Update colours
cur_label_colours <- set_names(
label_attr("color", cur_labels), cur_label_names
)
if (identical(cur_label_colours[names(colours)], colours)) {
ui_info("Label colours are up-to-date")
} else {
to_update <- intersect(cur_label_names, names(colours))
ui_done("Updating colours: {ui_value(to_update)}")
for (label in to_update) {
gh(
"PATCH /repos/{owner}/{repo}/labels/{name}",
name = label,
color = colours[[label]]
)
}
}
# Update descriptions
cur_label_descriptions <- set_names(
label_attr("description", cur_labels), cur_label_names
)
if (identical(cur_label_descriptions[names(descriptions)], descriptions)) {
ui_info("Label descriptions are up-to-date")
} else {
to_update <- intersect(cur_label_names, names(descriptions))
ui_done("Updating descriptions: {ui_value(to_update)}")
for (label in to_update) {
gh(
"PATCH /repos/{owner}/{repo}/labels/{name}",
name = label,
description = descriptions[[label]]
)
}
}
# Delete unused default labels
if (delete_default) {
default <- map_lgl(cur_labels, "default")
to_remove <- setdiff(cur_label_names[default], labels)
if (length(to_remove) > 0) {
ui_done("Removing default labels: {ui_value(to_remove)}")
for (label in to_remove) {
issues <- gh("GET /repos/{owner}/{repo}/issues", labels = label)
if (length(issues) > 0) {
ui_todo("Delete {ui_value(label)} label manually; it has associated issues")
} else {
gh("DELETE /repos/{owner}/{repo}/labels/{name}", name = label)
}
}
}
}
}
#' @export
#' @rdname use_github_labels
use_tidy_github_labels <- function() {
use_github_labels(
labels = tidy_labels(),
rename = tidy_labels_rename(),
colours = tidy_label_colours(),
descriptions = tidy_label_descriptions(),
delete_default = TRUE
)
}
#' @rdname use_github_labels
#' @export
tidy_labels <- function() {
names(tidy_label_colours())
}
#' @rdname use_github_labels
#' @export
tidy_labels_rename <- function() {
c(
# before = after
"enhancement" = "feature",
"question" = "reprex",
"good first issue" = "good first issue :heart:",
"help wanted" = "help wanted :heart:",
"docs" = "documentation"
)
}
#' @rdname use_github_labels
#' @export
tidy_label_colours <- function() {
# http://tristen.ca/hcl-picker/#/hlc/5/0.26/E0B3A2/E1B996
c(
"breaking change :skull_and_crossbones:" = "E0B3A2",
"bug" = "E0B3A2",
"documentation" = "CBBAB8",
"feature" = "B4C3AE",
"upkeep" = "C2ACC0",
"good first issue :heart:" = "CBBAB8",
"help wanted :heart:" = "C5C295",
"reprex" = "C5C295",
"tidy-dev-day :nerd_face:" = "CBBAB8"
)
}
#' @rdname use_github_labels
#' @export
tidy_label_descriptions <- function() {
c(
"bug" = "an unexpected problem or unintended behavior",
"feature" = "a feature request or enhancement",
"upkeep" = "maintenance, infrastructure, and similar",
"reprex" = "needs a minimal reproducible example",
"wip" = "work in progress",
"documentation" = "",
"good first issue :heart:" = "good issue for first-time contributors",
"help wanted :heart:" = "we'd love your help!",
"breaking change :skull_and_crossbones:" = "API change likely to affect existing code",
"tidy-dev-day :nerd_face:" = "Tidyverse Developer Day rstd.io/tidy-dev-day"
)
}
random_colour <- function() {
format(as.hexmode(sample(256 * 256 * 256 - 1, 1)), width = 6)
}
# ==== end of usethis/R/github-labels.R ====

#' Configure a GitHub Pages site
#'
#' Activates or reconfigures a GitHub Pages site for a project hosted on GitHub.
#' This function anticipates two specific usage modes:
#' * Publish from the root directory of a `gh-pages` branch, which is assumed to
#' be only (or at least primarily) a remote branch. Typically the `gh-pages`
#' branch is managed by an automatic "build and deploy" job, such as the one
#' configured by [`use_github_action("pkgdown")`][use_github_action()].
#' * Publish from the `"/docs"` directory of a "regular" branch, probably the
#' repo's default branch. The user is assumed to have a plan for how they will
#' manage the content below `"/docs"`.
#'
#' @param branch,path Branch and path for the site source. The default of
#' `branch = "gh-pages"` and `path = "/"` reflects strong GitHub support for
#' this configuration: when a `gh-pages` branch is first created, it is
#' *automatically* published to Pages, using the source found in `"/"`. If a
#' `gh-pages` branch does not yet exist on the host, `use_github_pages()`
#' creates an empty, orphan remote branch.
#'
#' The most common alternative is to use the repo's default branch, coupled
#' with `path = "/docs"`. It is the user's responsibility to ensure that this
#' `branch` pre-exists on the host.
#'
#' Note that GitHub does not support an arbitrary `path` and, at the time of
#' writing, only `"/"` or `"/docs"` are accepted.
#' @param cname Optional, custom domain name. The `NA` default means "don't set
#' or change this", whereas a value of `NULL` removes any previously
#' configured custom domain.
#'
#' Note that this *can* add or modify a CNAME file in your repository. If you
#' are using Pages to host a pkgdown site, it is better to specify its URL in
#' the pkgdown config file and let pkgdown manage CNAME.
#'
#' @seealso
#' * [use_pkgdown_github_pages()] combines `use_github_pages()` with other
#' functions to fully configure a pkgdown site
#' * <https://docs.github.com/en/pages>
#' * <https://docs.github.com/en/rest/pages>
#' @return Site metadata returned by the GitHub API, invisibly
#' @export
#'
#' @examples
#' \dontrun{
#' use_github_pages()
#' use_github_pages(branch = git_default_branch(), path = "/docs")
#' }
use_github_pages <- function(branch = "gh-pages", path = "/", cname = NA) {
check_name(branch)
check_name(path)
check_string(cname, allow_empty = FALSE, allow_na = TRUE, allow_null = TRUE)
tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork"))
check_can_push(tr = tr, "to turn on GitHub Pages")
gh <- gh_tr(tr)
safe_gh <- purrr::safely(gh)
if (branch == "gh-pages") {
new_branch <- create_gh_pages_branch(tr, branch = "gh-pages")
if (new_branch) {
# merely creating gh-pages branch automatically activates publishing
# BUT we need to give the servers time to sync up before a new GET
# retrieves accurate info... ask me how I know
Sys.sleep(2)
}
}
site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]]
if (is.null(site)) {
ui_done("Activating GitHub Pages for {ui_value(tr$repo_spec)}")
site <- gh(
"POST /repos/{owner}/{repo}/pages",
source = list(branch = branch, path = path),
.accept = "application/vnd.github.switcheroo-preview+json"
)
}
need_update <-
site$source$branch != branch ||
site$source$path != path ||
(is.null(cname) && !is.null(site$cname)) ||
(is_string(cname) && (is.null(site$cname) || cname != site$cname))
if (need_update) {
args <- list(
endpoint = "PUT /repos/{owner}/{repo}/pages",
source = list(branch = branch, path = path)
)
if (is.null(cname) && !is.null(site$cname)) {
# this goes out as a JSON `null`, which is necessary to clear cname
args$cname <- NA
}
if (is_string(cname) && (is.null(site$cname) || cname != site$cname)) {
args$cname <- cname
}
Sys.sleep(2)
exec(gh, !!!args)
Sys.sleep(2)
site <- safe_gh("GET /repos/{owner}/{repo}/pages")[["result"]]
}
ui_done("GitHub Pages is publishing from:")
if (!is.null(site$cname)) {
kv_line("Custom domain", site$cname)
}
kv_line("URL", site$html_url)
kv_line("Branch", site$source$branch)
kv_line("Path", site$source$path)
invisible(site)
}
# returns FALSE if it does NOT create the branch (because it already exists)
# returns TRUE if it does create the branch
create_gh_pages_branch <- function(tr, branch = "gh-pages") {
gh <- gh_tr(tr)
safe_gh <- purrr::safely(gh)
branch_GET <- safe_gh(
"GET /repos/{owner}/{repo}/branches/{branch}",
branch = branch
)
if (!inherits(branch_GET$error, "http_error_404")) {
return(FALSE)
}
ui_done("
Initializing empty, orphan {ui_value(branch)} branch in GitHub repo \\
{ui_value(tr$repo_spec)}")
# GitHub no longer allows you to directly create an empty tree
# hence this roundabout method of getting an orphan branch with no files
tree <- gh(
"POST /repos/{owner}/{repo}/git/trees",
tree = list(list(
path = "_temp_file_ok_to_delete",
mode = "100644",
type = "blob",
content = ""
))
)
commit <- gh(
"POST /repos/{owner}/{repo}/git/commits",
message = "Init orphan branch",
tree = tree$sha
)
ref <- gh(
"POST /repos/{owner}/{repo}/git/refs",
ref = glue("refs/heads/{branch}"),
sha = commit$sha
)
# this should succeed, but if somehow it does not, it's not worth failing and
# leaving pkgdown + GitHub Pages setup half-done --> why I use safe_gh()
safe_gh(
"DELETE /repos/{owner}/{repo}/contents/_temp_file_ok_to_delete",
message = "Remove temp file",
sha = purrr::pluck(tree, "tree", 1, "sha"),
branch = branch
)
TRUE
}
# ==== end of usethis/R/github-pages.R ====

#' Connect a local repo with GitHub
#'
#' @description
#' `use_github()` takes a local project and:
#' * Checks that the initial state is good to go:
#' - Project is already a Git repo
#' - Current branch is the default branch, e.g. `main` or `master`
#' - No uncommitted changes
#' - No pre-existing `origin` remote
#' * Creates an associated repo on GitHub
#' * Adds that GitHub repo to your local repo as the `origin` remote
#' * Makes an initial push to GitHub
#' * Calls [use_github_links()], if the project is an R package
#' * Configures `origin/DEFAULT` to be the upstream branch of the local
#' `DEFAULT` branch, e.g. `main` or `master`
#'
#' See below for the authentication setup that is necessary for all of this to
#' work.
#'
#' @template double-auth
#'
#' @param organisation If supplied, the repo will be created under this
#' organisation, instead of the login associated with the GitHub token
#' discovered for this `host`. The user's role and the token's scopes must be
#' such that you have permission to create repositories in this
#' `organisation`.
#' @param private If `TRUE`, creates a private repository.
#' @param visibility Only relevant for organisation-owned repos associated with
#' certain GitHub Enterprise products. The special "internal" `visibility`
#' grants read permission to all organisation members, i.e. it's intermediate
#' between "private" and "public", within GHE. When specified, `visibility`
#' takes precedence over `private = TRUE/FALSE`.
#' @inheritParams git_protocol
#' @param host GitHub host to target, passed to the `.api_url` argument of
#' [gh::gh()]. If unspecified, gh defaults to "https://api.github.com",
#' although gh's default can be customised by setting the GITHUB_API_URL
#' environment variable.
#'
#' For a hypothetical GitHub Enterprise instance, either
#' "https://github.acme.com/api/v3" or "https://github.acme.com" is
#' acceptable.
#' @param auth_token,credentials `r lifecycle::badge("deprecated")`: No longer
#' consulted now that usethis uses the gert package for Git operations,
#' instead of git2r; gert relies on the credentials package for auth. The API
#' requests are now authorized with the token associated with the `host`, as
#' retrieved by [gh::gh_token()].
#'
#' @export
#' @examples
#' \dontrun{
#' pkgpath <- file.path(tempdir(), "testpkg")
#' create_package(pkgpath)
#'
#' ## now, working inside "testpkg", initialize git repository
#' use_git()
#'
#' ## create github repository and configure as git remote
#' use_github()
#' }
use_github <- function(organisation = NULL,
private = FALSE,
visibility = c("public", "private", "internal"),
protocol = git_protocol(),
host = NULL,
auth_token = deprecated(),
credentials = deprecated()) {
if (lifecycle::is_present(auth_token)) {
deprecate_warn_auth_token("use_github")
}
if (lifecycle::is_present(credentials)) {
deprecate_warn_credentials("use_github")
}
visibility_specified <- !missing(visibility)
visibility <- match.arg(visibility)
check_protocol(protocol)
check_uses_git()
default_branch <- git_default_branch()
check_current_branch(
is = default_branch,
# glue-ing happens inside check_current_branch(), where `gb` gives the
# current branch
"Must be on the default branch ({ui_value(is)}), not {ui_value(gb)}."
)
challenge_uncommitted_changes(msg = "
There are uncommitted changes and we're about to create and push to a new \\
GitHub repo")
check_no_origin()
if (is.null(organisation)) {
if (visibility_specified) {
ui_stop("
The {ui_code('visibility')} setting is only relevant for
organisation-owned repos, within the context of certain \\
GitHub Enterprise products.")
}
visibility <- if (private) "private" else "public"
}
if (!is.null(organisation) && !visibility_specified) {
visibility <- if (private) "private" else "public"
}
whoami <- suppressMessages(gh::gh_whoami(.api_url = host))
if (is.null(whoami)) {
ui_stop("
Unable to discover a GitHub personal access token
A token is required in order to create and push to a new repo
Call {ui_code('gh_token_help()')} for help configuring a token")
}
empirical_host <- parse_github_remotes(glue("{whoami$html_url}/REPO"))$host
if (empirical_host != "github.com") {
ui_info("Targeting the GitHub host {ui_value(empirical_host)}")
}
owner <- organisation %||% whoami$login
repo_name <- project_name()
check_no_github_repo(owner, repo_name, host)
repo_desc <- if (is_package()) proj_desc()$get_field("Title") %||% "" else ""
repo_desc <- gsub("\n", " ", repo_desc)
repo_spec <- glue("{owner}/{repo_name}")
visibility_string <- if (visibility == "public") "" else glue("{visibility} ")
ui_done("Creating {visibility_string}GitHub repository {ui_value(repo_spec)}")
if (is.null(organisation)) {
create <- gh::gh(
"POST /user/repos",
name = repo_name,
description = repo_desc,
private = private,
.api_url = host
)
} else {
create <- gh::gh(
"POST /orgs/{org}/repos",
org = organisation,
name = repo_name,
description = repo_desc,
visibility = visibility,
# this is necessary to set `visibility` in GHE 2.22 (but not in 3.2)
# hopefully it's harmless when not needed
.accept = "application/vnd.github.nebula-preview+json",
.api_url = host
)
}
origin_url <- switch(
protocol,
https = create$clone_url,
ssh = create$ssh_url
)
withr::defer(view_url(create$html_url))
ui_done("Setting remote {ui_value('origin')} to {ui_value(origin_url)}")
use_git_remote("origin", origin_url)
if (is_package()) {
# we tryCatch(), because we can't afford any failure here to result in not
# doing the first push and configuring the default branch
# such an incomplete setup is hard to diagnose / repair post hoc
tryCatch(
use_github_links(),
error = function(e) NULL
)
}
git_push_first(default_branch, "origin")
repo <- git_repo()
gbl <- gert::git_branch_list(local = TRUE, repo = repo)
if (nrow(gbl) > 1) {
ui_done("
Setting {ui_value(default_branch)} as default branch on GitHub")
gh::gh(
"PATCH /repos/{owner}/{repo}",
owner = owner, repo = repo_name,
default_branch = default_branch,
.api_url = host
)
}
invisible()
}
#' Use GitHub links in URL and BugReports
#'
#' @description
#' Populates the `URL` and `BugReports` fields of a GitHub-using R package with
#' appropriate links. The GitHub repo to link to is determined from the current
#' project's GitHub remotes:
#' * If we are not working with a fork, this function expects `origin` to be a
#' GitHub remote and the links target that repo.
#' * If we are working in a fork, this function expects to find two GitHub
#' remotes: `origin` (the fork) and `upstream` (the fork's parent) remote. In
#' an interactive session, the user can confirm which repo to use for the
#' links. In a noninteractive session, links are formed using `upstream`.
#'
#' @param host,auth_token `r lifecycle::badge("deprecated")`: No longer consulted
#' now that usethis consults the current project's GitHub remotes to get the
#' `host` and then relies on gh to discover an appropriate token.
#' @param overwrite By default, `use_github_links()` will not overwrite existing
#' fields. Set to `TRUE` to overwrite existing links.
#' @export
#' @examples
#' \dontrun{
#' use_github_links()
#' }
#'
use_github_links <- function(auth_token = deprecated(),
host = deprecated(),
overwrite = FALSE) {
if (lifecycle::is_present(auth_token)) {
deprecate_warn_auth_token("use_github_links")
}
if (lifecycle::is_present(host)) {
deprecate_warn_host("use_github_links")
}
check_is_package("use_github_links()")
gh_url <- github_url_from_git_remotes()
proj_desc_field_update("URL", gh_url, overwrite = overwrite, append = TRUE)
proj_desc_field_update(
"BugReports",
glue("{gh_url}/issues"),
overwrite = overwrite
)
git_ask_commit(
"Add GitHub links to DESCRIPTION",
untracked = TRUE,
paths = "DESCRIPTION"
)
invisible()
}
has_github_links <- function() {
github_url <- github_url_from_git_remotes()
if (is.null(github_url)) {
return(FALSE)
}
desc <- proj_desc()
has_github_url <- github_url %in% desc$get_urls()
bug_reports <- desc$get_field("BugReports", default = character())
has_github_issues <- glue("{github_url}/issues") %in% bug_reports
has_github_url && has_github_issues
}
check_no_origin <- function() {
remotes <- git_remotes()
if ("origin" %in% names(remotes)) {
ui_stop("
This repo already has an {ui_value('origin')} remote, \\
with value {ui_value(remotes[['origin']])}.
You can remove this setting with:
{ui_code('usethis::use_git_remote(\"origin\", url = NULL, overwrite = TRUE)')}")
}
invisible()
}
check_no_github_repo <- function(owner, repo, host) {
repo_found <- tryCatch(
{
repo_info <- gh::gh(
"/repos/{owner}/{repo}",
owner = owner, repo = repo,
.api_url = host
)
TRUE
},
"http_error_404" = function(err) FALSE
)
if (!repo_found) {
return(invisible())
}
spec <- glue("{owner}/{repo}")
empirical_host <- parse_github_remotes(repo_info$html_url)$host
ui_stop("Repo {ui_value(spec)} already exists on {ui_value(empirical_host)}")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/github.R
|
#' Get help with GitHub personal access tokens
#'
#' @description
#' A [personal access
#' token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)
#' (PAT) is needed for certain tasks usethis does via the GitHub API, such as
#' creating a repository, a fork, or a pull request. If you use HTTPS remotes,
#' your PAT is also used when interacting with GitHub as a conventional Git
#' remote. These functions help you get and manage your PAT:
#' * `gh_token_help()` guides you through token troubleshooting and setup.
#' * `create_github_token()` opens a browser window to the GitHub form to
#' generate a PAT, with suggested scopes pre-selected. It also offers advice
#' on storing your PAT.
#' * `gitcreds::gitcreds_set()` helps you register your PAT with the Git
#' credential manager used by your operating system. Later, other packages,
#' such as usethis, gert, and gh can automatically retrieve that PAT and use
#' it to work with GitHub on your behalf.
#'
#' Usually, the first time the PAT is retrieved in an R session, it is cached in
#' an environment variable, for easier reuse for the duration of that R session.
#' After initial acquisition and storage, all of this should happen
#' automatically in the background. GitHub is encouraging the use of PATs that
#' expire after, e.g., 30 days, so prepare yourself to re-generate and re-store
#' your PAT periodically.
#'
#' Git/GitHub credential management is covered in a dedicated article: [Managing
#' Git(Hub)
#' Credentials](https://usethis.r-lib.org/articles/articles/git-credentials.html)
#'
#' @details
#' `create_github_token()` has previously gone by some other names:
#' `browse_github_token()` and `browse_github_pat()`.
#'
#' @param scopes Character vector of token scopes, pre-selected in the web form.
#' Final choices are made in the GitHub form. Read more about GitHub API
#' scopes at
#' <https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/>.
#' @param description Short description or nickname for the token. You might
#' (eventually) have multiple tokens on your GitHub account and a label can
#' help you keep track of what each token is for.
#' @inheritParams use_github
#'
#' @seealso [gh::gh_whoami()] for information on an existing token and
#' `gitcreds::gitcreds_set()` and `gitcreds::gitcreds_get()` for a secure way
#' to store and retrieve your PAT.
#'
#' @return Nothing
#' @name github-token
NULL
#' @export
#' @rdname github-token
#' @examples
#' \dontrun{
#' create_github_token()
#' }
create_github_token <- function(scopes = c("repo", "user", "gist", "workflow"),
description = "DESCRIBE THE TOKEN'S USE CASE",
host = NULL) {
scopes <- glue_collapse(scopes, ",")
host <- get_hosturl(host %||% default_api_url())
url <- glue(
"{host}/settings/tokens/new?scopes={scopes}&description={description}"
)
withr::defer(view_url(url))
hint <- code_hint_with_host("gitcreds::gitcreds_set", host)
ui_todo("
Call {ui_code(hint)} to register this token in the \\
local Git credential store
It is also a great idea to store this token in any password-management \\
software that you use")
invisible()
}
#' @inheritParams use_github
#' @export
#' @rdname github-token
#' @examples
#' \dontrun{
#' gh_token_help()
#' }
gh_token_help <- function(host = NULL) {
host_url <- get_hosturl(host %||% default_api_url())
kv_line("GitHub host", host_url)
pat_sitrep(host_url, scope = "project")
}
code_hint_with_host <- function(function_name, host = NULL, arg_name = NULL) {
arg_hint <- function(host, arg_name) {
if (is.null(host) || is_github_dot_com(host)) {
return("")
}
if (is_null(arg_name)) {
glue('"{host}"')
} else {
glue('{arg_name} = "{host}"')
}
}
glue_chr("{function_name}({arg_hint(host, arg_name)})")
}
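# Usage sketch for code_hint_with_host() (illustrative only; the enterprise
# host below is made up, and we assume is_github_dot_com() recognises the
# "https://github.com" URL):
# code_hint_with_host("gitcreds::gitcreds_set", "https://github.com")
# #> gitcreds::gitcreds_set()
# code_hint_with_host("create_github_token", "https://github.acme.com", arg_name = "host")
# #> create_github_token(host = "https://github.acme.com")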
# workhorse behind gh_token_help() and called, possibly twice, in git_sitrep()
# hence the need for `scold_for_renviron = TRUE/FALSE`
# scope determines if "global" or "de_facto" email is checked
pat_sitrep <- function(host = "https://github.com",
scope = c("user", "project"),
scold_for_renviron = TRUE) {
scope <- rlang::arg_match(scope)
if (scold_for_renviron) {
scold_for_renviron()
}
maybe_pat <- purrr::safely(gh::gh_token)(api_url = host)
if (is.null(maybe_pat$result)) {
ui_oops("The PAT discovered for {ui_path(host)} has the wrong structure.")
ui_inform(maybe_pat$error)
return(invisible(FALSE))
}
pat <- maybe_pat$result
have_pat <- pat != ""
if (!have_pat) {
kv_line("Personal access token for {ui_value(host)}", NULL)
hint <- code_hint_with_host("create_github_token", host, "host")
ui_todo("To create a personal access token, call {ui_code(hint)}")
hint <- code_hint_with_host("gitcreds::gitcreds_set", host)
ui_todo("To store a token for current and future use, call {ui_code(hint)}")
ui_info("
Read more in the {ui_value('Managing Git(Hub) Credentials')} article:
https://usethis.r-lib.org/articles/articles/git-credentials.html")
return(invisible(FALSE))
}
kv_line("Personal access token for {ui_value(host)}", "<discovered>")
online <- is_online(host)
if (!online) {
ui_oops("
Host is not reachable.
No further vetting of the personal access token is possible.
Try again when {ui_value(host)} can be reached.")
return(invisible())
}
maybe_who <- purrr::safely(gh::gh_whoami)(.token = pat, .api_url = host)
if (is.null(maybe_who$result)) {
message <- "Can't get user information for this token."
if (inherits(maybe_who$error, "http_error_401")) {
message <- "
Can't get user information for this token.
The token may no longer be valid or perhaps it lacks the \\
{ui_value('user')} scope."
}
ui_oops(message)
ui_inform(maybe_who$error)
return(invisible(FALSE))
}
who <- maybe_who$result
kv_line("GitHub user", who$login)
scopes <- who$scopes
kv_line("Token scopes", who$scopes)
scopes <- strsplit(scopes, ", ")[[1]]
scold_for_scopes(scopes)
maybe_emails <-
purrr::safely(gh::gh)("/user/emails", .token = pat, .api_url = host)
if (is.null(maybe_emails$result)) {
ui_oops("
Can't retrieve registered email addresses from GitHub.
Consider re-creating your PAT with the {ui_value('user')} \\
or at least {ui_value('user:email')} scope.")
} else {
emails <- maybe_emails$result
addresses <- map_chr(
emails,
~ if (.x$primary) glue_data(.x, "{email} (primary)") else .x[["email"]]
)
kv_line("Email(s)", addresses)
ui_silence(
user <- git_user_get(where_from_scope(scope))
)
git_user_check(user)
if (!is.null(user$email) && !any(grepl(user$email, addresses))) {
ui_oops("
Git user's email ({ui_value(user$email)}) doesn't appear to \\
be registered with GitHub host.")
}
}
invisible(TRUE)
}
scold_for_renviron <- function() {
renviron_path <- scoped_path_r("user", ".Renviron", envvar = "R_ENVIRON_USER")
if (!file_exists(renviron_path)) {
return(invisible())
}
renviron_lines <- read_utf8(renviron_path)
fishy_lines <- grep("^GITHUB_(PAT|TOKEN).*=.+", renviron_lines, value = TRUE)
if (length(fishy_lines) == 0) {
return(invisible())
}
fishy_keys <- re_match(fishy_lines, "^(?<key>.+)=.+")$key
# TODO: when I switch to cli, this is a good place for `!`
# in general, lots below is suboptimal, but good enough for now
ui_info(c(
"{ui_path(renviron_path)} defines environment variable(s):",
paste0("- ", fishy_keys),
"This can prevent your PAT from being retrieved from the Git credential store."
))
ui_info("
If you are troubleshooting PAT problems, the root cause may be an old, \\
invalid PAT defined in {ui_path(renviron_path)}.")
ui_todo("Call {ui_code('edit_r_environ()')} to edit that file.")
ui_info("
For most use cases, it is better to NOT define the PAT in \\
{ui_code('.Renviron')}.
Instead, call {ui_code('gitcreds::gitcreds_set()')} to put the PAT into \\
the Git credential store.")
invisible()
}
scold_for_scopes <- function(scopes) {
if (length(scopes) == 0) {
ui_oops("
Token has no scopes!
{ui_code('create_github_token()')} defaults to the recommended scopes.")
return(invisible())
}
# https://docs.github.com/en/free-pro-team@latest/developers/apps/scopes-for-oauth-apps
# why these checks?
# previous defaults for create_github_token(): repo, gist, user:email
# more recently: repo, user, gist, workflow
# (gist scope is a very weak recommendation)
has_repo <- "repo" %in% scopes
has_workflow <- "workflow" %in% scopes
has_user_email <- "user" %in% scopes || "user:email" %in% scopes
if (has_repo && has_workflow && has_user_email) {
return(invisible())
}
# current design of the ui_*() functions makes this pretty hard :(
suggestions <- c(
if (!has_repo) {
"- {ui_value('repo')}: needed to fully access user's repos"
},
if (!has_workflow) {
"- {ui_value('workflow')}: needed to manage GitHub Actions workflow files"
},
if (!has_user_email) {
"- {ui_value('user:email')}: needed to read user's email addresses"
}
)
message <- c(
"Token lacks recommended scopes:",
suggestions,
"Consider re-creating your PAT with the missing scopes.",
"{ui_code('create_github_token()')} defaults to the recommended scopes."
)
ui_oops(glue_collapse(message, sep = "\n"))
}
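# Usage sketch for scold_for_scopes() (the scope sets below are illustrative):
# scold_for_scopes(character())                    # scolds: token has no scopes
# scold_for_scopes(c("repo", "gist"))              # scolds: missing 'workflow' and 'user:email'
# scold_for_scopes(c("repo", "workflow", "user"))  # silent: recommended scopes present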
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/github_token.R
|
use_dependency <- function(package, type, min_version = NULL) {
check_name(package)
check_name(type)
if (package != "R") {
check_installed(package)
}
if (package == "R" && tolower(type) != "depends") {
ui_stop("Set {ui_code('type = \"Depends\"')} when specifying an R version")
} else if (package == "R" && is.null(min_version)) {
ui_stop("Specify {ui_code('min_version')} when {ui_code('package = \"R\"')}")
}
if (isTRUE(min_version) && package == "R") {
min_version <- r_version()
} else if (isTRUE(min_version)) {
min_version <- utils::packageVersion(package)
}
version <- if (is.null(min_version)) "*" else glue(">= {min_version}")
types <- c("Depends", "Imports", "Suggests", "Enhances", "LinkingTo")
names(types) <- tolower(types)
type <- types[[match.arg(tolower(type), names(types))]]
desc <- proj_desc()
deps <- desc$get_deps()
deps <- deps[deps$package == package, ]
new_linking_to <- type == "LinkingTo" && !"LinkingTo" %in% deps$type
new_non_linking_to <- type != "LinkingTo" && identical(deps$type, "LinkingTo")
changed <- FALSE
# One of:
# * No existing dependency on this package
# * Adding existing non-LinkingTo dependency to LinkingTo
# * First use of a LinkingTo package as a non-LinkingTo dependency
  # In all cases, we can simply make the change.
if (nrow(deps) == 0 || new_linking_to || new_non_linking_to) {
ui_done("Adding {ui_value(package)} to {ui_field(type)} field in DESCRIPTION")
desc$set_dep(package, type, version = version)
desc$write()
changed <- TRUE
return(invisible(changed))
}
if (type == "LinkingTo") {
deps <- deps[deps$type == "LinkingTo", ]
} else {
deps <- deps[deps$type != "LinkingTo", ]
}
existing_type <- deps$type
existing_version <- deps$version
delta <- sign(match(existing_type, types) - match(type, types))
if (delta < 0) {
# don't downgrade
ui_warn(
"Package {ui_value(package)} is already listed in \\
{ui_value(existing_type)} in DESCRIPTION, no change made."
)
} else if (delta == 0 && !is.null(min_version)) {
# change version
upgrade <- existing_version == "*" ||
numeric_version(min_version) > version_spec(existing_version)
if (upgrade) {
ui_done(
"Increasing {ui_value(package)} version to {ui_value(version)} in \\
DESCRIPTION")
desc$set_dep(package, type, version = version)
desc$write()
changed <- TRUE
}
} else if (delta > 0) {
# moving from, e.g., Suggests to Imports
ui_done(
"Moving {ui_value(package)} from {ui_field(existing_type)} to \\
{ui_field(type)} field in DESCRIPTION")
desc$del_dep(package, existing_type)
desc$set_dep(package, type, version = version)
desc$write()
changed <- TRUE
}
invisible(changed)
}
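# Usage sketch for use_dependency() (not run; assumes an active package
# project, and the package name is purely illustrative):
# use_dependency("dplyr", "Imports", min_version = "1.0.0")
# # -> adds `dplyr (>= 1.0.0)` to Imports in DESCRIPTION; calling it again with
# # type = "Suggests" warns instead of downgrading the existing Imports entry.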
r_version <- function() {
version <- getRversion()
glue("{version$major}.{version$minor}")
}
version_spec <- function(x) {
x <- gsub("(<=|<|>=|>|==)\\s*", "", x)
numeric_version(x)
}
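# Examples for the helpers above (illustrative):
# r_version()              # e.g. "4.3" on R 4.3.x
# version_spec(">= 1.2.3")
# #> [1] '1.2.3'
# Returning a numeric_version means ">= 1.10" correctly compares as newer
# than ">= 1.9".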
view_url <- function(..., open = is_interactive()) {
url <- paste(..., sep = "/")
if (open) {
ui_done("Opening URL {ui_value(url)}")
utils::browseURL(url)
} else {
ui_todo("Open URL {ui_value(url)}")
}
invisible(url)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/helpers.R
|
#' Add files to `.Rbuildignore`
#'
#' @description
#' `.Rbuildignore` has a regular expression on each line, but it's
#' usually easier to work with specific file names. By default,
#' `use_build_ignore()` will (crudely) turn a filename into a regular
#' expression that will only match that path. Repeated entries will be
#' silently removed.
#'
#' `use_build_ignore()` is designed to ignore *individual* files. If you
#' want to ignore *all* files with a given extension, consider providing
#' an "as-is" regular expression, using `escape = FALSE`; see examples.
#'
#' @param files Character vector of path names.
#' @param escape If `TRUE`, the default, will escape `.` to
#' `\\.` and surround with `^` and `$`.
#' @export
#' @examples
#' \dontrun{
#' # ignore all Excel files
#' use_build_ignore("[.]xlsx$", escape = FALSE)
#' }
use_build_ignore <- function(files, escape = TRUE) {
if (escape) {
files <- escape_path(files)
}
write_union(proj_path(".Rbuildignore"), files)
}
escape_path <- function(x) {
x <- gsub("\\.", "\\\\.", x)
x <- gsub("/$", "", x)
paste0("^", x, "$")
}
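# Example (illustrative): a literal file name becomes an anchored regular
# expression, with dots escaped and any trailing slash dropped:
# escape_path(c("data.csv", "inst/extdata/"))
# #> [1] "^data\\.csv$"   "^inst/extdata$"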
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/ignore.R
|
# Standalone file: do not edit by hand
# Source: <https://github.com/r-lib/rlang/blob/main/R/standalone-obj-type.R>
# ----------------------------------------------------------------------
#
# ---
# repo: r-lib/rlang
# file: standalone-obj-type.R
# last-updated: 2022-10-04
# license: https://unlicense.org
# imports: rlang (>= 1.1.0)
# ---
#
# ## Changelog
#
# 2022-10-04:
# - `obj_type_friendly(value = TRUE)` now shows numeric scalars
# literally.
# - `stop_friendly_type()` now takes `show_value`, passed to
# `obj_type_friendly()` as the `value` argument.
#
# 2022-10-03:
# - Added `allow_na` and `allow_null` arguments.
# - `NULL` is now backticked.
# - Better friendly type for infinities and `NaN`.
#
# 2022-09-16:
# - Unprefixed usage of rlang functions with `rlang::` to
# avoid onLoad issues when called from rlang (#1482).
#
# 2022-08-11:
# - Prefixed usage of rlang functions with `rlang::`.
#
# 2022-06-22:
# - `friendly_type_of()` is now `obj_type_friendly()`.
# - Added `obj_type_oo()`.
#
# 2021-12-20:
# - Added support for scalar values and empty vectors.
# - Added `stop_input_type()`
#
# 2021-06-30:
# - Added support for missing arguments.
#
# 2021-04-19:
# - Added support for matrices and arrays (#141).
# - Added documentation.
# - Added changelog.
#
# nocov start
#' Return English-friendly type
#' @param x Any R object.
#' @param value Whether to describe the value of `x`. Special values
#' like `NA` or `""` are always described.
#' @param length Whether to mention the length of vectors and lists.
#' @return A string describing the type. Starts with an indefinite
#' article, e.g. "an integer vector".
#' @noRd
obj_type_friendly <- function(x, value = TRUE) {
if (is_missing(x)) {
return("absent")
}
if (is.object(x)) {
if (inherits(x, "quosure")) {
type <- "quosure"
} else {
type <- paste(class(x), collapse = "/")
}
return(sprintf("a <%s> object", type))
}
if (!is_vector(x)) {
return(.rlang_as_friendly_type(typeof(x)))
}
n_dim <- length(dim(x))
if (!n_dim) {
if (!is_list(x) && length(x) == 1) {
if (is_na(x)) {
return(switch(
typeof(x),
logical = "`NA`",
integer = "an integer `NA`",
double =
if (is.nan(x)) {
"`NaN`"
} else {
"a numeric `NA`"
},
complex = "a complex `NA`",
character = "a character `NA`",
.rlang_stop_unexpected_typeof(x)
))
}
show_infinites <- function(x) {
if (x > 0) {
"`Inf`"
} else {
"`-Inf`"
}
}
str_encode <- function(x, width = 30, ...) {
if (nchar(x) > width) {
x <- substr(x, 1, width - 3)
x <- paste0(x, "...")
}
encodeString(x, ...)
}
if (value) {
if (is.numeric(x) && is.infinite(x)) {
return(show_infinites(x))
}
if (is.numeric(x) || is.complex(x)) {
number <- as.character(round(x, 2))
what <- if (is.complex(x)) "the complex number" else "the number"
return(paste(what, number))
}
return(switch(
typeof(x),
logical = if (x) "`TRUE`" else "`FALSE`",
character = {
what <- if (nzchar(x)) "the string" else "the empty string"
paste(what, str_encode(x, quote = "\""))
},
raw = paste("the raw value", as.character(x)),
.rlang_stop_unexpected_typeof(x)
))
}
return(switch(
typeof(x),
logical = "a logical value",
integer = "an integer",
double = if (is.infinite(x)) show_infinites(x) else "a number",
complex = "a complex number",
character = if (nzchar(x)) "a string" else "\"\"",
raw = "a raw value",
.rlang_stop_unexpected_typeof(x)
))
}
if (length(x) == 0) {
return(switch(
typeof(x),
logical = "an empty logical vector",
integer = "an empty integer vector",
double = "an empty numeric vector",
complex = "an empty complex vector",
character = "an empty character vector",
raw = "an empty raw vector",
list = "an empty list",
.rlang_stop_unexpected_typeof(x)
))
}
}
vec_type_friendly(x)
}
vec_type_friendly <- function(x, length = FALSE) {
if (!is_vector(x)) {
abort("`x` must be a vector.")
}
type <- typeof(x)
n_dim <- length(dim(x))
add_length <- function(type) {
if (length && !n_dim) {
paste0(type, sprintf(" of length %s", length(x)))
} else {
type
}
}
if (type == "list") {
if (n_dim < 2) {
return(add_length("a list"))
} else if (is.data.frame(x)) {
return("a data frame")
} else if (n_dim == 2) {
return("a list matrix")
} else {
return("a list array")
}
}
type <- switch(
type,
logical = "a logical %s",
integer = "an integer %s",
numeric = ,
double = "a double %s",
complex = "a complex %s",
character = "a character %s",
raw = "a raw %s",
type = paste0("a ", type, " %s")
)
if (n_dim < 2) {
kind <- "vector"
} else if (n_dim == 2) {
kind <- "matrix"
} else {
kind <- "array"
}
out <- sprintf(type, kind)
if (n_dim >= 2) {
out
} else {
add_length(out)
}
}
.rlang_as_friendly_type <- function(type) {
switch(
type,
list = "a list",
NULL = "`NULL`",
environment = "an environment",
externalptr = "a pointer",
weakref = "a weak reference",
S4 = "an S4 object",
name = ,
symbol = "a symbol",
language = "a call",
pairlist = "a pairlist node",
expression = "an expression vector",
char = "an internal string",
promise = "an internal promise",
... = "an internal dots object",
any = "an internal `any` object",
bytecode = "an internal bytecode object",
primitive = ,
builtin = ,
special = "a primitive function",
closure = "a function",
type
)
}
.rlang_stop_unexpected_typeof <- function(x, call = caller_env()) {
abort(
sprintf("Unexpected type <%s>.", typeof(x)),
call = call
)
}
#' Return OO type
#' @param x Any R object.
#' @return One of `"bare"` (for non-OO objects), `"S3"`, `"S4"`,
#' `"R6"`, or `"R7"`.
#' @noRd
obj_type_oo <- function(x) {
if (!is.object(x)) {
return("bare")
}
class <- inherits(x, c("R6", "R7_object"), which = TRUE)
if (class[[1]]) {
"R6"
} else if (class[[2]]) {
"R7"
} else if (isS4(x)) {
"S4"
} else {
"S3"
}
}
#' @param x The object type which does not conform to `what`. Its
#' `obj_type_friendly()` is taken and mentioned in the error message.
#' @param what The friendly expected type as a string. Can be a
#' character vector of expected types, in which case the error
#' message mentions all of them in an "or" enumeration.
#' @param show_value Passed to `value` argument of `obj_type_friendly()`.
#' @param ... Arguments passed to [abort()].
#' @inheritParams args_error_context
#' @noRd
stop_input_type <- function(x,
what,
...,
allow_na = FALSE,
allow_null = FALSE,
show_value = TRUE,
arg = caller_arg(x),
call = caller_env()) {
# From standalone-cli.R
cli <- env_get_list(
nms = c("format_arg", "format_code"),
last = topenv(),
default = function(x) sprintf("`%s`", x),
inherit = TRUE
)
if (allow_na) {
what <- c(what, cli$format_code("NA"))
}
if (allow_null) {
what <- c(what, cli$format_code("NULL"))
}
if (length(what)) {
what <- oxford_comma(what)
}
message <- sprintf(
"%s must be %s, not %s.",
cli$format_arg(arg),
what,
obj_type_friendly(x, value = show_value)
)
abort(message, ..., call = call, arg = arg)
}
oxford_comma <- function(chr, sep = ", ", final = "or") {
n <- length(chr)
if (n < 2) {
return(chr)
}
head <- chr[seq_len(n - 1)]
last <- chr[n]
head <- paste(head, collapse = sep)
# Write a or b. But a, b, or c.
if (n > 2) {
paste0(head, sep, final, " ", last)
} else {
paste0(head, " ", final, " ", last)
}
}
# nocov end
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/import-standalone-obj-type.R
|
# Standalone file: do not edit by hand
# Source: <https://github.com/r-lib/rlang/blob/main/R/standalone-types-check.R>
# ----------------------------------------------------------------------
#
# ---
# repo: r-lib/rlang
# file: standalone-types-check.R
# last-updated: 2023-03-13
# license: https://unlicense.org
# dependencies: standalone-obj-type.R
# imports: rlang (>= 1.1.0)
# ---
#
# ## Changelog
#
# 2023-03-13:
# - Improved error messages of number checkers (@teunbrand)
# - Added `allow_infinite` argument to `check_number_whole()` (@mgirlich).
# - Added `check_data_frame()` (@mgirlich).
#
# 2023-03-07:
# - Added dependency on rlang (>= 1.1.0).
#
# 2023-02-15:
# - Added `check_logical()`.
#
# - `check_bool()`, `check_number_whole()`, and
# `check_number_decimal()` are now implemented in C.
#
# - For efficiency, `check_number_whole()` and
# `check_number_decimal()` now take a `NULL` default for `min` and
# `max`. This makes it possible to bypass unnecessary type-checking
# and comparisons in the default case of no bounds checks.
#
# 2022-10-07:
# - `check_number_whole()` and `_decimal()` no longer treat
# non-numeric types such as factors or dates as numbers. Numeric
# types are detected with `is.numeric()`.
#
# 2022-10-04:
# - Added `check_name()` that forbids the empty string.
# `check_string()` allows the empty string by default.
#
# 2022-09-28:
# - Removed `what` arguments.
# - Added `allow_na` and `allow_null` arguments.
# - Added `allow_decimal` and `allow_infinite` arguments.
# - Improved errors with absent arguments.
#
#
# 2022-09-16:
# - Unprefixed usage of rlang functions with `rlang::` to
# avoid onLoad issues when called from rlang (#1482).
#
# 2022-08-11:
# - Added changelog.
#
# nocov start
# Scalars -----------------------------------------------------------------
.standalone_types_check_dot_call <- .Call
check_bool <- function(x,
...,
allow_na = FALSE,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x) && .standalone_types_check_dot_call(ffi_standalone_is_bool_1.0.7, x, allow_na, allow_null)) {
return(invisible(NULL))
}
stop_input_type(
x,
c("`TRUE`", "`FALSE`"),
...,
allow_na = allow_na,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_string <- function(x,
...,
allow_empty = TRUE,
allow_na = FALSE,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
is_string <- .rlang_check_is_string(
x,
allow_empty = allow_empty,
allow_na = allow_na,
allow_null = allow_null
)
if (is_string) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a single string",
...,
allow_na = allow_na,
allow_null = allow_null,
arg = arg,
call = call
)
}
.rlang_check_is_string <- function(x,
allow_empty,
allow_na,
allow_null) {
if (is_string(x)) {
if (allow_empty || !is_string(x, "")) {
return(TRUE)
}
}
if (allow_null && is_null(x)) {
return(TRUE)
}
if (allow_na && (identical(x, NA) || identical(x, na_chr))) {
return(TRUE)
}
FALSE
}
check_name <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
is_string <- .rlang_check_is_string(
x,
allow_empty = FALSE,
allow_na = FALSE,
allow_null = allow_null
)
if (is_string) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a valid name",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
IS_NUMBER_true <- 0
IS_NUMBER_false <- 1
IS_NUMBER_oob <- 2
check_number_decimal <- function(x,
...,
min = NULL,
max = NULL,
allow_infinite = TRUE,
allow_na = FALSE,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (missing(x)) {
exit_code <- IS_NUMBER_false
} else if (0 == (exit_code <- .standalone_types_check_dot_call(
ffi_standalone_check_number_1.0.7,
x,
allow_decimal = TRUE,
min,
max,
allow_infinite,
allow_na,
allow_null
))) {
return(invisible(NULL))
}
.stop_not_number(
x,
...,
exit_code = exit_code,
allow_decimal = TRUE,
min = min,
max = max,
allow_na = allow_na,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_number_whole <- function(x,
...,
min = NULL,
max = NULL,
allow_infinite = FALSE,
allow_na = FALSE,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (missing(x)) {
exit_code <- IS_NUMBER_false
} else if (0 == (exit_code <- .standalone_types_check_dot_call(
ffi_standalone_check_number_1.0.7,
x,
allow_decimal = FALSE,
min,
max,
allow_infinite,
allow_na,
allow_null
))) {
return(invisible(NULL))
}
.stop_not_number(
x,
...,
exit_code = exit_code,
allow_decimal = FALSE,
min = min,
max = max,
allow_na = allow_na,
allow_null = allow_null,
arg = arg,
call = call
)
}
.stop_not_number <- function(x,
...,
exit_code,
allow_decimal,
min,
max,
allow_na,
allow_null,
arg,
call) {
if (allow_decimal) {
what <- "a number"
} else {
what <- "a whole number"
}
if (exit_code == IS_NUMBER_oob) {
min <- min %||% -Inf
max <- max %||% Inf
if (min > -Inf && max < Inf) {
what <- sprintf("%s between %s and %s", what, min, max)
} else if (x < min) {
what <- sprintf("%s larger than or equal to %s", what, min)
} else if (x > max) {
what <- sprintf("%s smaller than or equal to %s", what, max)
} else {
abort("Unexpected state in OOB check", .internal = TRUE)
}
}
stop_input_type(
x,
what,
...,
allow_na = allow_na,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_symbol <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_symbol(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a symbol",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_arg <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_symbol(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"an argument name",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_call <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_call(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a defused call",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_environment <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_environment(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"an environment",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_function <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_function(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a function",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_closure <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_closure(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"an R function",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_formula <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_formula(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a formula",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
# Vectors -----------------------------------------------------------------
check_character <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_character(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a character vector",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_logical <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is_logical(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a logical vector",
...,
allow_na = FALSE,
allow_null = allow_null,
arg = arg,
call = call
)
}
check_data_frame <- function(x,
...,
allow_null = FALSE,
arg = caller_arg(x),
call = caller_env()) {
if (!missing(x)) {
if (is.data.frame(x)) {
return(invisible(NULL))
}
if (allow_null && is_null(x)) {
return(invisible(NULL))
}
}
stop_input_type(
x,
"a data frame",
...,
allow_null = allow_null,
arg = arg,
call = call
)
}
# nocov end
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/import-standalone-types-check.R
|
#' Helpers for GitHub issues
#'
#' @description
#' The `issue_*` family of functions allows you to perform common operations on
#' GitHub issues from within R. They're designed to help you efficiently deal
#' with large numbers of issues, particularly motivated by the challenges faced
#' by the tidyverse team.
#'
#' * `issue_close_community()` closes an issue, because it's not a bug report or
#' feature request, and points the author towards RStudio Community as a
#' better place to discuss usage (<https://community.rstudio.com>).
#'
#' * `issue_reprex_needed()` labels the issue with the "reprex" label and
#' gives the author some advice about what is needed.
#'
#' @section Saved replies:
#'
#' Unlike GitHub's "saved replies", these functions can:
#' * Be shared between people
#' * Perform other actions, like labelling, or closing
#' * Have additional arguments
#' * Include randomness (like friendly gifs)
#'
#' @param number Issue number
#' @param reprex Does the issue also need a reprex?
#'
#' @examples
#' \dontrun{
#' issue_close_community(12, reprex = TRUE)
#'
#' issue_reprex_needed(241)
#' }
#' @name issue-this
NULL
#' @export
#' @rdname issue-this
issue_close_community <- function(number, reprex = FALSE) {
tr <- target_repo(github_get = TRUE)
if (!tr$can_push) {
# https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level
# I have not found a way to detect triage permission via API.
# It seems you just have to try?
ui_line("
You don't seem to have push access for {ui_value(tr$repo_spec)}.
Unless you have triage permissions, you won't be allowed to close an \\
issue.")
if (ui_nope("Do you want to try anyway?")) {
ui_oops("Cancelling.")
return(invisible())
}
}
info <- issue_info(number, tr)
issue <- issue_details(info)
ui_done("
Closing issue {ui_value(issue$shorthand)} \\
({ui_field(issue$author)}): {ui_value(issue$title)}")
if (info$state == "closed") {
ui_stop("Issue {number} is already closed")
}
reprex_insert <- glue("
But before you ask there, I'd suggest that you create a \\
    [reprex](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html), \\
    because that greatly increases your chances of getting help.")
message <- glue(
"Hi {issue$author},\n",
"\n",
"This issue doesn't appear to be a bug report or a specific feature ",
"request, so it's more suitable for ",
"[RStudio Community](https://community.rstudio.com). ",
if (reprex) reprex_insert else "",
"\n\n",
"Thanks!"
)
issue_comment_add(number, message = message, tr = tr)
issue_edit(number, state = "closed", tr = tr)
}
#' @export
#' @rdname issue-this
issue_reprex_needed <- function(number) {
tr <- target_repo(github_get = TRUE)
if (!tr$can_push) {
# https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#repository-access-for-each-permission-level
    # I can't find any way to detect triage permission via API.
# It seems you just have to try?
ui_line("
You don't seem to have push access for {ui_value(tr$repo_spec)}.
Unless you have triage permissions, you won't be allowed to label an \\
issue.")
if (ui_nope("Do you want to try anyway?")) {
ui_oops("Cancelling.")
return(invisible())
}
}
info <- issue_info(number, tr)
labels <- map_chr(info$labels, "name")
issue <- issue_details(info)
if ("reprex" %in% labels) {
ui_stop("Issue {number} already has 'reprex' label")
}
ui_done("
Labelling and commenting on issue {ui_value(issue$shorthand)} \\
({ui_field(issue$author)}): {ui_value(issue$title)}")
message <- glue("
Can you please provide a minimal reproducible example using the \\
[reprex](http://reprex.tidyverse.org) package?
The goal of a reprex is to make it as easy as possible for me to \\
recreate your problem so that I can fix it.
If you've never made a minimal reprex before, there is lots of good advice \\
[here](https://reprex.tidyverse.org/articles/reprex-dos-and-donts.html).")
issue_comment_add(number, message = message, tr = tr)
issue_edit(number, labels = as.list(union(labels, "reprex")), tr = tr)
}
# low-level operations ----------------------------------------------------
issue_comment_add <- function(number, message, tr = NULL) {
issue_gh(
"POST /repos/{owner}/{repo}/issues/{issue_number}/comments",
number = number,
body = message,
tr = tr
)
}
issue_edit <- function(number, ..., tr = NULL) {
issue_gh(
"PATCH /repos/{owner}/{repo}/issues/{issue_number}",
...,
number = number,
tr = tr
)
}
issue_info <- function(number, tr = NULL) {
issue_gh(
"GET /repos/{owner}/{repo}/issues/{issue_number}",
number = number,
tr = tr
)
}
# Helpers -----------------------------------------------------------------
# Assumptions:
# * Issue number is called `issue_number`; make sure to tweak `endpoint` if
# necessary.
# * The user-facing caller should pass information about the target repo,
# because that is required to vet the GitHub remote config anyway.
# The fallback to target_repo() is purely for development convenience.
issue_gh <- function(endpoint, ..., number, tr = NULL) {
tr <- tr %||% target_repo(github_get = NA)
gh <- gh_tr(tr)
out <- gh(endpoint, ..., issue_number = number)
if (substr(endpoint, 1, 4) == "GET ") {
out
} else {
invisible(out)
}
}
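# Usage sketch for issue_gh() (not run; issue number 123 is a placeholder):
# tr <- target_repo(github_get = TRUE)
# issue_gh(
#   "GET /repos/{owner}/{repo}/issues/{issue_number}",
#   number = 123,
#   tr = tr
# )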
issue_details <- function(info) {
repo_dat <- parse_github_remotes(info$html_url)
list(
shorthand = glue(
"{repo_dat$repo_owner}/{repo_dat$repo_name}/#{info$number}"
),
author = glue("@{info$user$login}"),
title = info$title
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/issue.R
|
#' Create Jenkinsfile for Jenkins CI Pipelines
#'
#' `use_jenkins()` adds a basic Jenkinsfile for R packages to the project root
#' directory. The Jenkinsfile stages take advantage of calls to `make`, and so
#' calling this function will also run `use_make()` if a Makefile does not
#' already exist at the project root.
#'
#' @seealso The [documentation on Jenkins
#' Pipelines](https://www.jenkins.io/doc/book/pipeline/jenkinsfile/).
#' @seealso [use_make()]
#' @export
use_jenkins <- function() {
use_make()
use_template(
"Jenkinsfile",
data = list(name = project_name())
)
use_build_ignore("Jenkinsfile")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/jenkins.R
|
#' Use "latest" versions of all dependencies
#'
#' Pins minimum versions of all `Imports` and `Depends` dependencies to latest
#' ones (as determined by `source`). Useful for the tidyverse package, but
#' should otherwise be used with extreme care.
#'
#' @keywords internal
#' @export
#' @param overwrite By default (`TRUE`), all dependencies will be modified.
#' Set to `FALSE` to only modify dependencies without version
#' specifications.
#' @param source Use "CRAN" or "local" package versions.
use_latest_dependencies <- function(overwrite = TRUE, source = c("CRAN", "local")) {
source <- arg_match(source)
desc <- proj_desc()
updated <- update_versions(
desc$get_deps(),
overwrite = overwrite,
source = source
)
desc$set_deps(updated)
desc$write()
invisible(TRUE)
}
update_versions <- function(deps, overwrite = TRUE, source = c("CRAN", "local")) {
baserec <- base_and_recommended()
to_change <- !deps$package %in% c("R", baserec) & deps$type != "Suggests"
if (!overwrite) {
to_change <- to_change & deps$version == "*"
}
packages <- deps$package[to_change]
versions <- switch(match.arg(source),
local = map_chr(packages, ~ as.character(utils::packageVersion(.x))),
CRAN = utils::available.packages()[packages, "Version"]
)
deps$version[to_change] <- paste0(">= ", versions)
deps
}
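# Sketch of update_versions() (not run; relies on the active project's
# DESCRIPTION). Imports/Depends rows gain a ">= <version>" pin; Suggests, "R",
# and base/recommended packages are left untouched:
# deps <- proj_desc()$get_deps()
# update_versions(deps, overwrite = TRUE, source = "local")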
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/latest-dependencies.R
|
#' License a package
#'
#' @description
#' Adds the necessary infrastructure to declare your package as licensed
#' with one of these popular open source licenses:
#'
#' Permissive:
#' * [MIT](https://choosealicense.com/licenses/mit/): simple and permissive.
#' * [Apache 2.0](https://choosealicense.com/licenses/apache-2.0/): MIT +
#' provides patent protection.
#'
#' Copyleft:
#' * [GPL v2](https://choosealicense.com/licenses/gpl-2.0/): requires sharing
#' of improvements.
#' * [GPL v3](https://choosealicense.com/licenses/gpl-3.0/): requires sharing
#' of improvements.
#' * [AGPL v3](https://choosealicense.com/licenses/agpl-3.0/): requires sharing
#' of improvements.
#' * [LGPL v2.1](https://choosealicense.com/licenses/lgpl-2.1/): requires sharing
#' of improvements.
#' * [LGPL v3](https://choosealicense.com/licenses/lgpl-3.0/): requires sharing
#' of improvements.
#'
#' Creative commons licenses appropriate for data packages:
#' * [CC0](https://creativecommons.org/publicdomain/zero/1.0/): dedicated
#' to public domain.
#' * [CC-BY](https://creativecommons.org/licenses/by/4.0/): Free to share and
#' adapt, must give appropriate credit.
#'
#' See <https://choosealicense.com> for more details and other options.
#'
#' Alternatively, for code that you don't want to share with others,
#' `use_proprietary_license()` makes it clear that all rights are reserved,
#' and the code is not open source.
#'
#' @details
#' CRAN does not permit you to include copies of standard licenses in your
#' package, so these functions save the license as `LICENSE.md` and add it
#' to `.Rbuildignore`.
#'
#' @name licenses
#' @param copyright_holder Name of the copyright holder or holders. This
#' defaults to `"{package name} authors"`; you should only change this if you
#' use a CLA to assign copyright to a single entity.
#' @param version License version. This defaults to the latest version of the
#'   license.
#' @param include_future If `TRUE`, will license your package under the current
#' and any potential future versions of the license. This is generally
#' considered to be good practice because it means your package will
#' automatically include "bug" fixes in licenses.
#' @seealso For more details, refer to the
#' [license chapter](https://r-pkgs.org/license.html) in _R Packages_.
#' @aliases NULL
NULL
#' @rdname licenses
#' @export
use_mit_license <- function(copyright_holder = NULL) {
data <- list(
year = format(Sys.Date(), "%Y"),
copyright_holder = copyright_holder %||% glue("{project_name()} authors")
)
if (is_package()) {
proj_desc_field_update("License", "MIT + file LICENSE", overwrite = TRUE)
use_template("year-copyright.txt", save_as = "LICENSE", data = data)
}
use_license_template("mit", data)
}
#' @rdname licenses
#' @export
use_gpl_license <- function(version = 3, include_future = TRUE) {
version <- check_license_version(version, 2:3)
if (is_package()) {
abbr <- license_abbr("GPL", version, include_future)
proj_desc_field_update("License", abbr, overwrite = TRUE)
}
use_license_template(glue("GPL-{version}"))
}
#' @rdname licenses
#' @export
use_agpl_license <- function(version = 3, include_future = TRUE) {
version <- check_license_version(version, 3)
if (is_package()) {
abbr <- license_abbr("AGPL", version, include_future)
proj_desc_field_update("License", abbr, overwrite = TRUE)
}
use_license_template(glue("AGPL-{version}"))
}
#' @rdname licenses
#' @export
use_lgpl_license <- function(version = 3, include_future = TRUE) {
version <- check_license_version(version, c(2.1, 3))
if (is_package()) {
abbr <- license_abbr("LGPL", version, include_future)
proj_desc_field_update("License", abbr, overwrite = TRUE)
}
use_license_template(glue("LGPL-{version}"))
}
#' @rdname licenses
#' @export
use_apache_license <- function(version = 2, include_future = TRUE) {
version <- check_license_version(version, 2)
if (is_package()) {
abbr <- license_abbr("Apache License", version, include_future)
proj_desc_field_update("License", abbr, overwrite = TRUE)
}
use_license_template(glue("apache-{version}"))
}
#' @rdname licenses
#' @export
use_cc0_license <- function() {
if (is_package()) {
proj_desc_field_update("License", "CC0", overwrite = TRUE)
}
use_license_template("cc0")
}
#' @rdname licenses
#' @export
use_ccby_license <- function() {
if (is_package()) {
proj_desc_field_update("License", "CC BY 4.0", overwrite = TRUE)
}
use_license_template("ccby-4")
}
#' @rdname licenses
#' @export
use_proprietary_license <- function(copyright_holder) {
data <- list(
year = year(),
copyright_holder = copyright_holder
)
if (is_package()) {
proj_desc_field_update("License", "file LICENSE", overwrite = TRUE)
}
use_template("license-proprietary.txt", save_as = "LICENSE", data = data)
}
# Fallbacks ---------------------------------------------------------------
#' @rdname licenses
#' @export
#' @usage NULL
use_gpl3_license <- function() {
use_gpl_license(3)
}
#' @rdname licenses
#' @export
#' @usage NULL
use_agpl3_license <- function() {
use_agpl_license(3)
}
#' @rdname licenses
#' @export
#' @usage NULL
use_apl2_license <- function() {
use_apache_license(2)
}
# Helpers -----------------------------------------------------------------
use_license_template <- function(license, data = list()) {
license_template <- glue("license-{license}.md")
use_template(license_template,
save_as = "LICENSE.md",
data = data,
ignore = TRUE
)
}
check_license_version <- function(version, possible) {
version <- as.double(version)
if (!version %in% possible) {
possible <- glue_collapse(possible, sep = ", ", last = ", or ")
ui_stop("`version` must be {possible}")
}
version
}
license_abbr <- function(name, version, include_future) {
if (include_future) {
glue_chr("{name} (>= {version})")
} else {
if (name %in% c("GPL", "LGPL", "AGPL")) {
# Standard abbreviations listed at
# https://cran.rstudio.com/doc/manuals/r-devel/R-exts.html#Licensing
glue_chr("{name}-{version}")
} else {
glue_chr("{name} (== {version})")
}
}
}
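# Examples of the DESCRIPTION License strings produced (illustrative):
# license_abbr("GPL", 3, include_future = TRUE)     # "GPL (>= 3)"
# license_abbr("GPL", 3, include_future = FALSE)    # "GPL-3"
# license_abbr("Apache License", 2, FALSE)          # "Apache License (== 2)"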
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/license.R
|
#' Use lifecycle badges
#'
#' @description
#' This helper:
#'
#' * Adds lifecycle as a dependency.
#' * Imports [lifecycle::deprecated()] for use in function arguments.
#' * Copies the lifecycle badges into `man/figures`.
#' * Reminds you how to use the badge syntax.
#'
#' Learn more at <https://lifecycle.r-lib.org/articles/communicate.html>
#'
#' @seealso [use_lifecycle_badge()] to signal the
#' [lifecycle stage](https://lifecycle.r-lib.org/articles/stages.html) of
#' your package as whole
#' @export
use_lifecycle <- function() {
check_is_package("use_lifecycle()")
check_uses_roxygen("use_lifecycle()")
if (!uses_roxygen_md()) {
ui_stop("
Turn on roxygen2 markdown support {ui_code('use_roxygen_md()')}")
}
use_package("lifecycle")
use_import_from("lifecycle", "deprecated")
dest_dir <- proj_path("man", "figures")
create_directory(dest_dir)
templ_dir <- path_package("usethis", "templates")
templ_files <- dir_ls(templ_dir, glob = "*/lifecycle-*.svg")
purrr::walk(templ_files, file_copy, dest_dir, overwrite = TRUE)
ui_done("Copied SVG badges to {ui_path(dest_dir)}")
ui_todo(c(
"Add badges in documentation topics by inserting one of:",
"#' `r lifecycle::badge('experimental')`",
"#' `r lifecycle::badge('superseded')`",
"#' `r lifecycle::badge('deprecated')`"
))
invisible(TRUE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/lifecycle.R
|
proj_line_ending <- function() {
# First look in .Rproj file
proj_path <- proj_path(paste0(project_name(), ".Rproj"))
if (file_exists(proj_path)) {
config <- read_utf8(proj_path)
if (any(grepl("^LineEndingConversion: Posix", config))) {
return("\n")
} else if (any(grepl("^LineEndingConversion: Windows", config))) {
return("\r\n")
}
}
# Then try DESCRIPTION
desc_path <- proj_path("DESCRIPTION")
if (file_exists(desc_path)) {
return(detect_line_ending(desc_path))
}
# Then try any .R file
r_path <- proj_path("R")
if (dir_exists(r_path)) {
r_files <- dir_ls(r_path, regexp = "[.][rR]$")
if (length(r_files) > 0) {
return(detect_line_ending(r_files[[1]]))
}
}
# Then give up - this is used (for example), when writing the
# first file into the package
platform_line_ending()
}
platform_line_ending <- function() {
if (.Platform$OS.type == "windows") "\r\n" else "\n"
}
detect_line_ending <- function(path) {
samp <- suppressWarnings(readChar(path, nchars = 500))
if (isTRUE(grepl("\r\n", samp))) "\r\n" else "\n"
}
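# Example (illustrative; the result depends on how the file was written):
# detect_line_ending(proj_path("DESCRIPTION"))
# #> "\n" for a Unix-authored file, "\r\n" for a Windows-authored file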
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/line-ending.R
|
#' Use a package logo
#'
#' This function helps you use a logo in your package:
#' * Enforces a specific size
#' * Stores logo image file at `man/figures/logo.png`
#' * Produces the markdown text you need in README to include the logo
#'
#' @param img The path to an existing image file
#' @param geometry a [magick::geometry] string specifying size. The default
#' assumes that you have a hex logo using spec from
#' <http://hexb.in/sticker.html>.
#' @param retina If `TRUE`, the default, the image is displayed at half size in
#'   the README, on the assumption that `geometry` is double the desired size.
#'
#' @examples
#' \dontrun{
#' use_logo("usethis.png")
#' }
#' @export
use_logo <- function(img, geometry = "240x278", retina = TRUE) {
check_is_package("use_logo()")
logo_path <- proj_path("man", "figures", "logo", ext = path_ext(img))
create_directory(path_dir(logo_path))
if (!can_overwrite(logo_path)) {
return(invisible(FALSE))
}
if (path_ext(img) == "svg") {
logo_path <- path("man", "figures", "logo.svg")
file_copy(img, proj_path(logo_path), overwrite = TRUE)
ui_done("Copied {ui_path(img)} to {ui_path(logo_path)}")
height <- as.integer(sub(".*x", "", geometry))
} else {
check_installed("magick")
img_data <- magick::image_read(img)
img_data <- magick::image_resize(img_data, geometry)
magick::image_write(img_data, logo_path)
ui_done("Resized {ui_path(img)} to {geometry}")
height <- magick::image_info(magick::image_read(logo_path))$height
}
pkg <- project_name()
if (retina) {
height <- round(height / 2)
}
ui_todo("Add logo to your README with the following html:")
pd_link <- pkgdown_url(pedantic = TRUE)
if (is.null(pd_link)) {
ui_code_block("# {pkg} <img src=\"{proj_rel_path(logo_path)}\" align=\"right\" height=\"{height}\" alt=\"\" />")
} else {
ui_code_block("# {pkg} <a href=\"{pd_link}\"><img src=\"{proj_rel_path(logo_path)}\" align=\"right\" height=\"{height}\" alt=\"{pkg} website\" /></a>")
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/logo.R
|
#' Create Makefile
#'
#' `use_make()` adds a basic Makefile to the project root directory.
#'
#' @seealso The [documentation for GNU
#' Make](https://www.gnu.org/software/make/manual/html_node/).
#' @export
use_make <- function() {
use_template(
"Makefile",
data = list(name = project_name())
)
use_build_ignore("Makefile")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/make.R
|
#' Use a basic `NAMESPACE`
#'
#' If `roxygen` is `TRUE`, generates an empty `NAMESPACE` that exports nothing;
#' you'll need to explicitly export functions with `@export`. If `roxygen`
#' is `FALSE`, generates a default `NAMESPACE` that exports all functions
#' except those that start with `.`.
#'
#' @param roxygen Do you plan to manage `NAMESPACE` with roxygen2?
#' @seealso The [namespace
#' chapter](https://r-pkgs.org/dependencies-mindset-background.html#sec-dependencies-namespace)
#' of [R Packages](https://r-pkgs.org).
#' @export
use_namespace <- function(roxygen = TRUE) {
check_is_package("use_namespace()")
path <- proj_path("NAMESPACE")
if (roxygen) {
write_over(path, c("# Generated by roxygen2: do not edit by hand", ""))
} else {
write_over(path, 'exportPattern("^[^\\\\.]")')
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/namespace.R
|
#' Create a simple `NEWS.md`
#'
#' This creates a basic `NEWS.md` in the root directory.
#'
#' @inheritParams use_template
#' @seealso The [other markdown files
#' section](https://r-pkgs.org/other-markdown.html) of [R
#' Packages](https://r-pkgs.org).
#' @export
use_news_md <- function(open = rlang::is_interactive()) {
check_is_package("use_news_md()")
version <- if (is_dev_version()) "(development version)" else proj_version()
on_cran <- !is.null(cran_version())
if (on_cran) {
init_bullet <- "Added a `NEWS.md` file to track changes to the package."
} else {
init_bullet <- "Initial CRAN submission."
}
use_template(
"NEWS.md",
data = list(
Package = project_name(),
Version = version,
InitialBullet = init_bullet
),
open = open
)
git_ask_commit("Add NEWS.md", untracked = TRUE, paths = "NEWS.md")
}
use_news_heading <- function(version) {
news_path <- proj_path("NEWS.md")
if (!file_exists(news_path)) {
return(invisible())
}
news <- read_utf8(news_path)
title <- glue("# {project_name()} {version}")
if (title == news[[1]]) {
return(invisible())
}
development_title <- glue("# {project_name()} (development version)")
if (development_title == news[[1]]) {
news[[1]] <- title
ui_done("Replacing development heading in NEWS.md")
return(write_utf8(news_path, news))
}
ui_done("Adding new heading to NEWS.md")
write_utf8(news_path, c(title, "", news))
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/news.R
|
#' Depend on another package
#'
#' @description
#' `use_package()` adds a CRAN package dependency to `DESCRIPTION` and offers a
#' little advice about how to best use it. `use_dev_package()` adds a dependency
#' on an in-development package, adding the dev repo to `Remotes` so it will be
#' automatically installed from the correct location. There is no helper to
#' remove a dependency: to do that, simply remove that package from your
#' `DESCRIPTION` file.
#'
#' `use_package()` exists to support a couple of common maneuvers:
#' * Add a dependency to `Imports` or `Suggests` or `LinkingTo`.
#' * Add a minimum version to a dependency.
#' * Specify the minimum supported version for R.
#'
#' `use_package()` probably works for slightly more exotic modifications, but at
#' some point, you should edit `DESCRIPTION` yourself by hand. There is no
#' intention to account for all possible edge cases.
#'
#' @param package Name of package to depend on.
#' @param type Type of dependency: must be one of "Imports", "Depends",
#' "Suggests", "Enhances", or "LinkingTo" (or unique abbreviation). Matching
#' is case insensitive.
#' @param min_version Optionally, supply a minimum version for the package. Set
#' to `TRUE` to use the currently installed version.
#' @param remote By default, an `OWNER/REPO` GitHub remote is inserted.
#' Optionally, you can supply a character string to specify the remote, e.g.
#' `"gitlab::jimhester/covr"`, using any syntax supported by the [remotes
#' package](
#' https://remotes.r-lib.org/articles/dependencies.html#other-sources).
#'
#' @seealso The [dependencies section](https://r-pkgs.org/dependencies-mindset-background.html) of
#' [R Packages](https://r-pkgs.org).
#'
#' @export
#' @examples
#' \dontrun{
#' use_package("ggplot2")
#' use_package("dplyr", "suggests")
#' use_dev_package("glue")
#'
#' # Depend on R version 4.1
#' use_package("R", type = "Depends", min_version = "4.1")
#' }
use_package <- function(package, type = "Imports", min_version = NULL) {
if (type == "Imports") {
refuse_package(package, verboten = c("tidyverse", "tidymodels"))
}
changed <- use_dependency(package, type, min_version = min_version)
if (changed) {
how_to_use(package, type)
}
invisible()
}
#' @export
#' @rdname use_package
use_dev_package <- function(package, type = "Imports", remote = NULL) {
refuse_package(package, verboten = c("tidyverse", "tidymodels"))
changed <- use_dependency(package, type = type, min_version = TRUE)
use_remote(package, remote)
if (changed) {
how_to_use(package, type)
}
invisible()
}
use_remote <- function(package, package_remote = NULL) {
desc <- proj_desc()
remotes <- desc$get_remotes()
if (any(grepl(package, remotes))) {
return(invisible())
}
if (is.null(package_remote)) {
package_desc <- desc::desc(package = package)
package_remote <- package_remote(package_desc)
}
ui_done("
Adding {ui_value(package_remote)} to {ui_field('Remotes')} field in \\
DESCRIPTION")
remotes <- c(remotes, package_remote)
desc$set_remotes(remotes)
desc$write()
invisible()
}
# Helpers -----------------------------------------------------------------
package_remote <- function(desc) {
remote <- as.list(desc$get(c("RemoteType", "RemoteUsername", "RemoteRepo")))
is_recognized_remote <- all(map_lgl(remote, ~ is_string(.x) && !is.na(.x)))
if (is_recognized_remote) {
# non-GitHub remotes get a 'RemoteType::' prefix
if (!identical(remote$RemoteType, "github")) {
remote$RemoteUsername <- paste0(remote$RemoteType, "::", remote$RemoteUsername)
}
return(paste0(remote$RemoteUsername, "/", remote$RemoteRepo))
}
package <- desc$get_field("Package")
urls <- desc_urls(package, desc = desc)
urls <- urls[urls$is_github, ]
if (nrow(urls) < 1) {
ui_stop("Cannot determine remote for {ui_value(package)}")
}
parsed <- parse_github_remotes(urls$url[[1]])
remote <- paste0(parsed$repo_owner, "/", parsed$repo_name)
if (ui_yeah("
{ui_value(package)} was either installed from CRAN or local source.
Based on DESCRIPTION, we propose the remote: {ui_value(remote)}
Is this OK?")) {
remote
} else {
ui_stop("Cannot determine remote for {ui_value(package)}")
}
}
refuse_package <- function(package, verboten) {
if (package %in% verboten) {
code <- glue("use_package(\"{package}\", type = \"depends\")")
ui_stop(
"{ui_value(package)} is a meta-package and it is rarely a good idea to \\
depend on it. Please determine the specific underlying package(s) that \\
offer the function(s) you need and depend on that instead. \\
For data analysis projects that use a package structure but do not implement \\
a formal R package, adding {ui_value(package)} to Depends is a \\
reasonable compromise. Call {ui_code(code)} to achieve this.
"
)
}
invisible(package)
}
how_to_use <- function(package, type) {
types <- tolower(c("Imports", "Depends", "Suggests", "Enhances", "LinkingTo"))
type <- match.arg(tolower(type), types)
if (package == "R" && type == "depends") {
return("")
}
switch(type,
imports = ui_todo("Refer to functions with {ui_code(paste0(package, '::fun()'))}"),
depends = ui_todo(
"Are you sure you want {ui_field('Depends')}? \\
{ui_field('Imports')} is almost always the better choice."
),
suggests = suggests_usage_hint(package),
enhances = "",
linkingto = show_includes(package)
)
}
suggests_usage_hint <- function(package) {
imports_rlang <- proj_desc()$has_dep("rlang", type = "Imports")
if (imports_rlang) {
code1 <- glue('rlang::is_installed("{package}")')
code2 <- glue('rlang::check_installed("{package}")')
ui_todo("
In your package code, use {ui_code(code1)} or {ui_code(code2)} to test \\
if {package} is installed")
code <- glue("{package}::fun()")
ui_todo("Then directly refer to functions with {ui_code(code)}")
} else {
code <- glue("requireNamespace(\"{package}\", quietly = TRUE)")
ui_todo("Use {ui_code(code)} to test if package is installed")
code <- glue("{package}::fun()")
ui_todo("Then directly refer to functions with {ui_code(code)}")
}
}
show_includes <- function(package) {
incl <- path_package("include", package = package)
h <- dir_ls(incl, regexp = "[.](h|hpp)$")
if (length(h) == 0) {
return()
}
ui_todo("Possible includes are:")
ui_code_block("#include <{path_file(h)}>", copy = FALSE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/package.R
|
#' Use magrittr's pipe in your package
#'
#' Does setup necessary to use magrittr's pipe operator, `%>%`, in your package.
#' This function requires the use of roxygen.
#' * Adds magrittr to "Imports" in `DESCRIPTION`.
#' * Imports the pipe operator specifically, which is necessary for internal
#' use.
#' * Exports the pipe operator, if `export = TRUE`, which is necessary to make
#' `%>%` available to the users of your package.
#'
#' @param export If `TRUE`, the file `R/utils-pipe.R` is added, which provides
#' the roxygen template to import and re-export `%>%`. If `FALSE`, the necessary
#' roxygen directive is added, if possible, or otherwise instructions are given.
#'
#' @export
#'
#' @examples
#' \dontrun{
#' use_pipe()
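#'
#' # Import the pipe for internal use only, without re-exporting it
#' use_pipe(export = FALSE)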
#' }
use_pipe <- function(export = TRUE) {
check_is_package("use_pipe()")
check_uses_roxygen("use_pipe()")
if (export) {
use_dependency("magrittr", "Imports")
use_template("pipe.R", "R/utils-pipe.R") && roxygen_remind()
return(invisible(TRUE))
}
use_import_from("magrittr", "%>%")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/pipe.R
|
#' Use pkgdown
#'
#' @description
#' [pkgdown](https://pkgdown.r-lib.org) makes it easy to turn your package into
#' a beautiful website. usethis provides two functions to help you use pkgdown:
#'
#' * `use_pkgdown()`: creates a pkgdown config file and adds relevant files or
#' directories to `.Rbuildignore` and `.gitignore`.
#'
#' * `use_pkgdown_github_pages()`: implements the GitHub setup needed to
#' automatically publish your pkgdown site to GitHub pages:
#'
#' - (first, it calls `use_pkgdown()`)
#' - [use_github_pages()] prepares to publish the pkgdown site from the
#' `gh-pages` branch
#' - [`use_github_action("pkgdown")`][use_github_action()] configures a
#' GitHub Action to automatically build the pkgdown site and deploy it via
#' GitHub Pages
#' - The pkgdown site's URL is added to the pkgdown configuration file,
#' to the URL field of DESCRIPTION, and to the GitHub repo.
#' - Packages owned by certain GitHub organizations (tidyverse, r-lib, and
#' tidymodels) get some special treatment, in terms of anticipating the
#' (eventual) site URL and the use of a pkgdown template.
#'
#' @seealso <https://pkgdown.r-lib.org/articles/pkgdown.html#configuration>
#' @param config_file Path to the pkgdown yaml config file, relative to the
#' project.
#' @param destdir Target directory for pkgdown docs.
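#' @examples
#' \dontrun{
#' # Minimal pkgdown setup, using the default config file and "docs" directory
#' use_pkgdown()
#'
#' # Full setup for publishing the site via GitHub Pages
#' # (requires push access to the GitHub repo)
#' use_pkgdown_github_pages()
#' }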
#' @export
use_pkgdown <- function(config_file = "_pkgdown.yml", destdir = "docs") {
check_is_package("use_pkgdown()")
check_installed("pkgdown")
use_build_ignore(c(config_file, destdir, "pkgdown"))
use_git_ignore(destdir)
config <- pkgdown_config(destdir)
config_path <- proj_path(config_file)
write_over(config_path, yaml::as.yaml(config))
edit_file(config_path)
invisible(TRUE)
}
pkgdown_config <- function(destdir) {
config <- list(
url = NULL
)
if (pkgdown_version() >= "1.9000") {
config$template <- list(bootstrap = 5L)
}
if (!identical(destdir, "docs")) {
config$destination <- destdir
}
config
}
# wrapping because I need to be able to mock this in tests
pkgdown_version <- function() {
utils::packageVersion("pkgdown")
}
#' @rdname use_pkgdown
#' @export
use_pkgdown_github_pages <- function() {
tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork"))
check_can_push(tr = tr, "to turn on GitHub Pages")
use_pkgdown()
site <- use_github_pages()
use_github_action("pkgdown")
site_url <- tidyverse_url(url = site$html_url, tr = tr)
use_pkgdown_url(url = site_url, tr = tr)
if (is_posit_pkg()) {
proj_desc_field_update("Config/Needs/website", "tidyverse/tidytemplate", append = TRUE)
}
}
# helpers ----------------------------------------------------------------------
use_pkgdown_url <- function(url, tr = NULL) {
tr <- tr %||% target_repo(github_get = TRUE)
config_path <- pkgdown_config_path()
ui_done("
Recording {ui_value(url)} as site's {ui_field('url')} in \\
{ui_path(config_path)}")
config <- pkgdown_config_meta()
if (has_name(config, "url")) {
config$url <- url
} else {
config <- c(url = url, config)
}
write_utf8(config_path, yaml::as.yaml(config))
proj_desc_field_update("URL", url, append = TRUE)
if (has_package_doc()) {
ui_todo("
Run {ui_code('devtools::document()')} to update package-level documentation.")
}
gh <- gh_tr(tr)
homepage <- gh("GET /repos/{owner}/{repo}")[["homepage"]]
if (is.null(homepage) || homepage != url) {
ui_done("Setting {ui_value(url)} as homepage of GitHub repo \\
{ui_value(tr$repo_spec)}")
gh("PATCH /repos/{owner}/{repo}", homepage = url)
}
invisible()
}
tidyverse_url <- function(url, tr = NULL) {
tr <- tr %||% target_repo(github_get = TRUE)
if (!is_interactive() ||
!tr$repo_owner %in% c("tidyverse", "r-lib", "tidymodels")) {
return(url)
}
custom_url <- glue("https://{tr$repo_name}.{tr$repo_owner}.org")
if (grepl(glue("{custom_url}/?"), url)) {
return(url)
}
if (ui_yeah("
{ui_value(tr$repo_name)} is owned by the {ui_value(tr$repo_owner)} GitHub \\
organization.
Shall we configure {ui_value(custom_url)} as the (eventual) \\
pkgdown URL?")) {
custom_url
} else {
url
}
}
pkgdown_config_path <- function() {
path_first_existing(
proj_path(
c(
"_pkgdown.yml",
"_pkgdown.yaml",
"pkgdown/_pkgdown.yml",
"pkgdown/_pkgdown.yaml",
"inst/_pkgdown.yml",
"inst/_pkgdown.yaml"
)
)
)
}
uses_pkgdown <- function() {
!is.null(pkgdown_config_path())
}
pkgdown_config_meta <- function() {
if (!uses_pkgdown()) {
return(list())
}
path <- pkgdown_config_path()
yaml::read_yaml(path) %||% list()
}
pkgdown_url <- function(pedantic = FALSE) {
if (!uses_pkgdown()) {
return(NULL)
}
meta <- pkgdown_config_meta()
url <- meta$url
if (!is.null(url)) {
return(url)
}
if (pedantic) {
ui_warn("
pkgdown config does not specify the site's {ui_field('url')}, \\
which is optional but recommended")
}
NULL
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/pkgdown.R
|
#' Helpers for GitHub pull requests
#'
#' @description
#' The `pr_*` family of functions is designed to make working with GitHub pull
#' requests (PRs) as painless as possible for both contributors and package
#' maintainers.
#'
#' To use the `pr_*` functions, your project must be a Git repo and have one of
#' these GitHub remote configurations:
#' * "ours": You can push to the GitHub remote configured as `origin` and it's
#' not a fork.
#' * "fork": You can push to the GitHub remote configured as `origin`, it's a
#' fork, and its parent is configured as `upstream`. `origin` points to your
#' **personal** copy and `upstream` points to the **source repo**.
#'
#' "Ours" and "fork" are two of several GitHub remote configurations examined in
#' [Common remote setups](https://happygitwithr.com/common-remote-setups.html)
#' in Happy Git and GitHub for the useR.
#'
#' The [Pull Request
#' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html)
#' article walks through the process of making a pull request with the `pr_*`
#' functions.
#'
#' The `pr_*` functions also use your Git/GitHub credentials to carry out
#' various remote operations; see below for more about auth. The `pr_*`
#' functions also proactively check for agreement re: the default branch in your
#' local repo and the source repo. See [git_default_branch()] for more.
#'
#' @template double-auth
#'
#' @section For contributors:
#' To contribute to a package, first use `create_from_github("OWNER/REPO")`.
#' This forks the source repository and checks out a local copy.
#'
#' Next use `pr_init()` to create a branch for your PR. It is best practice to
#' never make commits to the default branch of a fork (usually named
#' `main` or `master`), because you do not own it. A pull request should always
#' come from a feature branch. It will be much easier to pull upstream changes
#' from the fork parent if you only allow yourself to work in feature branches.
#' It is also much easier for a maintainer to explore and extend your PR if you
#' create a feature branch.
#'
#' Work locally, in your branch, making changes to files, and committing your
#' work. Once you're ready to create the PR, run `pr_push()` to push your local
#' branch to GitHub, and open a webpage that lets you initiate the PR (or draft
#' PR).
#'
#' To learn more about the process of making a pull request, read the [Pull
#' Request
#' Helpers](https://usethis.r-lib.org/articles/articles/pr-functions.html)
#' vignette.
#'
#' If you are lucky, your PR will be perfect, and the maintainer will accept it.
#' You can then run `pr_finish()` to delete your PR branch. In most cases,
#' however, the maintainer will ask you to make some changes. Make the changes,
#' then run `pr_push()` to update your PR.
#'
#' It's also possible that the maintainer will contribute some code to your PR:
#' to get those changes back onto your computer, run `pr_pull()`. It can also
#' happen that other changes have occurred in the package since you first
#' created your PR. You might need to merge the default branch (usually named
#' `main` or `master`) into your PR branch. Do that by running
#' `pr_merge_main()`: this makes sure that your PR is compatible with the
#' primary repo's main line of development. Both `pr_pull()` and
#' `pr_merge_main()` can result in merge conflicts, so be prepared to resolve
#' before continuing.
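#'
#' A minimal sketch of the contributor workflow (the repo spec and branch name
#' are purely illustrative):
#'
#' ```
#' create_from_github("OWNER/REPO")
#' pr_init("fix-typo")
#' # ... edit files, commit ...
#' pr_push()
#' # ... address review feedback, commit again, then update the PR ...
#' pr_push()
#' # once the PR has been merged:
#' pr_finish()
#' ```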
#'
#' @section For maintainers:
#' To download a PR locally so that you can experiment with it, run
#' `pr_fetch()` and select the PR or, if you already know its number, call
#' `pr_fetch(<pr_number>)`. If you make changes, run `pr_push()` to push them
#' back to GitHub. After you have merged the PR, run `pr_finish()` to delete the
#' local branch and remove the remote associated with the contributor's fork.
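#'
#' A minimal sketch of the maintainer workflow (the PR number is illustrative):
#'
#' ```
#' pr_fetch(123)
#' # ... experiment locally, perhaps push tweaks back with pr_push() ...
#' # merge the PR in the browser, then clean up locally:
#' pr_finish(123)
#' ```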
#'
#' @section Overview of all the functions:
#' * `pr_init()`: As a contributor, start work on a new PR by ensuring that
#' your local repo is up-to-date, then creating and checking out a new branch.
#' Nothing is pushed to or created on GitHub until you call `pr_push()`.
#' * `pr_fetch()`: As a maintainer, review or contribute changes to an existing
#' PR by creating a local branch that tracks the remote PR. `pr_fetch()` does as
#' little work as possible, so you can also use it to resume work on a PR that
#' already has a local branch (where it will also ensure your local branch is
#' up-to-date). If called with no arguments, up to 9 open PRs are offered for
#' interactive selection.
#' * `pr_resume()`: Resume work on a PR by switching to an existing local branch
#' and pulling any changes from its upstream tracking branch, if it has one. If
#' called with no arguments, up to 9 local branches are offered for interactive
#' selection, with a preference for branches connected to PRs and for branches
#' with recent activity.
#' * `pr_push()`: The first time it's called, a PR branch is pushed to GitHub
#' and you're taken to a webpage where a new PR (or draft PR) can be created.
#' This also sets up the local branch to track its remote counterpart.
#' Subsequent calls to `pr_push()` make sure the local branch has all the remote
#' changes and, if so, pushes local changes, thereby updating the PR.
#' * `pr_pull()`: Pulls changes from the local branch's remote tracking branch.
#' If a maintainer has extended your PR, this is how you bring those changes
#' back into your local work.
#' * `pr_merge_main()`: Pulls changes from the default branch of the source repo
#' into the current local branch. This can be used when the local branch is the
#' default branch or when it's a PR branch.
#' * `pr_pause()`: Makes sure you're up-to-date with any remote changes in the
#' PR. Then switches back to the default branch and pulls from the source repo.
#' Use `pr_resume()` with name of branch or use `pr_fetch()` to resume using PR
#' number.
#' * `pr_view()`: Visits the PR associated with the current branch in the
#' browser (default) or the specific PR identified by `number`.
#' (FYI [browse_github_pulls()] is a handy way to visit the list of all PRs for
#' the current project.)
#' * `pr_forget()`: Does local clean up when the current branch is an actual or
#' notional PR that you want to abandon. Maybe you initiated it yourself, via
#' `pr_init()`, or you used `pr_fetch()` to explore a PR from GitHub. Only does
#' *local* operations: does not update or delete any remote branches, nor does
#' it close any PRs. Alerts the user to any uncommitted or unpushed work that is
#' at risk of being lost. If user chooses to proceed, switches back to the
#' default branch, pulls changes from source repo, and deletes local PR branch.
#' Any associated Git remote is deleted, if the "forgotten" PR was the only
#' branch using it.
#' * `pr_finish()`: Does post-PR clean up, but does NOT actually merge or close
#' a PR (maintainer should do this in the browser). If `number` is not given,
#' infers the PR from the upstream tracking branch of the current branch. If
#' `number` is given, it does not matter whether the PR exists locally. If PR
#' exists locally, alerts the user to uncommitted or unpushed changes, then
#' switches back to the default branch, pulls changes from source repo, and
#' deletes local PR branch. If the PR came from an external fork, any associated
#' Git remote is deleted, provided it's not in use by any other local branches.
#' If the PR has been merged and user has permission, deletes the remote branch
#' (this is the only remote operation that `pr_finish()` potentially does).
#'
#' @name pull-requests
NULL
#' @export
#' @rdname pull-requests
#' @param branch Name of a new or existing local branch. If creating a new
#' branch, note this should usually consist of lower case letters, numbers,
#' and `-`.
pr_init <- function(branch) {
check_string(branch)
repo <- git_repo()
if (gert::git_branch_exists(branch, local = TRUE, repo = repo)) {
code <- glue("pr_resume(\"{branch}\")")
ui_info("
Branch {ui_value(branch)} already exists, calling {ui_code(code)}")
return(pr_resume(branch))
}
# don't absolutely require PAT success, because we could be offline
# or in another salvageable situation, e.g. need to configure PAT
cfg <- github_remote_config(github_get = NA)
check_for_bad_config(cfg)
tr <- target_repo(cfg, ask = FALSE)
maybe_good_configs <- c("maybe_ours_or_theirs", "maybe_fork")
if (cfg$type %in% maybe_good_configs) {
ui_line('
Unable to confirm the GitHub remote configuration is "pull request ready".
You probably need to configure a personal access token for \\
{ui_value(tr$host)}.
See {ui_code("gh_token_help()")} for help.
(Or maybe we\'re just offline?)')
if (ui_github_remote_config_wat(cfg)) {
ui_oops("Cancelling.")
return(invisible())
}
}
default_branch <- git_default_branch()
challenge_non_default_branch(
"Are you sure you want to create a PR branch based on a non-default branch?",
default_branch = default_branch
)
online <- is_online(tr$host)
if (online) {
# this is not pr_pull_source_override() because:
# a) we may NOT be on default branch (although we probably are)
# b) we didn't just switch to the branch we're on, therefore we have to
# consider that the pull may be affected by uncommitted changes or a
# merge
current_branch <- git_branch()
if (current_branch == default_branch) {
# override for mis-configured forks, that have default branch tracking
# the fork (origin) instead of the source (upstream)
remref <- glue("{tr$remote}/{default_branch}")
} else {
remref <- git_branch_tracking(current_branch)
}
if (!is.na(remref)) {
comparison <- git_branch_compare(current_branch, remref)
if (comparison$remote_only > 0) {
challenge_uncommitted_changes()
}
ui_done("Pulling changes from {ui_value(remref)}.")
git_pull(remref = remref, verbose = FALSE)
}
} else {
ui_info("
Unable to pull changes for current branch, since we are offline.")
}
ui_done("Creating and switching to local branch {ui_value(branch)}.")
gert::git_branch_create(branch, repo = repo)
config_key <- glue("branch.{branch}.created-by")
gert::git_config_set(config_key, value = "usethis::pr_init", repo = repo)
ui_todo("Use {ui_code('pr_push()')} to create a PR.")
invisible()
}
#' @export
#' @rdname pull-requests
pr_resume <- function(branch = NULL) {
repo <- git_repo()
if (is.null(branch)) {
ui_info("
No branch specified ... looking up local branches and associated PRs.")
default_branch <- git_default_branch()
branch <- choose_branch(exclude = default_branch)
if (is.null(branch)) {
ui_oops("Repo doesn't seem to have any non-default branches.")
return(invisible())
}
if (length(branch) == 0) {
ui_oops("No branch selected, exiting.")
return(invisible())
}
}
check_string(branch)
if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) {
code <- glue("pr_init(\"{branch}\")")
ui_stop("
No branch named {ui_value(branch)} exists.
Call {ui_code(code)} to create a new PR branch.")
}
challenge_uncommitted_changes()
ui_done("Switching to branch {ui_value(branch)}.")
gert::git_branch_checkout(branch, repo = repo)
git_pull()
ui_todo("Use {ui_code('pr_push()')} to create or update PR.")
invisible()
}
#' @export
#' @rdname pull-requests
#' @param number Number of PR.
#' @param target Which repo to target? This is only a question in the case of a
#' fork. In a fork, there is some slim chance that you want to consider pull
#' requests against your fork (the primary repo, i.e. `origin`) instead of
#' those against the source repo (i.e. `upstream`, which is the default).
#'
#' @examples
#' \dontrun{
#' pr_fetch(123)
#' }
pr_fetch <- function(number = NULL, target = c("source", "primary")) {
repo <- git_repo()
tr <- target_repo(github_get = NA, role = target, ask = FALSE)
challenge_uncommitted_changes()
if (is.null(number)) {
ui_info("No PR specified ... looking up open PRs.")
pr <- choose_pr(tr = tr)
if (is.null(pr)) {
ui_oops("No open PRs found for {ui_value(tr$repo_spec)}.")
return(invisible())
}
if (min(lengths(pr)) == 0) {
ui_oops("No PR selected, exiting.")
return(invisible())
}
} else {
pr <- pr_get(number = number, tr = tr)
}
if (is.na(pr$pr_repo_owner)) {
ui_stop("
The repo or branch where PR #{pr$pr_number} originates seems to have been \\
deleted.")
}
pr_user <- glue("@{pr$pr_user}")
ui_done("
Checking out PR {ui_value(pr$pr_string)} ({ui_field(pr_user)}): \\
{ui_value(pr$pr_title)}.")
if (pr$pr_from_fork && isFALSE(pr$maintainer_can_modify)) {
ui_info("
Note that user does NOT allow maintainer to modify this PR at this \\
time, although this can be changed.")
}
remote <- github_remote_list(pr$pr_remote)
if (nrow(remote) == 0) {
url <- switch(tr$protocol, https = pr$pr_https_url, ssh = pr$pr_ssh_url)
ui_done("Adding remote {ui_value(pr$pr_remote)} as {ui_value(url)}.")
gert::git_remote_add(url = url, name = pr$pr_remote, repo = repo)
config_key <- glue("remote.{pr$pr_remote}.created-by")
gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo)
}
pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}")
gert::git_fetch(
remote = pr$pr_remote,
refspec = pr$pr_ref,
repo = repo,
verbose = FALSE
)
if (is.na(pr$pr_local_branch)) {
pr$pr_local_branch <-
if (pr$pr_from_fork) sub(":", "-", pr$pr_label) else pr$pr_ref
}
# Create local branch, if necessary, and switch to it ----
if (!gert::git_branch_exists(pr$pr_local_branch, local = TRUE, repo = repo)) {
ui_done("
Creating and switching to local branch {ui_value(pr$pr_local_branch)}.")
ui_done("Setting {ui_value(pr_remref)} as remote tracking branch.")
gert::git_branch_create(pr$pr_local_branch, ref = pr_remref, repo = repo)
config_key <- glue("branch.{pr$pr_local_branch}.created-by")
gert::git_config_set(config_key, "usethis::pr_fetch", repo = repo)
config_url <- glue("branch.{pr$pr_local_branch}.pr-url")
gert::git_config_set(config_url, pr$pr_html_url, repo = repo)
return(invisible())
}
# Local branch pre-existed; make sure tracking branch is set, switch, & pull
ui_done("Switching to branch {ui_value(pr$pr_local_branch)}.")
gert::git_branch_checkout(pr$pr_local_branch, repo = repo)
config_url <- glue("branch.{pr$pr_local_branch}.pr-url")
gert::git_config_set(config_url, pr$pr_html_url, repo = repo)
pr_branch_ours_tracking <- git_branch_tracking(pr$pr_local_branch)
if (is.na(pr_branch_ours_tracking) ||
pr_branch_ours_tracking != pr_remref) {
ui_done("Setting {ui_value(pr_remref)} as remote tracking branch.")
gert::git_branch_set_upstream(pr_remref, repo = repo)
}
git_pull(verbose = FALSE)
}
#' @export
#' @rdname pull-requests
pr_push <- function() {
repo <- git_repo()
cfg <- github_remote_config(github_get = TRUE)
check_for_config(cfg, ok_configs = c("ours", "fork"))
default_branch <- git_default_branch()
check_pr_branch(default_branch)
challenge_uncommitted_changes()
branch <- git_branch()
remref <- git_branch_tracking(branch)
if (is.na(remref)) {
# this is the first push
if (cfg$type == "fork" && cfg$upstream$can_push && is_interactive()) {
choices <- c(
origin = glue(
"{cfg$origin$repo_spec} = {ui_value('origin')} (external PR)"),
upstream = glue(
"{cfg$upstream$repo_spec} = {ui_value('upstream')} (internal PR)")
)
title <- glue("Which repo do you want to push to?")
choice <- utils::menu(choices, graphics = FALSE, title = title)
remote <- names(choices)[[choice]]
} else {
remote <- "origin"
}
git_push_first(branch, remote)
} else {
check_branch_pulled(use = "pr_pull()")
git_push(branch, remref)
}
# Prompt to create PR if does not exist yet
tr <- target_repo(cfg, ask = FALSE)
pr <- pr_find(branch, tr = tr)
if (is.null(pr)) {
pr_create()
} else {
ui_todo("
View PR at {ui_value(pr$pr_html_url)} or call {ui_code('pr_view()')}.")
}
invisible()
}
#' @export
#' @rdname pull-requests
pr_pull <- function() {
cfg <- github_remote_config(github_get = TRUE)
check_for_config(cfg)
default_branch <- git_default_branch()
check_pr_branch(default_branch)
challenge_uncommitted_changes()
git_pull()
# note associated PR in git config, if applicable
tr <- target_repo(cfg, ask = FALSE)
pr_find(tr = tr)
invisible(TRUE)
}
#' @export
#' @rdname pull-requests
pr_merge_main <- function() {
tr <- target_repo(github_get = TRUE, ask = FALSE)
challenge_uncommitted_changes()
remref <- glue("{tr$remote}/{tr$default_branch}")
ui_done("Pulling changes from {ui_value(remref)}.")
git_pull(remref, verbose = FALSE)
}
#' @export
#' @rdname pull-requests
pr_view <- function(number = NULL, target = c("source", "primary")) {
tr <- target_repo(github_get = NA, role = target, ask = FALSE)
url <- NULL
if (is.null(number)) {
branch <- git_branch()
default_branch <- git_default_branch()
if (branch != default_branch) {
url <- pr_url(branch = branch, tr = tr)
if (is.null(url)) {
ui_info("
Current branch ({ui_value(branch)}) does not appear to be \\
connected to a PR.")
} else {
number <- sub("^.+pull/", "", url)
ui_info("
Current branch ({ui_value(branch)}) is connected to PR #{number}.")
}
}
} else {
pr <- pr_get(number = number, tr = tr)
url <- pr$pr_html_url
}
if (is.null(url)) {
ui_info("No PR specified ... looking up open PRs.")
pr <- choose_pr(tr = tr)
if (is.null(pr)) {
ui_oops("No open PRs found for {ui_value(tr$repo_spec)}.")
return(invisible())
}
if (min(lengths(pr)) == 0) {
ui_oops("No PR selected, exiting.")
return(invisible())
}
url <- pr$pr_html_url
}
view_url(url)
}
#' @export
#' @rdname pull-requests
pr_pause <- function() {
# intentionally naive selection of target repo
tr <- target_repo(github_get = FALSE, ask = FALSE)
ui_done("Switching back to the default branch.")
default_branch <- git_default_branch()
if (git_branch() == default_branch) {
ui_info("
Already on this repo's default branch ({ui_value(default_branch)}), \\
nothing to do.")
return(invisible())
}
challenge_uncommitted_changes()
# TODO: what happens here if offline?
check_branch_pulled(use = "pr_pull()")
ui_done("Switching back to default branch ({ui_value(default_branch)}).")
gert::git_branch_checkout(default_branch, repo = git_repo())
pr_pull_source_override(tr = tr, default_branch = default_branch)
}
#' @export
#' @rdname pull-requests
pr_finish <- function(number = NULL, target = c("source", "primary")) {
pr_clean(number = number, target = target, mode = "finish")
}
#' @export
#' @rdname pull-requests
pr_forget <- function() pr_clean(mode = "forget")
# unexported helpers ----
# Removes local evidence of PRs that you're done with or wish you'd never
# started or fetched
# Only possible remote action is to delete the remote branch for a merged PR
pr_clean <- function(number = NULL,
target = c("source", "primary"),
mode = c("finish", "forget")) {
mode <- match.arg(mode)
repo <- git_repo()
tr <- target_repo(github_get = NA, role = target, ask = FALSE)
default_branch <- git_default_branch()
if (is.null(number)) {
check_pr_branch(default_branch)
pr <- pr_find(git_branch(), tr = tr, state = "all")
# if the remote branch has already been deleted (probably post-merge), we
# can't always reverse engineer what the corresponding local branch was, but
# we already know it -- it's how we found the PR in the first place!
if (!is.null(pr)) {
pr$pr_local_branch <- pr$pr_local_branch %|% git_branch()
}
} else {
pr <- pr_get(number = number, tr = tr)
}
pr_local_branch <- if (is.null(pr)) git_branch() else pr$pr_local_branch
if (!is.na(pr_local_branch)) {
if (pr_local_branch == git_branch()) {
challenge_uncommitted_changes()
}
tracking_branch <- git_branch_tracking(pr_local_branch)
if (is.na(tracking_branch)) {
if (ui_nope("
Local branch {ui_value(pr_local_branch)} has no associated remote \\
branch.
If we delete {ui_value(pr_local_branch)}, any work that exists only \\
on this branch may be hard for you to recover.
Proceed anyway?")) {
ui_oops("Cancelling.")
return(invisible())
}
} else {
cmp <- git_branch_compare(
branch = pr_local_branch,
remref = tracking_branch
)
if (cmp$local_only > 0 && ui_nope("
Local branch {ui_value(pr_local_branch)} has 1 or more commits \\
that have not been pushed to {ui_value(tracking_branch)}.
If we delete {ui_value(pr_local_branch)}, this work may be hard \\
for you to recover.
Proceed anyway?")) {
ui_oops("Cancelling.")
return(invisible())
}
}
}
if (git_branch() != default_branch) {
ui_done("Switching back to default branch ({ui_value(default_branch)}).")
gert::git_branch_checkout(default_branch, force = TRUE, repo = repo)
pr_pull_source_override(tr = tr, default_branch = default_branch)
}
if (!is.na(pr_local_branch)) {
ui_done("Deleting local {ui_value(pr_local_branch)} branch.")
gert::git_branch_delete(pr_local_branch, repo = repo)
}
if (is.null(pr)) {
return(invisible())
}
if (mode == "finish") {
pr_branch_delete(pr)
}
# delete remote, if we (usethis) added it AND no remaining tracking branches
created_by <- git_cfg_get(glue("remote.{pr$pr_remote}.created-by"))
if (is.null(created_by) || !grepl("^usethis::", created_by)) {
return(invisible())
}
branches <- gert::git_branch_list(local = TRUE, repo = repo)
branches <- branches[!is.na(branches$upstream), ]
if (sum(grepl(glue("^refs/remotes/{pr$pr_remote}"), branches$upstream)) == 0) {
ui_done("Removing remote {ui_value(pr$pr_remote)}")
gert::git_remote_remove(remote = pr$pr_remote, repo = repo)
}
invisible()
}
# Make sure to pull from upstream/DEFAULT (as opposed to origin/DEFAULT) if
# we're in DEFAULT branch of a fork. I wish everyone set up DEFAULT to track the
# DEFAULT branch in the source repo, but this protects us against sub-optimal
# setup.
pr_pull_source_override <- function(tr = NULL, default_branch = NULL) {
# naive selection of target repo; calling function should analyse the config
tr <- tr %||% target_repo(github_get = FALSE, ask = FALSE)
# TODO: why does this not use a check_*() function, i.e. shared helper?
# I guess to issue a specific error message?
current_branch <- git_branch()
default_branch <- default_branch %||% git_default_branch()
if (current_branch != default_branch) {
ui_stop("
Internal error: pr_pull_source_override() should only be used when on \\
default branch")
}
# guard against mis-configured forks, that have default branch tracking
# the fork (origin) instead of the source (upstream)
# TODO: should I just change the upstream tracking branch, i.e. fix it?
remref <- glue("{tr$remote}/{default_branch}")
if (is_online(tr$host)) {
ui_done("Pulling changes from {ui_value(remref)}")
git_pull(remref = remref, verbose = FALSE)
} else {
ui_info("
Can't reach {ui_value(tr$host)}, therefore unable to pull changes from \\
{ui_value(remref)}")
}
}
pr_create <- function() {
branch <- git_branch()
tracking_branch <- git_branch_tracking(branch)
remote <- remref_remote(tracking_branch)
remote_dat <- github_remotes(remote, github_get = FALSE)
ui_todo("Create PR at link given below")
view_url(glue_data(remote_dat, "{host_url}/{repo_spec}/compare/{branch}"))
}
# retrieves 1 PR, if:
# * we can establish a tracking relationship between `branch` and a PR branch
# * we can get the user to choose 1
pr_find <- function(branch = git_branch(),
tr = NULL,
state = c("open", "closed", "all")) {
# Have we done this before? Check if we've cached pr-url in git config.
config_url <- glue("branch.{branch}.pr-url")
url <- git_cfg_get(config_url, where = "local")
if (!is.null(url)) {
return(pr_get(number = sub("^.+pull/", "", url), tr = tr))
}
tracking_branch <- git_branch_tracking(branch)
if (is.na(tracking_branch)) {
return(NULL)
}
state <- match.arg(state)
remote <- remref_remote(tracking_branch)
remote_dat <- github_remotes(remote)
pr_head <- glue("{remote_dat$repo_owner}:{remref_branch(tracking_branch)}")
pr_dat <- pr_list(tr = tr, state = state, head = pr_head)
if (nrow(pr_dat) == 0) {
return(NULL)
}
if (nrow(pr_dat) > 1) {
spec <- sub(":", "/", pr_head)
ui_info("Multiple PRs are associated with {ui_value(spec)}.")
pr_dat <- choose_pr(pr_dat = pr_dat)
if (min(lengths(pr_dat)) == 0) {
ui_stop("
One of these PRs must be specified explicitly or interactively: \\
{ui_value(paste0('#', pr_dat$pr_number))}")
}
}
gert::git_config_set(config_url, pr_dat$pr_html_url, repo = git_repo())
as.list(pr_dat)
}
pr_url <- function(branch = git_branch(),
tr = NULL,
state = c("open", "closed", "all")) {
state <- match.arg(state)
pr <- pr_find(branch, tr = tr, state = state)
if (is.null(pr)) {
NULL
} else {
pr$pr_html_url
}
}
pr_data_tidy <- function(pr) {
out <- list(
pr_number = pluck_int(pr, "number"),
pr_title = pluck_chr(pr, "title"),
pr_state = pluck_chr(pr, "state"),
pr_user = pluck_chr(pr, "user", "login"),
pr_created_at = pluck_chr(pr, "created_at"),
pr_updated_at = pluck_chr(pr, "updated_at"),
pr_merged_at = pluck_chr(pr, "merged_at"),
pr_label = pluck_chr(pr, "head", "label"),
# the 'repo' element of 'head' is NULL when fork has been deleted
pr_repo_owner = pluck_chr(pr, "head", "repo", "owner", "login"),
pr_ref = pluck_chr(pr, "head", "ref"),
pr_repo_spec = pluck_chr(pr, "head", "repo", "full_name"),
pr_from_fork = pluck_lgl(pr, "head", "repo", "fork"),
# 'maintainer_can_modify' is only present when we GET one specific PR
pr_maintainer_can_modify = pluck_lgl(pr, "maintainer_can_modify"),
pr_https_url = pluck_chr(pr, "head", "repo", "clone_url"),
pr_ssh_url = pluck_chr(pr, "head", "repo", "ssh_url"),
pr_html_url = pluck_chr(pr, "html_url"),
pr_string = glue("
{pluck_chr(pr, 'base', 'repo', 'full_name')}/#{pluck_int(pr, 'number')}")
)
grl <- github_remote_list(these = NULL)
m <- match(out$pr_repo_spec, grl$repo_spec)
out$pr_remote <- if (is.na(m)) out$pr_repo_owner else grl$remote[m]
pr_remref <- glue("{out$pr_remote}/{out$pr_ref}")
gbl <- gert::git_branch_list(local = TRUE, repo = git_repo())
gbl <- gbl[!is.na(gbl$upstream), c("name", "upstream")]
gbl$upstream <- sub("^refs/remotes/", "", gbl$upstream)
m <- match(pr_remref, gbl$upstream)
out$pr_local_branch <- if (is.na(m)) NA_character_ else gbl$name[m]
# If the fork has been deleted, these are all NA
# - Because pr$head$repo is NULL:
# pr_repo_owner, pr_repo_spec, pr_from_fork, pr_https_url, pr_ssh_url
# - Because derived from those above:
#   pr_remote, pr_remref, pr_local_branch
# I suppose one could already have a local branch, if you fetched the PR
# before the fork got deleted.
# But an initial pr_fetch() won't work if the fork has been deleted.
# I'm willing to accept that the pr_*() functions don't necessarily address
# the "deleted fork" scenario. It's relatively rare.
# example: https://github.com/r-lib/httr/pull/634
out
}
pr_list <- function(tr = NULL,
github_get = NA,
state = c("open", "closed", "all"),
head = NULL) {
tr <- tr %||% target_repo(github_get = github_get, ask = FALSE)
state <- match.arg(state)
gh <- gh_tr(tr)
safely_gh <- purrr::safely(gh, otherwise = NULL)
out <- safely_gh(
"GET /repos/{owner}/{repo}/pulls",
state = state, head = head, .limit = Inf
)
if (is.null(out$error)) {
prs <- out$result
} else {
ui_oops("Unable to retrieve PRs for {ui_value(tr$repo_spec)}.")
prs <- NULL
}
no_prs <- length(prs) == 0
if (no_prs) {
prs <- list(list())
}
out <- map(prs, pr_data_tidy)
out <- map(out, ~ as.data.frame(.x, stringsAsFactors = FALSE))
out <- do.call(rbind, out)
if (no_prs) {
out[0, ]
} else {
pr_is_open <- out$pr_state == "open"
rbind(out[pr_is_open, ], out[!pr_is_open, ])
}
}
# retrieves specific PR by number
pr_get <- function(number, tr = NULL, github_get = NA) {
tr <- tr %||% target_repo(github_get = github_get, ask = FALSE)
gh <- gh_tr(tr)
raw <- gh("GET /repos/{owner}/{repo}/pulls/{number}", number = number)
pr_data_tidy(raw)
}
branches_with_no_upstream_or_github_upstream <- function(tr = NULL) {
repo <- git_repo()
gb_dat <- gert::git_branch_list(local = TRUE, repo = repo)
gb_dat <- gb_dat[, c("name", "upstream", "updated")]
gb_dat$remref <- sub("^refs/remotes/", "", gb_dat$upstream)
gb_dat$upstream <- NULL
gb_dat$remote <- remref_remote(gb_dat$remref)
gb_dat$ref <- remref_branch(gb_dat$remref)
gb_dat$cfg_pr_url <- map_chr(
glue("branch.{gb_dat$name}.pr-url"),
~ git_cfg_get(.x, where = "local") %||% NA_character_
)
ghr <- github_remote_list(these = NULL)[["remote"]]
gb_dat <- gb_dat[is.na(gb_dat$remref) | (gb_dat$remote %in% ghr), ]
pr_dat <- pr_list(tr = tr)
dat <- merge(
x = gb_dat, y = pr_dat,
by.x = "name", by.y = "pr_local_branch",
all.x = TRUE
)
dat <- dat[order(dat$pr_number, dat$pr_updated_at, dat$updated, decreasing = TRUE), ]
missing_cfg <- is.na(dat$cfg_pr_url) & !is.na(dat$pr_html_url)
purrr::walk2(
glue("branch.{dat$name[missing_cfg]}.pr-url"),
dat$pr_html_url[missing_cfg],
~ gert::git_config_set(.x, .y, repo = repo)
)
dat
}
choose_branch <- function(exclude = character()) {
if (!is_interactive()) {
return(character())
}
dat <- branches_with_no_upstream_or_github_upstream()
dat <- dat[!dat$name %in% exclude, ]
if (nrow(dat) == 0) {
return()
}
prompt <- "Which branch do you want to checkout? (0 to exit)"
if (nrow(dat) > 9) {
branches_not_shown <- utils::tail(dat$name, -9)
n <- length(branches_not_shown)
dat <- dat[1:9, ]
pre <- glue("{n} branch{if (n > 1) 'es' else ''} not listed: ")
listing <- glue_collapse(
branches_not_shown, sep = ", ", width = getOption("width") - nchar(pre)
)
prompt <- glue("
{prompt}
{pre}{listing}")
}
dat$pretty_user <- map(dat$pr_user, ~ glue("@{.x}"))
dat$pretty_name <- format(dat$name, justify = "right")
dat_pretty <- purrr::pmap_chr(
dat[c("pretty_name", "pr_number", "pretty_user", "pr_title")],
function(pretty_name, pr_number, pretty_user, pr_title) {
if (is.na(pr_number)) {
glue("{pretty_name}")
} else {
glue("{pretty_name} --> #{pr_number} ({ui_value(pretty_user)}): {pr_title}")
}
}
)
choice <- utils::menu(title = prompt, choices = cli::ansi_strtrim(dat_pretty))
dat$name[choice]
}
choose_pr <- function(tr = NULL, pr_dat = NULL) {
if (!is_interactive()) {
return(list(pr_number = list()))
}
if (is.null(pr_dat)) {
tr <- tr %||% target_repo()
pr_dat <- pr_list(tr)
}
if (nrow(pr_dat) == 0) {
return()
}
# wording needs to make sense for several PR-choosing tasks, e.g. fetch, view,
# finish, forget
prompt <- "Which PR are you interested in? (0 to exit)"
if (nrow(pr_dat) > 9) {
n <- nrow(pr_dat) - 9
pr_dat <- pr_dat[1:9, ]
prompt <- glue("
{prompt}
Not shown: {n} more {if (n > 1) 'PRs' else 'PR'}; \\
call {ui_code('browse_github_pulls()')} to browse all PRs.")
}
some_closed <- any(pr_dat$pr_state == "closed")
pr_pretty <- purrr::pmap_chr(
pr_dat[c("pr_number", "pr_user", "pr_state", "pr_title")],
function(pr_number, pr_user, pr_state, pr_title) {
hash_number <- glue("#{pr_number}")
at_user <- glue("@{pr_user}")
if (some_closed) {
glue("{hash_number} ({ui_field(at_user)}, {pr_state}): {ui_value(pr_title)}")
} else {
glue("{hash_number} ({ui_field(at_user)}): {ui_value(pr_title)}")
}
}
)
choice <- utils::menu(
title = prompt,
choices = cli::ansi_strtrim(pr_pretty)
)
as.list(pr_dat[choice, ])
}
# deletes the remote branch associated with a PR
# returns invisible TRUE/FALSE re: whether a deletion actually occurred
# reasons this returns FALSE
# * don't have push permission on remote where PR branch lives
# * PR has not been merged
# * remote branch has already been deleted
pr_branch_delete <- function(pr) {
remote <- pr$pr_remote
remote_dat <- github_remotes(remote)
if (!isTRUE(remote_dat$can_push)) {
return(invisible(FALSE))
}
gh <- gh_tr(remote_dat)
pr_ref <- tryCatch(
gh(
"GET /repos/{owner}/{repo}/git/ref/{ref}",
ref = glue("heads/{pr$pr_ref}")
),
http_error_404 = function(cnd) NULL
)
pr_remref <- glue_data(pr, "{pr_remote}/{pr_ref}")
if (is.null(pr_ref)) {
ui_info("
PR {ui_value(pr$pr_string)} originated from branch \\
{ui_value(pr_remref)}, which no longer exists")
return(invisible(FALSE))
}
if (is.na(pr$pr_merged_at)) {
ui_info("
PR {ui_value(pr$pr_string)} is unmerged, \\
we will not delete the remote branch {ui_value(pr_remref)}")
return(invisible(FALSE))
}
ui_done("
PR {ui_value(pr$pr_string)} has been merged, \\
deleting remote branch {ui_value(pr_remref)}")
# TODO: tryCatch here?
gh(
"DELETE /repos/{owner}/{repo}/git/refs/{ref}",
ref = glue("heads/{pr$pr_ref}")
)
invisible(TRUE)
}
check_pr_branch <- function(default_branch = git_default_branch()) {
# the glue-ing happens inside check_current_branch(), where `gb` gives the
# current git branch
check_current_branch(
is_not = default_branch,
message = "
The {ui_code('pr_*()')} functions facilitate pull requests.
The current branch ({ui_value(gb)}) is this repo's default \\
branch, but pull requests should NOT come from the default branch.
Do you need to call {ui_code('pr_init()')} (new PR)?
Or {ui_code('pr_resume()')} or {ui_code('pr_fetch()')} (existing PR)?"
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/pr.R
|
proj_desc <- function(path = proj_get()) {
desc::desc(file = path)
}
proj_version <- function() {
proj_desc()$get_field("Version")
}
proj_deps <- function() {
proj_desc()$get_deps()
}
proj_desc_create <- function(name, fields = list(), roxygen = TRUE) {
fields <- use_description_defaults(name, roxygen = roxygen, fields = fields)
# https://github.com/r-lib/desc/issues/132
desc <- desc::desc(text = glue("{names(fields)}: {fields}"))
tidy_desc(desc)
tf <- withr::local_tempfile()
desc$write(file = tf)
write_over(proj_path("DESCRIPTION"), read_utf8(tf))
# explicit check of "usethis.quiet" since I'm not doing the printing
if (!is_quiet()) {
desc$print()
}
}
# Here overwrite means "update the field if there is already a value in it,
# including appending".
proj_desc_field_update <- function(key, value, overwrite = TRUE, append = FALSE) {
check_string(key)
check_character(value)
check_bool(overwrite)
desc <- proj_desc()
old <- desc$get_list(key, default = "")
if (all(value %in% old)) {
return(invisible())
}
if (!overwrite && length(old) > 0 && any(old != "")) {
ui_stop(
"{ui_field(key)} has a different value in DESCRIPTION. \\
Use {ui_code('overwrite = TRUE')} to overwrite."
)
}
ui_done("Adding {ui_value(value)} to {ui_field(key)}")
if (append) {
value <- union(old, value)
}
# https://github.com/r-lib/desc/issues/117
desc$set_list(key, value)
desc$write()
invisible()
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/proj-desc.R
|
proj <- new.env(parent = emptyenv())
proj_get_ <- function() proj$cur
proj_set_ <- function(path) {
old <- proj$cur
proj$cur <- path
invisible(old)
}
#' Utility functions for the active project
#'
#' @description
#' Most `use_*()` functions act on the **active project**. If it is
#' unset, usethis uses [rprojroot](https://rprojroot.r-lib.org) to
#' find the project root of the current working directory. It establishes the
#' project root by looking for a `.here` file, an RStudio Project, a package
#' `DESCRIPTION`, Git infrastructure, a `remake.yml` file, or a `.projectile`
#' file. It then stores the active project for use for the remainder of the
#' session.
#'
#' In general, end user scripts should not contain direct calls to
#' `usethis::proj_*()` utility functions. They are internal functions that are
#' exported for occasional interactive use or use in packages that extend
#' usethis. End user code should call functions in
#' [rprojroot](https://rprojroot.r-lib.org) or its simpler companion,
#' [here](https://here.r-lib.org), to programmatically detect a project and
#' build paths within it.
#'
#' If you are puzzled why a path (usually the current working directory) does
#' *not* appear to be inside a project, it can be helpful to call
#' `here::dr_here()` to get much more verbose feedback.
#'
#' @name proj_utils
#' @family project functions
#' @examples
#' \dontrun{
#' ## see the active project
#' proj_get()
#'
#' ## manually set the active project
#' proj_set("path/to/target/project")
#'
#' ## build a path within the active project (both produce same result)
#' proj_path("R/foo.R")
#' proj_path("R", "foo", ext = "R")
#'
#' ## build a path within SOME OTHER project
#' with_project("path/to/some/other/project", proj_path("blah.R"))
#'
#' ## convince yourself that with_project() temporarily changes the project
#' with_project("path/to/some/other/project", print(proj_sitrep()))
#' }
NULL
#' @describeIn proj_utils Retrieves the active project and, if necessary,
#' attempts to set it in the first place.
#' @export
proj_get <- function() {
# Called for first time so try working directory
if (!proj_active()) {
proj_set(".")
}
proj_get_()
}
#' @describeIn proj_utils Sets the active project.
#' @param path Path to set. This `path` should exist or be `NULL`.
#' @param force If `TRUE`, use this path without checking the usual criteria for
#' a project. Use sparingly! The main application is to solve a temporary
#' chicken-egg problem: you need to set the active project in order to add
#' project-signalling infrastructure, such as initialising a Git repo or
#' adding a `DESCRIPTION` file.
#' @export
proj_set <- function(path = ".", force = FALSE) {
if (!force && dir_exists(path %||% "") && is_in_proj(path)) {
return(invisible(proj_get_()))
}
path <- proj_path_prep(path)
if (is.null(path) || force) {
proj_string <- if (is.null(path)) "<no active project>" else path
ui_done("Setting active project to {ui_value(proj_string)}")
return(proj_set_(path))
}
check_path_is_directory(path)
new_project <- proj_find(path)
if (is.null(new_project)) {
ui_stop('
Path {ui_path(path)} does not appear to be inside a project or package.
Read more in the help for {ui_code("proj_get()")}.')
}
proj_set(path = new_project, force = TRUE)
}
#' @describeIn proj_utils Builds paths within the active project returned by
#' `proj_get()`. Thin wrapper around [fs::path()].
#' @inheritParams fs::path
#' @export
proj_path <- function(..., ext = "") {
has_absolute_path <- function(x) any(is_absolute_path(x))
dots <- list(...)
if (any(map_lgl(dots, has_absolute_path))) {
ui_stop("Paths must be relative to the active project")
}
path_norm(path(proj_get(), ..., ext = ext))
}
#' @describeIn proj_utils Runs code with a temporary active project and,
#' optionally, working directory. It is an example of the `with_*()` functions
#' in [withr](https://withr.r-lib.org).
#' @param code Code to run with temporary active project
#' @param setwd Whether to also temporarily set the working directory to the
#' active project, if it is not `NULL`
#' @param quiet Whether to suppress user-facing messages, while operating in the
#' temporary active project
#' @export
with_project <- function(path = ".",
code,
force = FALSE,
setwd = TRUE,
quiet = getOption("usethis.quiet", default = FALSE)) {
local_project(path = path, force = force, setwd = setwd, quiet = quiet)
force(code)
}
#' @describeIn proj_utils Sets an active project and, optionally, working
#' directory until the current execution environment goes out of scope, e.g.
#' the end of the current function or test. It is an example of the
#' `local_*()` functions in [withr](https://withr.r-lib.org).
#' @param .local_envir The environment to use for scoping. Defaults to current
#' execution environment.
#' @export
local_project <- function(path = ".",
force = FALSE,
setwd = TRUE,
quiet = getOption("usethis.quiet", default = FALSE),
.local_envir = parent.frame()) {
withr::local_options(usethis.quiet = quiet, .local_envir = .local_envir)
old_project <- proj_get_() # this could be `NULL`, i.e. no active project
withr::defer(proj_set(path = old_project, force = TRUE), envir = .local_envir)
proj_set(path = path, force = force)
temp_proj <- proj_get_() # this could be `NULL`
if (isTRUE(setwd) && !is.null(temp_proj)) {
withr::local_dir(temp_proj, .local_envir = .local_envir)
}
}
## usethis policy re: preparation of the path to active project
proj_path_prep <- function(path) {
if (is.null(path)) {
return(path)
}
path <- path_abs(path)
if (file_exists(path)) {
path_real(path)
} else {
path
}
}
## usethis policy re: preparation of user-provided path to a resource on user's
## file system
user_path_prep <- function(path) {
## usethis uses fs's notion of home directory
## this ensures we are consistent about that
path_expand(path)
}
proj_rel_path <- function(path) {
if (is_in_proj(path)) {
as.character(path_rel(path, start = proj_get()))
} else {
path
}
}
proj_crit <- function() {
rprojroot::has_file(".here") |
rprojroot::is_rstudio_project |
rprojroot::is_r_package |
rprojroot::is_git_root |
rprojroot::is_remake_project |
rprojroot::is_projectile_project
}
proj_find <- function(path = ".") {
tryCatch(
rprojroot::find_root(proj_crit(), path = path),
error = function(e) NULL
)
}
possibly_in_proj <- function(path = ".") !is.null(proj_find(path))
is_package <- function(base_path = proj_get()) {
res <- tryCatch(
rprojroot::find_package_root_file(path = base_path),
error = function(e) NULL
)
!is.null(res)
}
check_is_package <- function(whos_asking = NULL) {
if (is_package()) {
return(invisible())
}
message <- "Project {ui_value(project_name())} is not an R package."
if (!is.null(whos_asking)) {
message <- c(
"{ui_code(whos_asking)} is designed to work with packages.",
message
)
}
ui_stop(message)
}
check_is_project <- function() {
if (!possibly_in_proj()) {
ui_stop('
We do not appear to be inside a valid project or package.
Read more in the help for {ui_code("proj_get()")}.')
}
}
proj_active <- function() !is.null(proj_get_())
is_in_proj <- function(path) {
if (!proj_active()) {
return(FALSE)
}
identical(
proj_get(),
## use path_abs() in case path does not exist yet
path_common(c(proj_get(), path_expand(path_abs(path))))
)
}
project_name <- function(base_path = proj_get()) {
## escape hatch necessary to solve this chicken-egg problem:
## create_package() calls use_description(), which calls project_name()
## to learn package name from the path, in order to make DESCRIPTION
## and DESCRIPTION is how we recognize a package as a usethis project
if (!possibly_in_proj(base_path)) {
return(path_file(base_path))
}
if (is_package(base_path)) {
proj_desc(base_path)$get_field("Package")
} else {
path_file(base_path)
}
}
#' Activate a project
#'
#' Activates a project in usethis, R session, and (if relevant) RStudio senses.
#' If you are in RStudio, this will open a new RStudio session. If not, it will
#' change the working directory and the [active project][proj_set()].
#'
#' @param path Project directory
#' @return Single logical value indicating if current session is modified.
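#' @examples
#' \dontrun{
#' # the path is illustrative
#' proj_activate("path/to/my/project")
#' }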
#' @export
proj_activate <- function(path) {
check_path_is_directory(path)
path <- user_path_prep(path)
if (rstudio_available() && rstudioapi::hasFun("openProject")) {
ui_done("Opening {ui_path(path, base = NA)} in new RStudio session")
rstudioapi::openProject(path, newSession = TRUE)
invisible(FALSE)
} else {
proj_set(path)
rel_path <- path_rel(proj_get(), path_wd())
if (rel_path != ".") {
ui_done("Changing working directory to {ui_path(path, base = NA)}")
setwd(proj_get())
}
invisible(TRUE)
}
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/proj.R
|
#' Create or edit R or test files
#'
#' This pair of functions makes it easy to create paired R and test files,
#' using the convention that the tests for `R/foofy.R` should live
#' in `tests/testthat/test-foofy.R`. You can use them to create new files
#' from scratch by supplying `name`, or if you use RStudio, you can call them
#' to create (or navigate to) the paired file based on the currently open
#' script.
#'
#' @section Renaming files in an existing package:
#'
#' Here are some tips on aligning file names across `R/` and `tests/testthat/`
#' in an existing package that did not necessarily follow this convention
#' before.
#'
#' This script generates a data frame of `R/` and test files that can help you
#' identify missed opportunities for pairing:
#'
#' ```
#' library(fs)
#' library(tidyverse)
#'
#' bind_rows(
#' tibble(
#' type = "R",
#' path = dir_ls("R/", regexp = "\\.[Rr]$"),
#' name = as.character(path_ext_remove(path_file(path))),
#' ),
#' tibble(
#' type = "test",
#' path = dir_ls("tests/testthat/", regexp = "/test[^/]+\\.[Rr]$"),
#' name = as.character(path_ext_remove(str_remove(path_file(path), "^test[-_]"))),
#' )
#' ) %>%
#' pivot_wider(names_from = type, values_from = path) %>%
#' print(n = Inf)
#' ```
#'
#' The [rename_files()] function can also be helpful.
#'
#' @param name Either a string giving a file name (without directory) or
#' `NULL` to take the name from the currently open file in RStudio.
#' @inheritParams edit_file
#' @seealso The [testing](https://r-pkgs.org/testing-basics.html) and
#' [R code](https://r-pkgs.org/code.html) chapters of
#' [R Packages](https://r-pkgs.org).
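#' @examples
#' \dontrun{
#' # Create R/foofy.R and its companion test file (the name is illustrative)
#' use_r("foofy")
#' use_test("foofy")
#' }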
#' @export
use_r <- function(name = NULL, open = rlang::is_interactive()) {
use_directory("R")
path <- path("R", compute_name(name))
edit_file(proj_path(path), open = open)
invisible(TRUE)
}
#' @rdname use_r
#' @export
use_test <- function(name = NULL, open = rlang::is_interactive()) {
if (!uses_testthat()) {
use_testthat_impl()
}
path <- path("tests", "testthat", paste0("test-", compute_name(name)))
if (!file_exists(path)) {
use_template("test-example-2.1.R", save_as = path)
}
edit_file(proj_path(path), open = open)
invisible(TRUE)
}
# helpers -----------------------------------------------------------------
compute_name <- function(name = NULL, ext = "R", error_call = caller_env()) {
if (!is.null(name)) {
check_file_name(name, call = error_call)
if (path_ext(name) == "") {
name <- path_ext_set(name, ext)
} else if (path_ext(name) != ext) {
cli::cli_abort(
"{.arg name} must have extension {.str {ext}}, not {.str {path_ext(name)}}.",
call = error_call
)
}
return(as.character(name))
}
if (!rstudio_available()) {
cli::cli_abort(
"{.arg name} is absent but must be specified.",
call = error_call
)
}
compute_active_name(
path = rstudioapi::getSourceEditorContext()$path,
ext = ext,
error_call = error_call
)
}
compute_active_name <- function(path, ext, error_call = caller_env()) {
if (is.null(path)) {
cli::cli_abort(
c(
"No file is open in RStudio.",
i = "Please specify {.arg name}."
),
call = error_call
)
}
## rstudioapi can return a path like '~/path/to/file' where '~' means
## R's notion of user's home directory
path <- proj_path_prep(path_expand_r(path))
dir <- path_dir(proj_rel_path(path))
if (!dir %in% c("R", "src", "tests/testthat")) {
cli::cli_abort("Open file must be a code or test file.", call = error_call)
}
file <- path_file(path)
if (dir == "tests/testthat") {
file <- gsub("^test[-_]", "", file)
}
as.character(path_ext_set(file, ext))
}
check_file_name <- function(name, call = caller_env()) {
if (!is_string(name)) {
cli::cli_abort("{.arg name} must be a single string", call = call)
}
if (name == "") {
cli::cli_abort("{.arg name} must not be an empty string", call = call)
}
if (path_dir(name) != ".") {
cli::cli_abort(
"{.arg name} must be a file name without directory.",
call = call
)
}
if (!valid_file_name(path_ext_remove(name))) {
cli::cli_abort(
c(
"{.arg name} ({.str {name}}) must be a valid file name.",
i = "A valid file name consists of only ASCII letters, numbers, '-', and '_'."
),
call = call
)
}
}
valid_file_name <- function(x) {
grepl("^[a-zA-Z0-9._-]+$", x)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/r.R
|
#' Use C, C++, RcppArmadillo, or RcppEigen
#'
#' Adds infrastructure commonly needed when using compiled code:
#' * Creates `src/`
#' * Adds required packages to `DESCRIPTION`
#' * May create an initial placeholder `.c` or `.cpp` file
#' * Creates `Makevars` and `Makevars.win` files (`use_rcpp_armadillo()` only)
#'
#' @inheritParams use_r
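#' @examples
#' \dontrun{
#' # file names are illustrative
#' use_rcpp("fast-things")
#' use_rcpp_armadillo("fast-matrix-things")
#' use_c("plain-c-things")
#' }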
#' @export
use_rcpp <- function(name = NULL) {
check_is_package("use_rcpp()")
check_uses_roxygen("use_rcpp()")
use_dependency("Rcpp", "LinkingTo")
use_dependency("Rcpp", "Imports")
roxygen_ns_append("@importFrom Rcpp sourceCpp") && roxygen_remind()
use_src()
path <- path("src", compute_name(name, "cpp"))
use_template("code.cpp", path)
edit_file(proj_path(path))
invisible()
}
#' @rdname use_rcpp
#' @export
use_rcpp_armadillo <- function(name = NULL) {
use_rcpp(name)
use_dependency("RcppArmadillo", "LinkingTo")
makevars_settings <- list(
"CXX_STD" = "CXX11",
"PKG_CXXFLAGS" = "$(SHLIB_OPENMP_CXXFLAGS)",
"PKG_LIBS" = "$(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)"
)
use_makevars(makevars_settings)
invisible()
}
#' @rdname use_rcpp
#' @export
use_rcpp_eigen <- function(name = NULL) {
use_rcpp(name)
use_dependency("RcppEigen", "LinkingTo")
roxygen_ns_append("@import RcppEigen") && roxygen_remind()
invisible()
}
#' @rdname use_rcpp
#' @export
use_c <- function(name = NULL) {
check_is_package("use_c()")
check_uses_roxygen("use_c()")
use_src()
path <- path("src", compute_name(name, ext = "c"))
use_template("code.c", path)
edit_file(proj_path(path))
invisible(TRUE)
}
use_src <- function() {
use_directory("src")
use_git_ignore(c("*.o", "*.so", "*.dll"), "src")
roxygen_ns_append(glue("@useDynLib {project_name()}, .registration = TRUE")) &&
roxygen_remind()
invisible()
}
use_makevars <- function(settings = NULL) {
use_directory("src")
settings_list <- settings %||% list()
check_is_named_list(settings_list)
makevars_entries <- vapply(settings_list, glue_collapse, character(1))
makevars_content <- glue("{names(makevars_entries)} = {makevars_entries}")
makevars_path <- proj_path("src", "Makevars")
makevars_win_path <- proj_path("src", "Makevars.win")
if (!file_exists(makevars_path) && !file_exists(makevars_win_path)) {
write_utf8(makevars_path, makevars_content)
file_copy(makevars_path, makevars_win_path)
ui_done("Created {ui_path(makevars_path)} and {ui_path(makevars_win_path)} \\
with requested compilation settings.")
} else {
ui_todo("Ensure the following Makevars compilation settings are set for both \\
{ui_path(makevars_path)} and {ui_path(makevars_win_path)}:")
ui_code_block(
makevars_content
)
edit_file(makevars_path)
edit_file(makevars_win_path)
}
}
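# Illustrative sketch (not part of the original source): use_makevars() expects a
# named list; each element becomes a `NAME = value` line in src/Makevars and
# src/Makevars.win. The settings shown here are hypothetical.
# use_makevars(list(CXX_STD = "CXX11", PKG_CPPFLAGS = "-DSTRICT_R_HEADERS"))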
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/rcpp.R
|
#' Create README files
#'
#' @description
#' Creates skeleton README files with possible stubs for
#' * a high-level description of the project/package and its goals
#' * R code to install from GitHub, if GitHub usage detected
#' * a basic example
#'
#' Use `Rmd` if you want a rich intermingling of code and output. Use `md` for a
#' basic README. `README.Rmd` will be automatically added to `.Rbuildignore`.
#' The resulting README is populated with default YAML frontmatter and R fenced
#' code blocks (`md`) or chunks (`Rmd`).
#'
#' If you use `Rmd`, you'll still need to render it regularly, to keep
#' `README.md` up-to-date. `devtools::build_readme()` is handy for this. You
#' could also use GitHub Actions to re-render `README.Rmd` every time you push.
#' An example workflow can be found in the `examples/` directory here:
#' <https://github.com/r-lib/actions/>.
#'
#' If the current project is a Git repo, then `use_readme_rmd()` automatically
#' configures a pre-commit hook that helps keep `README.Rmd` and `README.md`
#' synchronized. The hook creates friction if you try to commit when
#' `README.Rmd` has been edited more recently than `README.md`. If this hook
#' causes more problems than it solves for you, it is implemented in
#' `.git/hooks/pre-commit`, which you can modify or even delete.
#'
#' @inheritParams use_template
#' @seealso The [other markdown files
#' section](https://r-pkgs.org/other-markdown.html) of [R
#' Packages](https://r-pkgs.org).
#' @export
#' @examples
#' \dontrun{
#' use_readme_rmd()
#' use_readme_md()
#' }
use_readme_rmd <- function(open = rlang::is_interactive()) {
check_is_project()
check_installed("rmarkdown")
is_pkg <- is_package()
repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL)
nm <- if (is_pkg) "Package" else "Project"
data <- list2(
!!nm := project_name(),
Rmd = TRUE,
on_github = !is.null(repo_spec),
github_spec = repo_spec
)
new <- use_template(
if (is_pkg) "package-README" else "project-README",
"README.Rmd",
data = data,
ignore = is_pkg,
open = open
)
if (!new) {
return(invisible(FALSE))
}
if (is_pkg && !data$on_github) {
ui_todo("
Update {ui_path('README.Rmd')} to include installation instructions.")
}
if (uses_git()) {
use_git_hook(
"pre-commit",
render_template("readme-rmd-pre-commit.sh")
)
}
invisible(TRUE)
}
#' @export
#' @rdname use_readme_rmd
use_readme_md <- function(open = rlang::is_interactive()) {
check_is_project()
is_pkg <- is_package()
repo_spec <- tryCatch(target_repo_spec(ask = FALSE), error = function(e) NULL)
nm <- if (is_pkg) "Package" else "Project"
data <- list2(
!!nm := project_name(),
Rmd = FALSE,
on_github = !is.null(repo_spec),
github_spec = repo_spec
)
new <- use_template(
if (is_pkg) "package-README" else "project-README",
"README.md",
data = data,
open = open
)
if (is_pkg && !data$on_github) {
ui_todo("
Update {ui_path('README.md')} to include installation instructions.")
}
invisible(new)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/readme.R
|
#' Create a release checklist in a GitHub issue
#'
#' @description
#' When preparing to release a package to CRAN there are quite a few steps that
#' need to be performed, and some of the steps can take multiple hours. This
#' function creates a checklist in a GitHub issue to:
#'
#' * Help you keep track of where you are in the process
#' * Feel a sense of satisfaction as you progress towards final submission
#' * Help watchers of your package stay informed.
#'
#' The checklist contains a generic set of steps that we've found to be helpful,
#' based on the type of release ("patch", "minor", or "major"). You're
#' encouraged to edit the issue to customize this list to meet your needs.
#'
#' ## Customization
#'
#' * If you want to consistently add extra bullets for every release, you can
#' include your own custom bullets by providing an (unexported)
#' `release_bullets()` function that returns a character vector.
#' (For historical reasons, `release_questions()` is also supported).
#'
#' * If you want to check additional packages in the revdep check process,
#' provide an (unexported) `release_extra_revdeps()` function that
#' returns a character vector. This is currently only supported for
#' Posit internal check tooling.
#'
#' @param version Optional version number for release. If unspecified, you can
#' make an interactive choice.
#' @export
#' @examples
#' \dontrun{
#' use_release_issue("2.0.0")
#' }
use_release_issue <- function(version = NULL) {
check_is_package("use_release_issue()")
tr <- target_repo(github_get = TRUE)
if (!isTRUE(tr$can_push)) {
ui_line("
It is very unusual to open a release issue on a repo you can't push to:
{ui_value(tr$repo_spec)}")
if (ui_nope("Do you really want to do this?")) {
ui_oops("Cancelling.")
return(invisible())
}
}
version <- version %||%
choose_version(
"What should the release version be?",
which = c("major", "minor", "patch")
)
if (is.null(version)) {
return(invisible(FALSE))
}
on_cran <- !is.null(cran_version())
checklist <- release_checklist(version, on_cran)
gh <- gh_tr(tr)
issue <- gh(
"POST /repos/{owner}/{repo}/issues",
title = glue("Release {project_name()} {version}"),
body = paste0(checklist, "\n", collapse = "")
)
Sys.sleep(1)
view_url(issue$html_url)
}
release_checklist <- function(version, on_cran) {
type <- release_type(version)
cran_results <- cran_results_url()
has_news <- file_exists(proj_path("NEWS.md"))
has_pkgdown <- uses_pkgdown()
has_lifecycle <- proj_desc()$has_dep("lifecycle")
has_readme <- file_exists(proj_path("README.Rmd"))
has_github_links <- has_github_links()
is_posit_pkg <- is_posit_pkg()
milestone_num <- NA # for testing (and general fallback)
if (uses_git() && curl::has_internet()) {
milestone_num <- tryCatch(
gh_milestone_number(target_repo_spec(), version),
error = function(e) NA
)
}
c(
if (!on_cran) c(
"First release:",
"",
todo("`usethis::use_news_md()`", !has_news),
todo("`usethis::use_cran_comments()`"),
todo("Update (aspirational) install instructions in README"),
todo("Proofread `Title:` and `Description:`"),
todo("Check that all exported functions have `@return` and `@examples`"),
todo("Check that `Authors@R:` includes a copyright holder (role 'cph')"),
todo("Check [licensing of included files](https://r-pkgs.org/license.html#sec-code-you-bundle)"),
todo("Review <https://github.com/DavisVaughan/extrachecks>"),
""
),
"Prepare for release:",
"",
todo("`git pull`"),
if (!is.na(milestone_num)) {
todo("[Close v{version} milestone](../milestone/{milestone_num})")
},
todo("Check [current CRAN check results]({cran_results})", on_cran),
todo("
Check if any deprecation processes should be advanced, as described in \\
[Gradual deprecation](https://lifecycle.r-lib.org/articles/communicate.html#gradual-deprecation)",
type != "patch" && has_lifecycle),
todo("`usethis::use_news_md()`", on_cran && !has_news),
todo("[Polish NEWS](https://style.tidyverse.org/news.html#news-release)", on_cran),
todo("`usethis::use_github_links()`", !has_github_links),
todo("`urlchecker::url_check()`"),
todo("`devtools::build_readme()`", has_readme),
todo("`devtools::check(remote = TRUE, manual = TRUE)`"),
todo("`devtools::check_win_devel()`"),
release_revdepcheck(on_cran, is_posit_pkg),
todo("Update `cran-comments.md`", on_cran),
todo("`git push`"),
todo("Draft blog post", type != "patch"),
todo("Slack link to draft blog in #open-source-comms", type != "patch" && is_posit_pkg),
release_extra_bullets(),
"",
"Submit to CRAN:",
"",
todo("`usethis::use_version('{type}')`"),
todo("`devtools::submit_cran()`"),
todo("Approve email"),
"",
"Wait for CRAN...",
"",
todo("Accepted :tada:"),
todo("Finish & publish blog post", type != "patch"),
todo("Add link to blog post in pkgdown news menu", type != "patch"),
todo("`usethis::use_github_release()`"),
todo("`usethis::use_dev_version(push = TRUE)`"),
todo("`usethis::use_news_md()`", !has_news),
todo("Tweet", type != "patch")
)
}
gh_milestone_number <- function(repo_spec, version, state = "open") {
milestones <- gh::gh(
"/repos/{repo_spec}/milestones",
repo_spec = repo_spec,
state = state
)
titles <- map_chr(milestones, "title")
numbers <- map_int(milestones, "number")
numbers[match(paste0("v", version), titles)]
}
release_revdepcheck <- function(on_cran = TRUE, is_posit_pkg = TRUE, env = NULL) {
if (!on_cran) {
return()
}
env <- env %||% safe_pkg_env()
if (env_has(env, "release_extra_revdeps")) {
extra <- env$release_extra_revdeps()
stopifnot(is.character(extra))
} else {
extra <- character()
}
if (is_posit_pkg) {
if (length(extra) > 0) {
extra_code <- paste0(deparse(extra), collapse = "")
todo("`revdepcheck::cloud_check(extra_revdeps = {extra_code})`")
} else {
todo("`revdepcheck::cloud_check()`")
}
} else {
todo("`revdepcheck::revdep_check(num_workers = 4)`")
}
}
release_extra_bullets <- function(env = NULL) {
env <- env %||% safe_pkg_env()
if (env_has(env, "release_bullets")) {
paste0("* [ ] ", env$release_bullets())
} else if (env_has(env, "release_questions")) {
# For backwards compatibility with devtools
paste0("* [ ] ", env$release_questions())
} else {
character()
}
}
safe_pkg_env <- function() {
tryCatch(
ns_env(project_name()),
error = function(e) emptyenv()
)
}
release_type <- function(version) {
x <- unclass(numeric_version(version))[[1]]
n <- length(x)
if (n >= 3 && x[[3]] != 0L) {
"patch"
} else if (n >= 2 && x[[2]] != 0L) {
"minor"
} else {
"major"
}
}
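# Illustrative examples (added for clarity, not from the original source):
# release_type("1.2.3") is "patch", release_type("1.3.0") is "minor",
# and release_type("2.0.0") is "major".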
#' Publish a GitHub release
#'
#' @description
#' Pushes the current branch (if safe) then publishes a GitHub release for the
#' latest CRAN submission.
#'
#' If you use [devtools::submit_cran()] to submit to CRAN, information about the
#' submitted state is captured in a `CRAN-SUBMISSION` file.
#' `use_github_release()` uses this info to populate the GitHub release notes
#' and, after success, deletes the file. In the absence of such a file, we
#' assume that current state (SHA of `HEAD`, package version, NEWS) is the
#' submitted state.
#'
#' @param host,auth_token `r lifecycle::badge("deprecated")`: No longer
#' consulted now that usethis allows the gh package to lookup a token based on
#' a URL determined from the current project's GitHub remotes.
#' @param publish If `TRUE`, publishes a release. If `FALSE`, creates a draft
#' release.
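#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs)
#' use_github_release()
#'
#' # Create a draft release to review the notes before publishing
#' use_github_release(publish = FALSE)
#' }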
#' @export
use_github_release <- function(publish = TRUE,
host = deprecated(),
auth_token = deprecated()) {
check_is_package("use_github_release()")
if (lifecycle::is_present(host)) {
deprecate_warn_host("use_github_release")
}
if (lifecycle::is_present(auth_token)) {
deprecate_warn_auth_token("use_github_release")
}
tr <- target_repo(github_get = TRUE, ok_configs = c("ours", "fork"))
check_can_push(tr = tr, "to create a release")
dat <- get_release_data(tr)
release_name <- glue("{dat$Package} {dat$Version}")
tag_name <- glue("v{dat$Version}")
kv_line("Release name", release_name)
kv_line("Tag name", tag_name)
kv_line("SHA", dat$SHA)
if (git_can_push()) {
git_push()
}
check_github_has_SHA(SHA = dat$SHA, tr = tr)
on_cran <- !is.null(cran_version())
news <- get_release_news(SHA = dat$SHA, tr = tr, on_cran = on_cran)
gh <- gh_tr(tr)
ui_cli_inform("Publishing {tag_name} release to GitHub")
release <- gh(
"POST /repos/{owner}/{repo}/releases",
name = release_name,
tag_name = tag_name,
target_commitish = dat$SHA,
body = news,
draft = !publish
)
ui_cli_inform("Release at {.url {release$html_url}}")
if (!is.null(dat$file)) {
ui_cli_inform("Deleting {.path {dat$file}}")
file_delete(dat$file)
}
invisible()
}
get_release_data <- function(tr = target_repo(github_get = TRUE)) {
cran_submission <-
path_first_existing(proj_path(c("CRAN-SUBMISSION", "CRAN-RELEASE")))
if (is.null(cran_submission)) {
ui_done("Using current HEAD commit for the release")
challenge_non_default_branch()
check_branch_pushed()
return(list(
Package = project_name(),
Version = proj_version(),
SHA = gert::git_info(repo = git_repo())$commit
))
}
if (path_file(cran_submission) == "CRAN-SUBMISSION") {
# new style ----
# Version: 2.4.2
# Date: 2021-10-13 20:40:36 UTC
# SHA: fbe18b5a22be8ebbb61fa7436e826ba8d7f485a9
out <- as.list(read.dcf(cran_submission)[1, ])
}
if (path_file(cran_submission) == "CRAN-RELEASE") {
gh <- gh_tr(tr)
# old style ----
# This package was submitted to CRAN on 2021-10-13.
# Once it is accepted, delete this file and tag the release (commit e10658f5).
lines <- read_utf8(cran_submission)
str_extract <- function(marker, pattern) {
re_match(grep(marker, lines, value = TRUE), pattern)$.match
}
date <- str_extract("submitted.*on", "[0-9]{4}-[0-9]{2}-[0-9]{2}")
sha <- str_extract("commit", "[[:xdigit:]]{7,40}")
if (nchar(sha) != 40) {
# the release endpoint requires the full sha
sha <-
gh("/repos/{owner}/{repo}/commits/{commit_sha}", commit_sha = sha)$sha
}
HEAD <- gert::git_info(repo = git_repo())$commit
if (HEAD == sha) {
version <- proj_version()
} else {
tf <- withr::local_tempfile()
gh(
"/repos/{owner}/{repo}/contents/{path}",
path = "DESCRIPTION",
ref = sha,
.destfile = tf,
.accept = "application/vnd.github.v3.raw"
)
version <- desc::desc_get_version(tf)
}
out <- list(
Version = version,
Date = Sys.Date(),
SHA = sha
)
}
out$Package <- project_name()
out$file <- cran_submission
ui_done("
{ui_path(out$file)} file found, from a submission on {as.Date(out$Date)}")
out
}
check_github_has_SHA <- function(SHA = gert::git_info(repo = git_repo())$commit,
tr = target_repo(github_get = TRUE)) {
safe_gh <- purrr::safely(gh_tr(tr))
SHA_GET <- safe_gh(
"/repos/{owner}/{repo}/git/commits/{commit_sha}",
commit_sha = SHA
)
if (is.null(SHA_GET$error)) {
return()
}
if (inherits(SHA_GET$error, "http_error_404")) {
ui_stop("
Can't find SHA {ui_value(substr(SHA, 1, 7))} in {ui_value(tr$repo_spec)}.
Do you need to push?")
}
ui_stop("Internal error: Unexpected error when checking for SHA on GitHub")
}
get_release_news <- function(SHA = gert::git_info(repo = git_repo())$commit,
tr = target_repo(github_get = TRUE),
on_cran = !is.null(cran_version())) {
HEAD <- gert::git_info(repo = git_repo())$commit
if (HEAD == SHA) {
news_path <- proj_path("NEWS.md")
news <- if (file_exists(news_path)) read_utf8(news_path) else NULL
} else {
news <- tryCatch(
read_github_file(
tr$repo_spec,
path = "NEWS.md",
ref = SHA,
host = tr$api_url
),
      github_error = function(e) NULL
)
}
if (is.null(news)) {
ui_oops("
Can't find {ui_path('NEWS.md')} in the released package source.
usethis consults this file for release notes.
Call {ui_code('usethis::use_news_md()')} to set this up for the future.")
if (on_cran) "-- no release notes --" else "Initial release"
} else {
news_latest(news)
}
}
cran_version <- function(package = project_name(), available = NULL) {
if (!curl::has_internet()) {
return(NULL)
}
if (is.null(available)) {
# Guard against CRAN mirror being unset
available <- tryCatch(
available.packages(repos = default_cran_mirror()),
error = function(e) NULL
)
if (is.null(available)) {
return(NULL)
}
}
idx <- available[, "Package"] == package
if (any(idx)) {
as.package_version(available[package, "Version"])
} else {
NULL
}
}
cran_results_url <- function(package = project_name()) {
glue("https://cran.rstudio.org/web/checks/check_results_{package}.html")
}
news_latest <- function(lines) {
headings <- which(grepl("^#\\s+", lines))
if (length(headings) == 0) {
ui_stop("No top-level headings found in {ui_value('NEWS.md')}")
} else if (length(headings) == 1) {
news <- lines[seq2(headings + 1, length(lines))]
} else {
news <- lines[seq2(headings[[1]] + 1, headings[[2]] - 1)]
}
# Remove leading and trailing empty lines
text <- which(news != "")
if (length(text) == 0) {
return("")
}
news <- news[text[[1]]:text[[length(text)]]]
paste0(news, "\n", collapse = "")
}
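# Illustrative sketch (not part of the original source): given NEWS.md lines
# c("# pkg 1.1.0", "", "* New feature", "", "# pkg 1.0.0"), news_latest()
# returns "* New feature\n", i.e. only the section under the first heading.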
is_posit_pkg <- function() {
is_posit_cph_or_fnd() || is_in_posit_org()
}
is_posit_cph_or_fnd <- function() {
if (!is_package()) {
return(FALSE)
}
roles <- get_posit_roles()
"cph" %in% roles || "fnd" %in% roles
}
is_posit_person_canonical <- function() {
if (!is_package()) {
return(FALSE)
}
roles <- get_posit_roles()
length(roles) > 0 &&
"fnd" %in% roles &&
"cph" %in% roles &&
attr(roles, "appears_in", exact = TRUE) == "given" &&
attr(roles, "appears_as", exact = TRUE) == "Posit Software, PBC"
}
get_posit_roles <- function() {
if (!is_package()) {
return()
}
desc <- proj_desc()
fnd <- unclass(desc$get_author("fnd"))
cph <- unclass(desc$get_author("cph"))
detect_posit <- function(x) {
any(grepl("rstudio|posit", tolower(x[c("given", "family")])))
}
fnd <- purrr::keep(fnd, detect_posit)
cph <- purrr::keep(cph, detect_posit)
if (length(fnd) < 1 && length(cph) < 1) {
return(character())
}
person <- c(fnd, cph)[[1]]
out <- person$role
if (!is.null(person$given) && nzchar(person$given)) {
attr(out, "appears_as") <- person$given
attr(out, "appears_in") <- "given"
} else {
attr(out, "appears_as") <- person$family
attr(out, "appears_in") <- "family"
}
out
}
is_in_posit_org <- function() {
if (!is_package()) {
return(FALSE)
}
desc <- proj_desc()
urls <- desc$get_urls()
dat <- parse_github_remotes(urls)
dat <- dat[dat$host == "github.com", ]
purrr::some(dat$repo_owner, ~ .x %in% posit_orgs())
}
posit_orgs <- function() {
c(
"tidyverse",
"r-lib",
"tidymodels",
"rstudio"
)
}
todo <- function(x, cond = TRUE) {
x <- glue(x, .envir = parent.frame())
if (cond) {
paste0("* [ ] ", x)
}
}
author_has_rstudio_email <- function() {
if (!is_package()) {
return()
}
desc <- proj_desc()
any(grepl("@rstudio[.]com", tolower(desc$get_authors())))
}
pkg_minimum_r_version <- function() {
deps <- proj_desc()$get_deps()
r_dep <- deps[deps$package == "R" & deps$type == "Depends", "version"]
if (length(r_dep) == 0) {
return(numeric_version("0"))
}
numeric_version(gsub("[^0-9.]", "", r_dep))
}
# Borrowed from pak, but modified also retain user's non-cran repos:
# https://github.com/r-lib/pak/blob/168ab5d58fc244e5084c2800c87b8a574d66c3ba/R/default-cran-mirror.R
default_cran_mirror <- function() {
repos <- getOption("repos")
cran <- repos["CRAN"]
if (is.null(cran) || is.na(cran) || cran == "@CRAN@") {
repos["CRAN"] <- "https://cloud.r-project.org"
}
repos
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/release.R
|
#' Automatically rename paired `R/` and `test/` files
#'
#' @description
#' * Moves `R/{old}.R` to `R/{new}.R`
#' * Moves `src/{old}.*` to `src/{new}.*`
#' * Moves `tests/testthat/test-{old}.R` to `tests/testthat/test-{new}.R`
#' * Moves `tests/testthat/test-{old}-*.*` to `tests/testthat/test-{new}-*.*`
#' and updates paths in the test file.
#' * Removes `context()` calls from the test file, which are unnecessary
#' (and discouraged) as of testthat v2.1.0.
#'
#' This is a potentially dangerous operation, so you must be using Git in
#' order to use this function.
#'
#' @param old,new Old and new file names (with or without extensions).
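#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs); file names are hypothetical.
#' # Moves R/strsplit.R, tests/testthat/test-strsplit.R, matching src/ files,
#' # and snapshot files to the new "str-split" name.
#' rename_files("strsplit", "str-split")
#' }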
#' @export
rename_files <- function(old, new) {
check_uses_git()
old <- path_ext_remove(old)
new <- path_ext_remove(new)
# R/ ------------------------------------------------------------------------
r_old_path <- proj_path("R", old, ext = "R")
r_new_path <- proj_path("R", new, ext = "R")
if (file_exists(r_old_path)) {
ui_done("Moving {ui_path(r_old_path)} to {ui_path(r_new_path)}")
file_move(r_old_path, r_new_path)
}
# src/ ------------------------------------------------------------------------
if (dir_exists(proj_path("src"))) {
src_old <- dir_ls(proj_path("src"), glob = glue("*/src/{old}.*"))
src_new_file <- gsub(glue("^{old}"), glue("{new}"), path_file(src_old))
src_new <- path(path_dir(src_old), src_new_file)
    if (length(src_old) > 0) {
ui_done("Moving {ui_path(src_old)} to {ui_path(src_new)}")
file_move(src_old, src_new)
}
}
# tests/testthat/ ------------------------------------------------------------
if (!uses_testthat()) {
return(invisible())
}
rename_test <- function(path) {
file <- gsub(glue("^test-{old}"), glue("test-{new}"), path_file(path))
file <- gsub(glue("^{old}.md"), glue("{new}.md"), file)
path(path_dir(path), file)
}
old_test <- dir_ls(
proj_path("tests", "testthat"),
glob = glue("*/test-{old}*")
)
new_test <- rename_test(old_test)
if (length(old_test) > 0) {
ui_done("Moving {ui_path(old_test)} to {ui_path(new_test)}")
file_move(old_test, new_test)
}
snaps_dir <- proj_path("tests", "testthat", "_snaps")
if (dir_exists(snaps_dir)) {
old_snaps <- dir_ls(snaps_dir, glob = glue("*/{old}.md"))
if (length(old_snaps) > 0) {
new_snaps <- rename_test(old_snaps)
ui_done("Moving {ui_path(old_snaps)} to {ui_path(new_snaps)}")
file_move(old_snaps, new_snaps)
}
}
# tests/testthat/test-{new}.R ------------------------------------------------
test_path <- proj_path("tests", "testthat", glue("test-{new}"), ext = "R")
if (!file_exists(test_path)) {
return(invisible())
}
lines <- read_utf8(test_path)
# Remove old context lines
context <- grepl("context\\(.*\\)", lines)
if (any(context)) {
ui_done("Removing call to {ui_code('context()')}")
lines <- lines[!context]
if (lines[[1]] == "") {
lines <- lines[-1]
}
}
old_test <- old_test[new_test != test_path]
new_test <- new_test[new_test != test_path]
if (length(old_test) > 0) {
ui_done("Updating paths in {ui_path(test_path)}")
for (i in seq_along(old_test)) {
lines <- gsub(path_file(old_test[[i]]), path_file(new_test[[i]]), lines, fixed = TRUE)
}
}
write_utf8(test_path, lines)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/rename-files.R
|
#' Reverse dependency checks
#'
#' Performs set up for checking the reverse dependencies of an R package, as
#' implemented by the revdepcheck package:
#' * Creates `revdep/` directory and adds it to `.Rbuildignore`
#' * Populates `revdep/.gitignore` to prevent tracking of various revdep
#' artefacts
#' * Prompts user to run the checks with `revdepcheck::revdep_check()`
#'
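#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs)
#' use_revdep()
#' }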
#' @export
use_revdep <- function() {
check_is_package("use_revdep()")
use_directory("revdep", ignore = TRUE)
use_git_ignore(
directory = "revdep",
c(
"checks", "library",
"checks.noindex", "library.noindex", "cloud.noindex",
"data.sqlite", "*.html"
)
)
ui_todo("Run checks with {ui_code('revdepcheck::revdep_check(num_workers = 4)')}")
invisible()
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/revdep.R
|
#' Add an RMarkdown Template
#'
#' Adds files and directories necessary to add a custom rmarkdown template to
#' RStudio. It creates:
#' * `inst/rmarkdown/templates/{{template_dir}}`. Main directory.
#' * `skeleton/skeleton.Rmd`. Your template Rmd file.
#' * `template.yaml` with basic information filled in.
#'
#' @param template_name The name as printed in the template menu.
#' @param template_dir Name of the directory the template will live in within
#' `inst/rmarkdown/templates`. If none is provided by the user, it will be
#' created from `template_name`.
#' @param template_description Sets the value of `description` in
#'   `template.yaml`.
#' @param template_create_dir Sets the value of `create_dir` in `template.yaml`.
#'
#' @export
#' @examples
#' \dontrun{
#' use_rmarkdown_template()
#' }
use_rmarkdown_template <- function(template_name = "Template Name",
template_dir = NULL,
template_description = "A description of the template",
template_create_dir = FALSE) {
# Process some of the inputs
template_dir <- template_dir %||% tolower(asciify(template_name))
template_create_dir <- as.character(template_create_dir)
template_dir <- path("inst", "rmarkdown", "templates", template_dir)
# Scaffold files
use_directory(path(template_dir, "skeleton"))
use_template(
"rmarkdown-template.yml",
data = list(
template_dir = template_dir,
template_name = template_name,
template_description = template_description,
template_create_dir = template_create_dir
),
save_as = path(template_dir, "template.yaml")
)
use_template(
"rmarkdown-template.Rmd",
path(template_dir, "skeleton", "skeleton.Rmd")
)
invisible(TRUE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/rmarkdown.R
|
#' Use roxygen2 with markdown
#'
#' If you are already using roxygen2, but not with markdown, you'll need to use
#' [roxygen2md](https://roxygen2md.r-lib.org) to convert existing Rd expressions
#' to markdown. The conversion is not perfect, so make sure to check the
#' results.
#'
#' @param overwrite Whether to overwrite an existing `Roxygen` field in
#' `DESCRIPTION` with `"list(markdown = TRUE)"`.
#'
#'
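#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs)
#' use_roxygen_md()
#'
#' # Replace an existing, non-standard Roxygen field
#' use_roxygen_md(overwrite = TRUE)
#' }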
#' @export
use_roxygen_md <- function(overwrite = FALSE) {
check_installed("roxygen2")
if (!uses_roxygen()) {
roxy_ver <- as.character(utils::packageVersion("roxygen2"))
proj_desc_field_update("Roxygen", "list(markdown = TRUE)", overwrite = FALSE)
proj_desc_field_update("RoxygenNote", roxy_ver, overwrite = FALSE)
ui_todo("Run {ui_code('devtools::document()')}")
return(invisible())
}
already_setup <- uses_roxygen_md()
if (isTRUE(already_setup)) {
return(invisible())
}
if (isFALSE(already_setup) || isTRUE(overwrite)) {
proj_desc_field_update("Roxygen", "list(markdown = TRUE)", overwrite = TRUE)
check_installed("roxygen2md")
ui_todo("
Run {ui_code('roxygen2md::roxygen2md()')} to convert existing Rd \\
comments to markdown")
if (!uses_git()) {
ui_todo("
Consider using Git for greater visibility into and control over the \\
conversion process")
}
ui_todo("Run {ui_code('devtools::document()')} when you're done")
return(invisible())
}
ui_stop("
{ui_path('DESCRIPTION')} already has a {ui_field('Roxygen')} field
Delete it and try again or call {ui_code('use_roxygen_md(overwrite = TRUE)')}")
invisible()
}
# FALSE: no Roxygen field
# TRUE: plain old "list(markdown = TRUE)"
# NA: everything else
uses_roxygen_md <- function() {
desc <- proj_desc()
if (!desc$has_fields("Roxygen")) {
return(FALSE)
}
roxygen <- desc$get_field("Roxygen", "")
if (identical(roxygen, "list(markdown = TRUE)") ||
identical(roxygen, "list(markdown = TRUE, r6 = FALSE)")) {
TRUE
} else {
NA
}
}
uses_roxygen <- function() {
proj_desc()$has_fields("RoxygenNote")
}
roxygen_ns_append <- function(tag) {
block_append(
glue("{ui_value(tag)}"),
glue("#' {tag}"),
path = proj_path(package_doc_path()),
block_start = "## usethis namespace: start",
block_end = "## usethis namespace: end",
block_suffix = "NULL",
sort = TRUE
)
}
roxygen_ns_show <- function() {
block_show(
path = proj_path(package_doc_path()),
block_start = "## usethis namespace: start",
block_end = "## usethis namespace: end"
)
}
roxygen_remind <- function() {
ui_todo("Run {ui_code('devtools::document()')} to update {ui_path('NAMESPACE')}")
TRUE
}
roxygen_update_ns <- function(load = is_interactive()) {
ui_done("Writing {ui_path('NAMESPACE')}")
utils::capture.output(
suppressMessages(roxygen2::roxygenise(proj_get(), "namespace"))
)
if (load) {
ui_done("Loading {project_name()}")
pkgload::load_all(path = proj_get(), quiet = TRUE)
}
TRUE
}
# Checkers ----------------------------------------------------------------
check_uses_roxygen <- function(whos_asking) {
force(whos_asking)
if (uses_roxygen()) {
return(invisible())
}
ui_stop(
"
Project {ui_value(project_name())} does not use roxygen2.
    {ui_code(whos_asking)} cannot work without it.
You might just need to run {ui_code('devtools::document()')} once, then try again.
"
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/roxygen.R
|
#' Helpers to make useful changes to `.Rprofile`
#'
#' @description
#' All functions open your `.Rprofile` and give you the code you need to
#' paste in.
#'
#' * `use_devtools()`: makes devtools available in interactive sessions.
#' * `use_usethis()`: makes usethis available in interactive sessions.
#' * `use_reprex()`: makes reprex available in interactive sessions.
#' * `use_conflicted()`: makes conflicted available in interactive sessions.
#' * `use_partial_warnings()`: warns on partial matches.
#'
#' @name rprofile-helper
NULL
#' @rdname rprofile-helper
#' @export
use_conflicted <- function() {
use_rprofile_package("conflicted")
}
#' @rdname rprofile-helper
#' @export
use_reprex <- function() {
use_rprofile_package("reprex")
}
#' @rdname rprofile-helper
#' @export
use_usethis <- function() {
use_rprofile_package("usethis")
}
#' @rdname rprofile-helper
#' @export
use_devtools <- function() {
use_rprofile_package("devtools")
}
use_rprofile_package <- function(package) {
check_installed(package)
ui_todo(
"Include this code in {ui_value('.Rprofile')} to make \\
{ui_field(package)} available in all interactive sessions."
)
ui_code_block(
"
if (interactive()) {{
suppressMessages(require({package}))
}}
"
)
edit_r_profile("user")
}
#' @rdname rprofile-helper
#' @export
use_partial_warnings <- function() {
ui_todo(
"Include this code in {ui_path('.Rprofile')} to warn on partial matches."
)
ui_code_block(
"
options(
warnPartialMatchArgs = TRUE,
warnPartialMatchDollar = TRUE,
warnPartialMatchAttr = TRUE
)
"
)
edit_r_profile("user")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/rprofile.R
|
#' Add RStudio Project infrastructure
#'
#' It is likely that you want to use [create_project()] or [create_package()]
#' instead of `use_rstudio()`! Both `create_*()` functions can add RStudio
#' Project infrastructure to a pre-existing project or package. `use_rstudio()`
#' is mostly for internal use or for those creating a usethis-like package for
#' their organization. It does the following in the current project, often after
#' executing `proj_set(..., force = TRUE)`:
#' * Creates an `.Rproj` file
#' * Adds RStudio files to `.gitignore`
#' * Adds RStudio files to `.Rbuildignore`, if project is a package
#'
#' @param line_ending Line ending
#' @param reformat If `TRUE`, the `.Rproj` is setup with common options that
#' reformat files on save: adding a trailing newline, trimming trailing
#' whitespace, and setting the line-ending. This is best practice for
#' new projects.
#'
#' If `FALSE`, these options are left unset, which is more appropriate when
#' you're contributing to someone else's project that does not have its own
#' `.Rproj` file.
#' @export
use_rstudio <- function(line_ending = c("posix", "windows"), reformat = TRUE) {
line_ending <- arg_match(line_ending)
line_ending <- c("posix" = "Posix", "windows" = "Windows")[[line_ending]]
rproj_file <- paste0(project_name(), ".Rproj")
new <- use_template(
"template.Rproj",
save_as = rproj_file,
data = list(
line_ending = line_ending,
is_pkg = is_package(),
reformat = reformat
),
ignore = is_package()
)
use_git_ignore(".Rproj.user")
if (is_package()) {
use_build_ignore(".Rproj.user")
}
invisible(new)
}
#' Don't save/load user workspace between sessions
#'
#' R can save and reload the user's workspace between sessions via an `.RData`
#' file in the current directory. However, long-term reproducibility is enhanced
#' when you turn this feature off and clear R's memory at every restart.
#' Starting with a blank slate provides timely feedback that encourages the
#' development of scripts that are complete and self-contained. More detail can
#' be found in the blog post [Project-oriented
#' workflow](https://www.tidyverse.org/blog/2017/12/workflow-vs-script/).
#'
#' @inheritParams edit
#'
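#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs)
#' use_blank_slate()            # set the user-level RStudio preference
#' use_blank_slate("project")   # set the options in the current project's .Rproj
#' }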
#' @export
use_blank_slate <- function(scope = c("user", "project")) {
scope <- match.arg(scope)
if (scope == "user") {
use_rstudio_preferences(
save_workspace = "never",
load_workspace = FALSE
)
} else {
rproj_fields <- modify_rproj(
rproj_path(),
list(RestoreWorkspace = "No", SaveWorkspace = "No")
)
write_utf8(rproj_path(), serialize_rproj(rproj_fields))
restart_rstudio("Restart RStudio with a blank slate?")
}
invisible()
}
# Is base_path an RStudio Project or inside an RStudio Project?
is_rstudio_project <- function(base_path = proj_get()) {
length(rproj_paths(base_path)) == 1
}
rproj_paths <- function(base_path, recurse = FALSE) {
dir_ls(base_path, regexp = "[.]Rproj$", recurse = recurse)
}
# Return path to single .Rproj or die trying
rproj_path <- function(base_path = proj_get(), call = caller_env()) {
rproj <- rproj_paths(base_path)
if (length(rproj) == 1) {
rproj
} else if (length(rproj) == 0) {
name <- project_name(base_path)
cli::cli_abort("{.val {name}} is not an RStudio Project.", call = call)
} else {
name <- project_name(base_path)
cli::cli_abort(
c(
"{.val {name}} must contain a single .Rproj file.",
i = "Found {.file {path_rel(rproj, base_path)}}."
),
call = call
)
}
}
# Is base_path open in RStudio?
in_rstudio <- function(base_path = proj_get()) {
if (!rstudio_available()) {
return(FALSE)
}
if (!rstudioapi::hasFun("getActiveProject")) {
return(FALSE)
}
proj <- rstudioapi::getActiveProject()
if (is.null(proj)) {
return(FALSE)
}
path_real(proj) == path_real(base_path)
}
# So we can override the default with a mock
rstudio_available <- function() {
rstudioapi::isAvailable()
}
in_rstudio_server <- function() {
if (!rstudio_available()) {
return(FALSE)
}
identical(rstudioapi::versionInfo()$mode, "server")
}
parse_rproj <- function(file) {
lines <- as.list(read_utf8(file))
has_colon <- grepl(":", lines)
fields <- lapply(lines[has_colon], function(x) strsplit(x, split = ": ")[[1]])
lines[has_colon] <- vapply(fields, `[[`, "character", 2)
names(lines)[has_colon] <- vapply(fields, `[[`, "character", 1)
names(lines)[!has_colon] <- ""
lines
}
modify_rproj <- function(file, update) {
utils::modifyList(parse_rproj(file), update)
}
serialize_rproj <- function(fields) {
named <- nzchar(names(fields))
as.character(ifelse(named, paste0(names(fields), ": ", fields), fields))
}
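# Illustrative sketch (not part of the original source): an .Rproj file is a set
# of `Key: Value` lines; parse_rproj() keeps non-field lines as unnamed entries so
# the file round-trips. This mirrors the use in use_blank_slate() above:
# fields <- modify_rproj(rproj_path(), list(SaveWorkspace = "No"))
# write_utf8(rproj_path(), serialize_rproj(fields))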
# Must be last command run
restart_rstudio <- function(message = NULL) {
if (!in_rstudio(proj_get())) {
return(FALSE)
}
if (!is_interactive()) {
return(FALSE)
}
if (!is.null(message)) {
ui_todo(message)
}
if (!rstudioapi::hasFun("openProject")) {
return(FALSE)
}
if (ui_nope("Restart now?")) {
return(FALSE)
}
rstudioapi::openProject(proj_get())
}
rstudio_git_tickle <- function() {
if (rstudioapi::hasFun("executeCommand")) {
rstudioapi::executeCommand("vcsRefresh")
}
invisible()
}
rstudio_config_path <- function(...) {
if (is_windows()) {
# https://github.com/r-lib/usethis/issues/1293
base <- rappdirs::user_config_dir("RStudio", appauthor = NULL)
} else {
# RStudio only uses windows/unix conventions, not mac
base <- rappdirs::user_config_dir("rstudio", os = "unix")
}
path(base, ...)
}
#' Set global RStudio preferences
#'
#' This function allows you to set global RStudio preferences, achieving the
#' same effect programmatically as clicking buttons in RStudio's Global Options.
#' You can find a list of configurable properties at
#' <https://docs.posit.co/ide/server-pro/reference/session_user_settings.html>.
#'
#' @export
#' @param ... <[`dynamic-dots`][rlang::dyn-dots]> Property-value pairs.
#' @return A named list of the previous values, invisibly.
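#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs); these two properties are
#' # the ones set by use_blank_slate()
#' use_rstudio_preferences(
#'   save_workspace = "never",
#'   load_workspace = FALSE
#' )
#' }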
use_rstudio_preferences <- function(...) {
new <- dots_list(..., .homonyms = "last")
if (length(new) > 0 && !is_named(new)) {
cli::cli_abort("All arguments in {.arg ...} must be named.")
}
json <- rstudio_prefs_read()
old <- json[names(new)]
for (name in names(new)) {
val <- new[[name]]
if (identical(json[[name]], val)) {
next
}
ui_done("Setting RStudio preference {ui_field(name)} to {ui_value(val)}.")
json[[name]] <- val
}
rstudio_prefs_write(json)
invisible(old)
}
rstudio_prefs_read <- function() {
path <- rstudio_config_path("rstudio-prefs.json")
if (file_exists(path)) {
jsonlite::read_json(path)
} else {
list()
}
}
rstudio_prefs_write <- function(json) {
path <- rstudio_config_path("rstudio-prefs.json")
create_directory(path_dir(path))
jsonlite::write_json(json, path, auto_unbox = TRUE, pretty = TRUE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/rstudio.R
|
#' Report working directory and usethis/RStudio project
#'
#' @description `proj_sitrep()` reports
#' * current working directory
#' * the active usethis project
#' * the active RStudio Project
#'
#' @description Call this function if things seem weird and you're not sure
#' what's wrong or how to fix it. Usually, all three of these should coincide
#' (or be unset) and `proj_sitrep()` provides suggested commands for getting
#' back to this happy state.
#'
#' @return A named list, with S3 class `sitrep` (for printing purposes),
#' reporting current working directory, active usethis project, and active
#' RStudio Project
#' @export
#' @family project functions
#' @examples
#' proj_sitrep()
proj_sitrep <- function() {
out <- list(
working_directory = getwd(),
active_usethis_proj = if (proj_active()) proj_get(),
active_rstudio_proj = if (rstudioapi::hasFun("getActiveProject")) {
rstudioapi::getActiveProject()
}
## TODO(?): address home directory to help clarify fs issues on Windows?
## home_usethis = fs::path_home(),
## home_r = normalizePath("~")
)
out <- ifelse(map_lgl(out, is.null), out, as.character(path_tidy(out)))
structure(out, class = "sitrep")
}
#' @export
print.sitrep <- function(x, ...) {
keys <- format(names(x), justify = "right")
purrr::walk2(keys, x, kv_line)
rstudio_proj_is_active <- !is.null(x[["active_rstudio_proj"]])
usethis_proj_is_active <- !is.null(x[["active_usethis_proj"]])
rstudio_proj_is_not_wd <- rstudio_proj_is_active &&
x[["working_directory"]] != x[["active_rstudio_proj"]]
usethis_proj_is_not_wd <- usethis_proj_is_active &&
x[["working_directory"]] != x[["active_usethis_proj"]]
usethis_proj_is_not_rstudio_proj <- usethis_proj_is_active &&
rstudio_proj_is_active &&
x[["active_rstudio_proj"]] != x[["active_usethis_proj"]]
if (rstudio_available() && !rstudio_proj_is_active) {
ui_todo(
"
You are working in RStudio, but are not in an RStudio Project.
A Project-based workflow offers many advantages. Read more at:
{ui_field('https://support.rstudio.com/hc/en-us/articles/200526207-Using-Projects')}
{ui_field('https://whattheyforgot.org/project-oriented-workflow.html')}
"
)
}
if (!usethis_proj_is_active) {
ui_todo(
"
There is currently no active usethis project.
usethis attempts to activate a project upon first need.
Call {ui_code('proj_get()')} to initiate project discovery.
Call {ui_code('proj_set(\"path/to/project\")')} or \\
{ui_code('proj_activate(\"path/to/project\")')} to provide
an explicit path.
"
)
}
if (usethis_proj_is_not_wd) {
ui_todo(
"
Your working directory is not the same as the active usethis project.
Set working directory to the project: {ui_code('setwd(proj_get())')}
Set project to working directory: {ui_code('proj_set(getwd())')}
"
)
}
if (rstudio_proj_is_not_wd) {
ui_todo(
"
Your working directory is not the same as the active RStudio Project.
Set working directory to the Project: {ui_code('setwd(rstudioapi::getActiveProject())')}
"
)
}
if (usethis_proj_is_not_rstudio_proj) {
ui_todo(
"
Your active RStudio Project is not the same as the active usethis project.
Set usethis project to RStudio Project: \\
{ui_code('proj_set(rstudioapi::getActiveProject())')}
Restart RStudio in the usethis project: \\
{ui_code('rstudioapi::openProject(proj_get())')}
Open the usethis project in a new instance of RStudio: \\
{ui_code('proj_activate(proj_get())')}
"
)
}
invisible(x)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/sitrep.R
|
#' Use spell check
#'
#' Adds a unit test to automatically run a spell check on documentation and,
#' optionally, vignettes during `R CMD check`, using the
#' [spelling][spelling::spell_check_package] package. Also adds a `WORDLIST`
#' file to the package, which is a dictionary of whitelisted words. See
#' [spelling::wordlist] for details.
#'
#' @param vignettes Logical, `TRUE` to spell check all `rmd` and `rnw` files in
#' the `vignettes/` folder.
#' @param lang Preferred spelling language. Usually either `"en-US"` or
#' `"en-GB"`.
#' @param error Logical, indicating whether the unit test should fail if
#' spelling errors are found. Defaults to `FALSE`, which does not error, but
#' prints potential spelling errors
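#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs)
#' use_spell_check()
#'
#' # British English, and fail R CMD check if spelling errors are found
#' use_spell_check(lang = "en-GB", error = TRUE)
#' }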
#' @export
use_spell_check <- function(vignettes = TRUE,
lang = "en-US",
error = FALSE) {
check_is_package("use_spell_check()")
check_installed("spelling")
use_dependency("spelling", "Suggests")
proj_desc_field_update("Language", lang, overwrite = TRUE)
spelling::spell_check_setup(
pkg = proj_get(), vignettes = vignettes, lang = lang, error = error
)
ui_todo("Run {ui_code('devtools::check()')} to trigger spell check")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/spelling.R
|
#' Use a usethis-style template
#'
#' Creates a file from data and a template found in a package. Provides control
#' over file name, the addition to `.Rbuildignore`, and opening the file for
#' inspection.
#'
#' This function can be used as the engine for a templating function in other
#' packages. The `template` argument is used along with the `package` argument
#' to derive the path to your template file; it will be expected at
#' `fs::path_package(package = package, "templates", template)`. We use
#' `fs::path_package()` instead of `base::system.file()` so that path
#' construction works even in a development workflow, e.g., works with
#' `devtools::load_all()` or `pkgload::load_all()`. *Note this describes the
#' behaviour of `fs::path_package()` in fs v1.2.7.9001 and higher.*
#'
#' To interpolate your data into the template, supply a list using
#' the `data` argument. Internally, this function uses
#' [whisker::whisker.render()] to combine your template file with your data.
#'
#' @param template Path to template file relative to `templates/` directory
#' within `package`; see details.
#' @param save_as Path of file to create, relative to root of active project.
#' Defaults to `template`
#' @param data A list of data passed to the template.
#' @param ignore Should the newly created file be added to `.Rbuildignore`?
#' @param open Open the newly created file for editing? Happens in RStudio, if
#' applicable, or via [utils::file.edit()] otherwise.
#' @param package Name of the package where the template is found.
#' @return A logical vector indicating if file was modified.
#' @export
#' @examples
#' \dontrun{
#' # Note: running this will write `NEWS.md` to your working directory
#' use_template(
#' template = "NEWS.md",
#' data = list(Package = "acme", Version = "1.2.3"),
#' package = "usethis"
#' )
#' }
use_template <- function(template,
save_as = template,
data = list(),
ignore = FALSE,
open = FALSE,
package = "usethis") {
template_contents <- render_template(template, data, package = package)
new <- write_over(proj_path(save_as), template_contents)
if (ignore) {
use_build_ignore(save_as)
}
if (open && new) {
edit_file(proj_path(save_as))
}
invisible(new)
}
render_template <- function(template, data = list(), package = "usethis") {
template_path <- find_template(template, package = package)
strsplit(whisker::whisker.render(read_utf8(template_path), data), "\n")[[1]]
}
find_template <- function(template_name, package = "usethis") {
check_installed(package)
path <- tryCatch(
path_package(package = package, "templates", template_name),
error = function(e) ""
)
if (identical(path, "")) {
ui_stop(
"Could not find template {ui_value(template_name)} \\
in package {ui_value(package)}."
)
}
path
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/template.R
|
#' Sets up overall testing infrastructure
#'
#' Creates `tests/testthat/`, `tests/testthat.R`, and adds the testthat package
#' to the Suggests field. Learn more in <https://r-pkgs.org/testing-basics.html>
#'
#' @param edition testthat edition to use. Defaults to the latest edition, i.e.
#' the major version number of the currently installed testthat.
#' @param parallel Should tests be run in parallel? This feature appeared in
#' testthat 3.0.0; see <https://testthat.r-lib.org/articles/parallel.html> for
#' details and caveats.
#' @seealso [use_test()] to create individual test files
#' @export
#' @examples
#' \dontrun{
#' use_testthat()
#'
#' use_test()
#'
#' use_test("something-management")
#' }
use_testthat <- function(edition = NULL, parallel = FALSE) {
use_testthat_impl(edition, parallel = parallel)
ui_todo(
"Call {ui_code('use_test()')} to initialize a basic test file and open it \\
for editing."
)
}
use_testthat_impl <- function(edition = NULL, parallel = FALSE) {
check_installed("testthat")
if (utils::packageVersion("testthat") < "2.1.0") {
ui_stop("testthat 2.1.0 or greater needed. Please install before re-trying")
}
if (is_package()) {
edition <- check_edition(edition)
use_dependency("testthat", "Suggests", paste0(edition, ".0.0"))
proj_desc_field_update("Config/testthat/edition", as.character(edition), overwrite = TRUE)
if (parallel) {
proj_desc_field_update("Config/testthat/parallel", "true", overwrite = TRUE)
} else {
proj_desc()$del("Config/testthat/parallel")
}
} else {
if (!is.null(edition)) {
ui_stop("Can't declare testthat edition outside of a package")
}
}
use_directory(path("tests", "testthat"))
use_template(
"testthat.R",
save_as = path("tests", "testthat.R"),
data = list(name = project_name())
)
}
check_edition <- function(edition = NULL) {
version <- utils::packageVersion("testthat")[[1, c(1, 2)]]
if (version[[2]] == "99") {
version <- version[[1]] + 1L
} else {
version <- version[[1]]
}
if (is.null(edition)) {
version
} else {
if (!is.numeric(edition) || length(edition) != 1) {
ui_stop("`edition` must be a single number")
}
if (edition > version) {
vers <- utils::packageVersion("testthat")
ui_stop("`edition` ({edition}) not available in installed testthat ({vers})")
}
as.integer(edition)
}
}
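# Illustrative behaviour (added for clarity, not from the original source): with
# testthat 3.2.1 installed, check_edition() returns 3L; with a pre-release version
# such as 2.99.0.9000 it rounds up to 3L. Versions shown are hypothetical.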
uses_testthat <- function() {
paths <- proj_path(c(path("inst", "tests"), path("tests", "testthat")))
any(dir_exists(paths))
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/test.R
|
#' Prepare to return a tibble
#'
#' @description
#'
#' `r lifecycle::badge("questioning")`
#'
#' Does minimum setup such that a tibble returned by your package
#' is handled using the tibble method for generics like `print()` or \code{[}.
#' Presumably you care about this if you've chosen to store and expose an
#' object with class `tbl_df`. Specifically:
#' * Check that the active package uses roxygen2
#' * Add the tibble package to "Imports" in `DESCRIPTION`
#' * Prepare the roxygen directive necessary to import at least one function
#' from tibble:
#' - If possible, the directive is inserted into existing package-level
#' documentation, i.e. the roxygen snippet created by [use_package_doc()]
#' - Otherwise, we issue advice on where the user should add the directive
#'
#' This is necessary when your package returns a stored data object that has
#' class `tbl_df`, but the package code does not make direct use of functions
#' from the tibble package. If you do nothing, the tibble namespace is not
#' necessarily loaded and your tibble may therefore be printed and subsetted
#' like a base `data.frame`.
#'
#' @export
#' @examples
#' \dontrun{
#' use_tibble()
#' }
use_tibble <- function() {
check_is_package("use_tibble()")
check_uses_roxygen("use_tibble()")
created <- use_import_from("tibble", "tibble")
ui_todo("Document a returned tibble like so:")
ui_code_block("#' @return a [tibble][tibble::tibble-package]", copy = FALSE)
invisible(created)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/tibble.R
|
#' Helpers for tidyverse development
#'
#' These helpers follow tidyverse conventions which are generally a little
#' stricter than the defaults, reflecting the need for greater rigor in
#' commonly used packages.
#'
#' @details
#'
#' * `create_tidy_package()`: creates a new package, immediately applies as many
#' of the tidyverse conventions as possible, issues a few reminders, and
#' activates the new package.
#'
#' * `use_tidy_dependencies()`: sets up standard dependencies used by all
#' tidyverse packages (except packages that are designed to be dependency free).
#'
#' * `use_tidy_description()`: puts fields in standard order and alphabetises
#' dependencies.
#'
#' * `use_tidy_eval()`: imports a standard set of helpers to facilitate
#' programming with the tidy eval toolkit.
#'
#' * `use_tidy_style()`: styles source code according to the [tidyverse style
#' guide](https://style.tidyverse.org). This function will overwrite files! See
#' below for usage advice.
#'
#' * `use_tidy_contributing()`: adds standard tidyverse contributing guidelines.
#'
#' * `use_tidy_issue_template()`: adds a standard tidyverse issue template.
#'
#' * `use_tidy_release_test_env()`: updates the test environment section in
#' `cran-comments.md`.
#'
#' * `use_tidy_support()`: adds a standard description of support resources for
#' the tidyverse.
#'
#' * `use_tidy_coc()`: equivalent to `use_code_of_conduct()`, but puts the
#' document in a `.github/` subdirectory.
#'
#' * `use_tidy_github()`: convenience wrapper that calls
#' `use_tidy_contributing()`, `use_tidy_issue_template()`, `use_tidy_support()`,
#' `use_tidy_coc()`.
#'
#' * [use_tidy_github_labels()] calls `use_github_labels()` to implement
#' tidyverse conventions around GitHub issue label names and colours.
#'
#' * `use_tidy_upkeep_issue()` creates an issue containing a checklist of
#' actions to bring your package up to current tidyverse standards.
#'
#' * `use_tidy_logo()` calls `use_logo()` on the appropriate hex sticker PNG
#' file at <https://github.com/rstudio/hex-stickers>.
#'
#' @section `use_tidy_style()`:
#' Uses the [styler](https://styler.r-lib.org) package to style all code
#' in a package, project, or directory, according to the [tidyverse style
#' guide](https://style.tidyverse.org).
#'
#' **Warning:** This function will overwrite files! It is strongly suggested to
#' only style files that are under version control or to first create a backup
#' copy.
#'
#' Invisibly returns a data frame with one row per file, that indicates whether
#' styling caused a change.
#'
#' @param strict Boolean indicating whether or not a strict version of styling
#' should be applied. See [styler::tidyverse_style()] for details.
#'
#' @name tidyverse
NULL
#' @export
#' @rdname tidyverse
#' @inheritParams create_package
#' @inheritParams licenses
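#' @examples
#' \dontrun{
#' # Illustrative usage (not from the original docs); the path is hypothetical
#' create_tidy_package("~/rrr/mynewpkg", copyright_holder = "Acme, Inc.")
#' }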
create_tidy_package <- function(path, copyright_holder = NULL) {
path <- create_package(path, rstudio = TRUE, open = FALSE)
local_project(path)
use_testthat()
use_mit_license(copyright_holder)
use_tidy_description()
use_readme_rmd(open = FALSE)
use_lifecycle_badge("experimental")
use_cran_badge()
use_cran_comments(open = FALSE)
ui_todo("In the new package, remember to do:")
ui_todo("{ui_code('use_git()')}")
ui_todo("{ui_code('use_github()')}")
ui_todo("{ui_code('use_tidy_github()')}")
ui_todo("{ui_code('use_tidy_github_actions()')}")
ui_todo("{ui_code('use_tidy_github_labels()')}")
ui_todo("{ui_code('use_pkgdown_github_pages()')}")
proj_activate(path)
}
#' @export
#' @rdname tidyverse
use_tidy_description <- function() {
desc <- proj_desc()
tidy_desc(desc)
desc$write()
invisible(TRUE)
}
#' @export
#' @rdname tidyverse
use_tidy_dependencies <- function() {
check_has_package_doc("use_tidy_dependencies()")
use_dependency("rlang", "Imports")
use_dependency("lifecycle", "Imports")
use_dependency("cli", "Imports")
use_dependency("glue", "Imports")
use_dependency("withr", "Imports")
# standard imports
imports <- any(
roxygen_ns_append("@import rlang"),
roxygen_ns_append("@importFrom glue glue"),
roxygen_ns_append("@importFrom lifecycle deprecated")
)
if (imports) {
roxygen_update_ns()
}
# add badges; we don't need the details
ui_silence(use_lifecycle())
# If needed, copy in lightweight purrr compatibility layer
if (!proj_desc()$has_dep("purrr")) {
use_directory("R")
use_standalone("r-lib/rlang", "purrr")
}
invisible()
}
#' @export
#' @rdname tidyverse
use_tidy_contributing <- function() {
use_dot_github()
data <- list(
Package = project_name(),
github_spec = target_repo_spec(ask = FALSE)
)
use_template(
"tidy-contributing.md",
path(".github", "CONTRIBUTING.md"),
data = data
)
}
#' @export
#' @rdname tidyverse
use_tidy_support <- function() {
use_dot_github()
data <- list(
Package = project_name(),
github_spec = target_repo_spec(ask = FALSE)
)
use_template(
"tidy-support.md",
path(".github", "SUPPORT.md"),
data = data
)
}
#' @export
#' @rdname tidyverse
use_tidy_issue_template <- function() {
use_dot_github()
use_directory(path(".github", "ISSUE_TEMPLATE"))
use_template(
"tidy-issue.md",
path(".github", "ISSUE_TEMPLATE", "issue_template.md")
)
}
#' @export
#' @rdname tidyverse
use_tidy_coc <- function() {
old_top_level_coc <- proj_path(c("CODE_OF_CONDUCT.md", "CONDUCT.md"))
old <- file_exists(old_top_level_coc)
if (any(old)) {
file_delete(old_top_level_coc[old])
}
use_dot_github()
use_coc(contact = "[email protected]", path = ".github")
}
#' @export
#' @rdname tidyverse
use_tidy_github <- function() {
use_dot_github()
use_tidy_contributing()
use_tidy_issue_template()
use_tidy_support()
use_tidy_coc()
}
use_dot_github <- function(ignore = TRUE) {
use_directory(".github", ignore = ignore)
use_git_ignore("*.html", directory = ".github")
}
#' @export
#' @rdname tidyverse
use_tidy_style <- function(strict = TRUE) {
check_installed("styler")
challenge_uncommitted_changes(msg = "
There are uncommitted changes and it is highly recommended to get into a \\
clean Git state before restyling your project's code")
if (is_package()) {
styled <- styler::style_pkg(
proj_get(),
style = styler::tidyverse_style,
strict = strict
)
} else {
styled <- styler::style_dir(
proj_get(),
style = styler::tidyverse_style,
strict = strict
)
}
ui_line()
ui_done("Styled project according to the tidyverse style guide")
invisible(styled)
}
#' Identify contributors via GitHub activity
#'
#' Derives a list of GitHub usernames, based on who has opened issues or pull
#' requests. Used to populate the acknowledgment section of package release blog
#' posts at <https://www.tidyverse.org/blog/>. If no arguments are given, we
#' retrieve all contributors to the active project since its last (GitHub)
#' release. Unexported helper functions, `releases()` and `ref_df()` can be
#' useful interactively to get a quick look at release tag names and a data
#' frame about refs (defaulting to releases), respectively.
#'
#' @param repo_spec Optional GitHub repo specification in any form accepted for
#' the `repo_spec` argument of [create_from_github()] (plain spec or a browser
#' or Git URL). A URL specification is the only way to target a GitHub host
#' other than `"github.com"`, which is the default.
#' @param from,to GitHub ref (i.e., a SHA, tag, or release) or a timestamp in
#' ISO 8601 format, specifying the start or end of the interval of interest,
#' in the sense of `[from, to]`. Examples: "08a560d", "v1.3.0",
#' "2018-02-24T00:13:45Z", "2018-05-01". When `from = NULL, to = NULL`, we set
#' `from` to the timestamp of the most recent (GitHub) release. Otherwise,
#' `NULL` means "no bound".
#'
#' @return A character vector of GitHub usernames, invisibly.
#' @export
#'
#' @examples
#' \dontrun{
#' # active project, interval = since the last release
#' use_tidy_thanks()
#'
#' # active project, interval = since a specific datetime
#' use_tidy_thanks(from = "2020-07-24T00:13:45Z")
#'
#' # r-lib/usethis, interval = since a certain date
#' use_tidy_thanks("r-lib/usethis", from = "2020-08-01")
#'
#' # r-lib/usethis, up to a specific release
#' use_tidy_thanks("r-lib/usethis", from = NULL, to = "v1.1.0")
#'
#' # r-lib/usethis, since a specific commit, up to a specific date
#' use_tidy_thanks("r-lib/usethis", from = "08a560d", to = "2018-05-14")
#'
#' # r-lib/usethis, but with copy/paste of a browser URL
#' use_tidy_thanks("https://github.com/r-lib/usethis")
#' }
use_tidy_thanks <- function(repo_spec = NULL,
from = NULL,
to = NULL) {
repo_spec <- repo_spec %||% target_repo_spec()
parsed_repo_spec <- parse_repo_url(repo_spec)
repo_spec <- parsed_repo_spec$repo_spec
# this is the most practical way to propagate `host` to downstream helpers
if (!is.null(parsed_repo_spec$host)) {
withr::local_envvar(c(GITHUB_API_URL = parsed_repo_spec$host))
}
if (is.null(to)) {
from <- from %||% releases(repo_spec)[[1]]
}
from_timestamp <- as_timestamp(repo_spec, x = from) %||% "2008-01-01"
to_timestamp <- as_timestamp(repo_spec, x = to)
ui_done("
Looking for contributors from {as.Date(from_timestamp)} to \\
{to_timestamp %||% 'now'}")
res <- gh::gh(
"/repos/{owner}/{repo}/issues",
owner = spec_owner(repo_spec), repo = spec_repo(repo_spec),
since = from_timestamp,
state = "all",
filter = "all",
.limit = Inf
)
if (length(res) < 1) {
ui_oops("No matching issues/PRs found")
return(invisible())
}
creation_time <- function(x) {
as.POSIXct(map_chr(x, "created_at"))
}
res <- res[creation_time(res) >= as.POSIXct(from_timestamp)]
if (!is.null(to_timestamp)) {
res <- res[creation_time(res) <= as.POSIXct(to_timestamp)]
}
if (length(res) == 0) {
ui_line("No matching issues/PRs found.")
return(invisible())
}
contributors <- sort(unique(map_chr(res, c("user", "login"))))
contrib_link <- glue("[@{contributors}](https://github.com/{contributors})")
ui_done("Found {length(contributors)} contributors:")
ui_code_block(glue_collapse(contrib_link, sep = ", ", last = ", and ") + glue("."))
invisible(contributors)
}
## if x appears to be a timestamp, pass it through
## otherwise, assume it's a ref and look up its timestamp
as_timestamp <- function(repo_spec, x = NULL) {
if (is.null(x)) {
return(NULL)
}
as_POSIXct <- try(as.POSIXct(x), silent = TRUE)
if (inherits(as_POSIXct, "POSIXct")) {
return(x)
}
ui_done("Resolving timestamp for ref {ui_value(x)}")
ref_df(repo_spec, refs = x)$timestamp
}
## returns a data frame on GitHub refs, defaulting to all releases
ref_df <- function(repo_spec, refs = NULL) {
check_name(repo_spec)
check_character(refs, allow_null = TRUE)
refs <- refs %||% releases(repo_spec)
if (is.null(refs)) {
return(NULL)
}
get_thing <- function(thing) {
gh::gh(
"/repos/{owner}/{repo}/commits/{thing}",
owner = spec_owner(repo_spec), repo = spec_repo(repo_spec),
thing = thing
)
}
res <- lapply(refs, get_thing)
data.frame(
ref = refs,
sha = substr(map_chr(res, "sha"), 1, 7),
timestamp = map_chr(res, c("commit", "committer", "date")),
stringsAsFactors = FALSE
)
}
## returns character vector of release tag names
releases <- function(repo_spec) {
check_name(repo_spec)
res <- gh::gh(
"/repos/{owner}/{repo}/releases",
owner = spec_owner(repo_spec), repo = spec_repo(repo_spec)
)
if (length(res) < 1) {
return(NULL)
}
map_chr(res, "tag_name")
}
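# Illustrative sketch (not run): a quick interactive look at the helpers
# defined above. Assumes a GitHub PAT is configured for the gh package; the
# repo spec is just an example.
if (FALSE) {
  releases("r-lib/usethis")
  #> character vector of release tag names, newest first, e.g. "v2.2.2", ...
  ref_df("r-lib/usethis", refs = c("v2.1.0", "v2.2.0"))
  #> data frame with columns `ref`, `sha` (abbreviated), and `timestamp`
}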
## approaches based on available.packages() and/or installed.packages() present
## several edge cases, requirements, and gotchas
## for this application, hard-wiring seems to be "good enough"
base_and_recommended <- function() {
# base_pkgs <- as.vector(installed.packages(priority = "base")[, "Package"])
# av <- available.packages()
# keep <- av[ , "Priority", drop = TRUE] %in% "recommended"
# rec_pkgs <- unname(av[keep, "Package", drop = TRUE])
# dput(sort(unique(c(base_pkgs, rec_pkgs))))
c(
"base", "boot", "class", "cluster", "codetools", "compiler",
"datasets", "foreign", "graphics", "grDevices", "grid", "KernSmooth",
"lattice", "MASS", "Matrix", "methods", "mgcv", "nlme", "nnet",
"parallel", "rpart", "spatial", "splines", "stats", "stats4",
"survival", "tcltk", "tools", "utils"
)
}
#' @rdname tidyverse
#' @inheritParams use_logo
#' @export
use_tidy_logo <- function(geometry = "240x278", retina = TRUE) {
if (!is_posit_pkg()) {
ui_stop("This function can only be used for Posit packages")
}
tf <- withr::local_tempfile(fileext = ".png")
gh::gh(
"/repos/rstudio/hex-stickers/contents/PNG/{pkg}.png/",
pkg = project_name(),
.destfile = tf,
.accept = "application/vnd.github.v3.raw"
)
use_logo(tf, geometry = geometry, retina = retina)
}
# ---- end of source file: R/tidyverse.R ----
#' Create a learnr tutorial
#'
#' Creates a new tutorial below `inst/tutorials/`. Tutorials are interactive R
#' Markdown documents built with the [`learnr`
#' package](https://rstudio.github.io/learnr/index.html). `use_tutorial()` does
#' this setup:
#' * Adds learnr to Suggests in `DESCRIPTION`.
#' * Gitignores `inst/tutorials/*.html` so you don't accidentally track
#' rendered tutorials.
#' * Creates a new `.Rmd` tutorial from a template and, optionally, opens it
#'   for editing.
#'
#' @param name Base for file name to use for new `.Rmd` tutorial. Should consist
#' only of numbers, letters, `_` and `-`. We recommend using lower case.
#' @param title The human-facing title of the tutorial.
#' @inheritParams use_template
#' @seealso The [learnr package
#' documentation](https://rstudio.github.io/learnr/index.html).
#' @export
#' @examples
#' \dontrun{
#' use_tutorial("learn-to-do-stuff", "Learn to do stuff")
#' }
use_tutorial <- function(name, title, open = rlang::is_interactive()) {
check_name(name)
check_name(title)
dir_path <- path("inst", "tutorials", name)
dir_create(dir_path)
use_directory(dir_path)
use_git_ignore("*.html", directory = dir_path)
use_dependency("learnr", "Suggests")
path <- path(dir_path, asciify(name), ext = "Rmd")
new <- use_template(
"tutorial-template.Rmd",
save_as = path,
data = list(tutorial_title = title),
ignore = FALSE,
open = open
)
invisible(new)
}
# ---- end of source file: R/tutorial.R ----
#' User interface
#'
#' @description
#' These functions are used to construct the user interface of usethis. Use
#' them in your own package so that your `use_` functions work the same way
#' as usethis.
#'
#' The `ui_` functions can be broken down into four main categories:
#'
#' * block styles: `ui_line()`, `ui_done()`, `ui_todo()`, `ui_oops()`,
#' `ui_info()`.
#' * conditions: `ui_stop()`, `ui_warn()`.
#' * questions: [ui_yeah()], [ui_nope()].
#' * inline styles: `ui_field()`, `ui_value()`, `ui_path()`, `ui_code()`,
#' `ui_unset()`.
#'
#' The question functions [ui_yeah()] and [ui_nope()] have their own [help
#' page][ui-questions].
#'
#' @section Silencing output:
#' All UI output (apart from `ui_yeah()`/`ui_nope()` prompts) can be silenced
#' by setting `options(usethis.quiet = TRUE)`. Use `ui_silence()` to silence
#' selected actions.
#'
#' @param x A character vector.
#'
#' For block styles, conditions, and questions, each element of the
#' vector becomes a line, and the result is processed by [glue::glue()].
#' For inline styles, each element of the vector becomes an entry in a
#' comma separated list.
#' @param .envir Used to ensure that [glue::glue()] gets the correct
#' environment. For expert use only.
#'
#' @return The block styles, conditions, and questions are called for their
#' side-effect. The inline styles return a string.
#' @keywords internal
#' @family user interface functions
#' @name ui
#' @examples
#' new_val <- "oxnard"
#' ui_done("{ui_field('name')} set to {ui_value(new_val)}")
#' ui_todo("Redocument with {ui_code('devtools::document()')}")
#'
#' ui_code_block(c(
#' "Line 1",
#' "Line 2",
#' "Line 3"
#' ))
NULL
# Block styles ------------------------------------------------------------
#' @rdname ui
#' @export
ui_line <- function(x = character(), .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
ui_inform(x)
}
#' @rdname ui
#' @export
ui_todo <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
ui_bullet(x, crayon::red(cli::symbol$bullet))
}
#' @rdname ui
#' @export
ui_done <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
ui_bullet(x, crayon::green(cli::symbol$tick))
}
#' @rdname ui
#' @export
ui_oops <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
ui_bullet(x, crayon::red(cli::symbol$cross))
}
#' @rdname ui
#' @export
ui_info <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
ui_bullet(x, crayon::yellow(cli::symbol$info))
}
#' @param copy If `TRUE`, and the session is interactive and the clipr package
#'   is installed, the code block is copied to the clipboard.
#' @rdname ui
#' @export
ui_code_block <- function(x,
copy = rlang::is_interactive(),
.envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
block <- indent(x, " ")
block <- crayon::silver(block)
ui_inform(block)
if (copy && clipr::clipr_available()) {
x <- crayon::strip_style(x)
clipr::write_clip(x)
ui_inform(" [Copied to clipboard]")
}
}
# Conditions --------------------------------------------------------------
#' @rdname ui
#' @export
ui_stop <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
cnd <- structure(
class = c("usethis_error", "error", "condition"),
list(message = x)
)
stop(cnd)
}
#' @rdname ui
#' @export
ui_warn <- function(x, .envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
warning(x, call. = FALSE, immediate. = TRUE)
}
# Silence -----------------------------------------------------------------
#' @rdname ui
#' @param code Code to execute with usual UI output silenced.
#' @export
ui_silence <- function(code) {
withr::with_options(list(usethis.quiet = TRUE), code)
}
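# Illustrative sketch (not run): silence the usethis UI for a single call.
# The wrapped code still executes; only output routed through ui_inform()
# is suppressed.
if (FALSE) {
  ui_silence(ui_done("This message is suppressed"))
}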
# Questions ---------------------------------------------------------------
#' User interface - Questions
#'
#' These functions are used to interact with the user by posing a simple yes or
#' no question. For details on the other `ui_*()` functions, see the [ui] help
#' page.
#'
#' @inheritParams ui
#' @param yes A character vector of "yes" strings, which are randomly sampled to
#' populate the menu.
#' @param no A character vector of "no" strings, which are randomly sampled to
#' populate the menu.
#' @param n_yes An integer. The number of "yes" strings to include.
#' @param n_no An integer. The number of "no" strings to include.
#' @param shuffle A logical. Should the order of the menu options be randomly
#' shuffled?
#'
#' @return A logical. `ui_yeah()` returns `TRUE` when the user selects a "yes"
#' option and `FALSE` otherwise, i.e. when user selects a "no" option or
#' refuses to make a selection (cancels). `ui_nope()` is the logical opposite
#' of `ui_yeah()`.
#' @name ui-questions
#' @keywords internal
#' @family user interface functions
#' @examples
#' \dontrun{
#' ui_yeah("Do you like R?")
#' ui_nope("Have you tried turning it off and on again?", n_yes = 1, n_no = 1)
#' ui_yeah("Are you sure its plugged in?", yes = "Yes", no = "No", shuffle = FALSE)
#' }
NULL
#' @rdname ui-questions
#' @export
ui_yeah <- function(x,
yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"),
no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"),
n_yes = 1, n_no = 2, shuffle = TRUE,
.envir = parent.frame()) {
x <- glue_collapse(x, "\n")
x <- glue(x, .envir = .envir)
if (!is_interactive()) {
ui_stop(c(
"User input required, but session is not interactive.",
"Query: {x}"
))
}
n_yes <- min(n_yes, length(yes))
n_no <- min(n_no, length(no))
qs <- c(sample(yes, n_yes), sample(no, n_no))
if (shuffle) {
qs <- sample(qs)
}
# TODO: should this be ui_inform()?
rlang::inform(x)
out <- utils::menu(qs)
out != 0L && qs[[out]] %in% yes
}
#' @rdname ui-questions
#' @export
ui_nope <- function(x,
yes = c("Yes", "Definitely", "For sure", "Yup", "Yeah", "I agree", "Absolutely"),
no = c("No way", "Not now", "Negative", "No", "Nope", "Absolutely not"),
n_yes = 1, n_no = 2, shuffle = TRUE,
.envir = parent.frame()) {
# TODO(jennybc): is this correct in the case of no selection / cancelling?
!ui_yeah(
x = x, yes = yes, no = no,
n_yes = n_yes, n_no = n_no,
shuffle = shuffle,
.envir = .envir
)
}
# Inline styles -----------------------------------------------------------
#' @rdname ui
#' @export
ui_field <- function(x) {
x <- crayon::green(x)
x <- glue_collapse(x, sep = ", ")
x
}
#' @rdname ui
#' @export
ui_value <- function(x) {
if (is.character(x)) {
x <- encodeString(x, quote = "'")
}
x <- crayon::blue(x)
x <- glue_collapse(x, sep = ", ")
x
}
#' @rdname ui
#' @export
#' @param base If specified, paths will be displayed relative to this path.
ui_path <- function(x, base = NULL) {
is_directory <- is_dir(x) | grepl("/$", x)
if (is.null(base)) {
x <- proj_rel_path(x)
} else if (!identical(base, NA)) {
x <- path_rel(x, base)
}
# rationalize trailing slashes
x <- path_tidy(x)
x <- ifelse(is_directory, paste0(x, "/"), x)
ui_value(x)
}
#' @rdname ui
#' @export
ui_code <- function(x) {
x <- encodeString(x, quote = "`")
x <- crayon::silver(x)
x <- glue_collapse(x, sep = ", ")
x
}
#' @rdname ui
#' @export
ui_unset <- function(x = "unset") {
check_string(x)
x <- glue("<{x}>")
x <- crayon::silver(x)
x
}
# rlang::inform() wrappers -----------------------------------------------------
indent <- function(x, first = " ", indent = first) {
x <- gsub("\n", paste0("\n", indent), x)
paste0(first, x)
}
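# Illustrative sketch (not run): indent() prefixes the first line with `first`
# and every subsequent line with `indent`, which is how ui_bullet() produces
# hanging indents for wrapped bullet text.
if (FALSE) {
  cat(indent("line one\nline two", first = "* ", indent = "  "))
  #> * line one
  #>   line two
}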
ui_bullet <- function(x, bullet = cli::symbol$bullet) {
bullet <- paste0(bullet, " ")
x <- indent(x, bullet, " ")
ui_inform(x)
}
# All UI output must eventually go through ui_inform() so that it
# can be quieted with 'usethis.quiet' when needed.
ui_inform <- function(...) {
if (!is_quiet()) {
inform(paste0(...))
}
invisible()
}
is_quiet <- function() {
isTRUE(getOption("usethis.quiet", default = FALSE))
}
# Sitrep helpers ---------------------------------------------------------------
hd_line <- function(name) {
ui_inform(crayon::bold(name))
}
kv_line <- function(key, value, .envir = parent.frame()) {
value <- if (is.null(value)) ui_unset() else ui_value(value)
key <- glue(key, .envir = .envir)
ui_inform(glue("{cli::symbol$bullet} {key}: {value}"))
}
# cli wrappers ------------------------------------------------------------
ui_cli_inform <- function(..., .envir = parent.frame()) {
if (!is_quiet()) {
cli::cli_inform(..., .envir = .envir)
}
invisible()
}
# ---- end of source file: R/ui.R ----
#' Create an upkeep checklist in a GitHub issue
#'
#' @description
#' This opens an issue in your package repository with a checklist of tasks for
#' regular maintenance of your package. This is a fairly opinionated list of
#' tasks but we believe taking care of them will generally make your package
#' better, easier to maintain, and more enjoyable for your users. Some of the
#' tasks are meant to be performed only once (and once completed shouldn't show
#' up in subsequent lists), and some should be reviewed periodically. The
#' tidyverse team uses a similar function [use_tidy_upkeep_issue()] for our
#' annual package Spring Cleaning.
#'
#' @param year Year you are performing the upkeep, used in the issue title.
#'   Defaults to the current year.
#'
#' @export
#' @examples
#' \dontrun{
#' use_upkeep_issue(2023)
#' }
use_upkeep_issue <- function(year = NULL) {
make_upkeep_issue(year = year, tidy = FALSE)
}
make_upkeep_issue <- function(year, tidy) {
who <- if (tidy) "use_tidy_upkeep_issue()" else "use_upkeep_issue()"
check_is_package(who)
tr <- target_repo(github_get = TRUE)
if (!isTRUE(tr$can_push)) {
ui_line("
It is very unusual to open an upkeep issue on a repo you can't push to:
{ui_value(tr$repo_spec)}")
if (ui_nope("Do you really want to do this?")) {
ui_oops("Cancelling.")
return(invisible())
}
}
gh <- gh_tr(tr)
if (tidy) {
checklist <- tidy_upkeep_checklist(year, repo_spec = tr$repo_spec)
} else {
checklist <- upkeep_checklist()
}
title_year <- year %||% format(Sys.Date(), "%Y")
issue <- gh(
"POST /repos/{owner}/{repo}/issues",
title = glue("Upkeep for {project_name()} ({title_year})"),
body = paste0(checklist, "\n", collapse = ""),
labels = if (tidy) list("upkeep")
)
Sys.sleep(1)
view_url(issue$html_url)
}
upkeep_checklist <- function() {
bullets <- c(
todo("`usethis::use_readme_rmd()`", !file_exists(proj_path("README.Rmd"))),
todo("`usethis::use_roxygen_md()`", !is_true(uses_roxygen_md())),
todo("`usethis::use_github_links()`", !has_github_links()),
todo("`usethis::use_pkgdown_github_pages()`", !uses_pkgdown()),
todo("`usethis::use_tidy_description()`"),
todo(
"
`usethis::use_package_doc()`
Consider letting usethis manage your `@importFrom` directives here. \\
`usethis::use_import_from()` is handy for this.",
!has_package_doc()
),
todo(
"
`usethis::use_testthat()`. \\
Learn more about testing at <https://r-pkgs.org/tests.html>",
!uses_testthat()
),
todo(
"
`usethis::use_testthat(3)` and upgrade to 3e, \\
[testthat 3e vignette](https://testthat.r-lib.org/articles/third-edition.html)",
uses_old_testthat_edition(current = 3)
),
todo("
Align the names of `R/` files and `test/` files for workflow happiness. \\
The docs for `usethis::use_r()` include a helpful script. \\
`usethis::rename_files()` may be be useful."),
todo(
"Consider changing default branch from `master` to `main`",
git_default_branch() == "master"
),
todo("`usethis::use_code_of_conduct()`", !has_coc()),
todo(
"Remove description of test environments from `cran-comments.md`.
See `usethis::use_cran_comments()`.",
has_old_cran_comments()
),
todo("
Add alt-text to pictures, plots, etc; see \\
<https://posit.co/blog/knitr-fig-alt/> for examples"),
"",
"Set up or update GitHub Actions. \\
Updating workflows to the latest version will often fix troublesome actions:",
todo("`usethis::use_github_action('check-standard')`"),
todo("`usethis::use_github_action('pkgdown')`", uses_pkgdown()),
todo("`usethis::use_github_action('test-coverage')`", uses_testthat())
)
c(bullets, upkeep_extra_bullets(), checklist_footer(tidy = FALSE))
}
# tidyverse upkeep issue -------------------------------------------------------
#' @export
#' @rdname tidyverse
#' @param year Approximate year when you last touched this package. If `NULL`,
#' the default, will give you a full set of actions to perform.
use_tidy_upkeep_issue <- function(year = NULL) {
make_upkeep_issue(year = year, tidy = TRUE)
}
# for mocking
Sys.Date <- NULL
tidy_upkeep_checklist <- function(year = NULL, repo_spec = "OWNER/REPO") {
  posit_pkg <- is_posit_pkg()
  posit_person_ok <- is_posit_person_canonical()
  desc <- proj_desc()
  year <- year %||% 2000
bullets <- c(
"### To begin",
"",
todo('`pr_init("upkeep-{format(Sys.Date(), "%Y-%m")}")`'),
""
)
if (year <= 2000) {
bullets <- c(
bullets,
"### Pre-history",
"",
todo("`usethis::use_readme_rmd()`"),
todo("`usethis::use_roxygen_md()`"),
todo("`usethis::use_github_links()`"),
todo("`usethis::use_pkgdown_github_pages()`"),
todo("`usethis::use_tidy_github_labels()`"),
todo("`usethis::use_tidy_style()`"),
todo("`urlchecker::url_check()`"),
""
)
}
if (year <= 2020) {
bullets <- c(
bullets,
"### 2020",
"",
todo("`usethis::use_package_doc()`"),
todo("`usethis::use_testthat(3)`"),
todo("Align the names of `R/` files and `test/` files"),
""
)
}
if (year <= 2021) {
bullets <- c(
bullets,
"### 2021",
"",
todo("Remove check environments section from `cran-comments.md`"),
todo("Use lifecycle instead of artisanal deprecation messages"),
""
)
}
if (year <= 2022) {
bullets <- c(
bullets,
"### 2022",
"",
todo("Handle and close any still-open `master` --> `main` issues"),
todo('`usethis:::use_codecov_badge("{repo_spec}")`'),
todo("Update pkgdown site using instructions at <https://tidytemplate.tidyverse.org>"),
todo("Update lifecycle badges with more accessible SVGs: `usethis::use_lifecycle()`"),
""
)
}
  if (year <= 2023) {
    bullets <- c(
bullets,
"### 2023",
"",
todo(
"
Update email addresses *@rstudio.com -> *@posit.co",
author_has_rstudio_email()
),
todo(
'
Update copyright holder in DESCRIPTION: \\
`person("Posit Software, PBC", role = c("cph", "fnd"))`',
posit_pkg && !posit_person_ok
),
todo(
"
Run `devtools::document()` to re-generate package-level help topic \\
with DESCRIPTION changes",
author_has_rstudio_email() || (posit_pkg && !posit_person_ok)
),
todo("`usethis::use_tidy_logo(); pkgdown::build_favicons(overwrite = TRUE)`"),
todo("`usethis::use_tidy_coc()`"),
todo(
"Modernize citation files; see updated `use_citation()`",
has_citation_file()
),
todo('Use `pak::pak("{repo_spec}")` in README'),
todo("
Consider running `usethis::use_tidy_dependencies()` and/or \\
replace compat files with `use_standalone()`"),
todo("Use cli errors or [file an issue](new) if you don\'t have time to do it now"),
todo('
`usethis::use_standalone("r-lib/rlang", "types-check")` \\
instead of home grown argument checkers;
or [file an issue](new) if you don\'t have time to do it now'),
todo(
"
Change files ending in `.r` to `.R` in `R/` and/or `tests/testthat/`",
lowercase_r()
),
todo("
Add alt-text to pictures, plots, etc; see \\
https://posit.co/blog/knitr-fig-alt/ for examples"
),
""
)
}
bullets <- c(
bullets,
"### To finish",
"",
todo("`usethis::use_mit_license()`", grepl("MIT", desc$get_field("License"))),
todo(
'`usethis::use_package("R", "Depends", "{tidy_minimum_r_version()}")`',
tidy_minimum_r_version() > pkg_minimum_r_version()
),
todo("`usethis::use_tidy_description()`"),
todo("`usethis::use_tidy_github_actions()`"),
todo("`devtools::build_readme()`"),
todo("[Re-publish released site](https://pkgdown.r-lib.org/dev/articles/how-to-update-released-site.html) if needed"),
""
)
c(bullets, checklist_footer(tidy = TRUE))
}
# upkeep helpers ----------------------------------------------------------
# https://www.tidyverse.org/blog/2019/04/r-version-support/
tidy_minimum_r_version <- function() {
con <- curl::curl("https://api.r-hub.io/rversions/r-oldrel/4")
withr::defer(close(con))
# I do not want a failure here to make use_tidy_upkeep_issue() fail
json <- tryCatch(readLines(con, warn = FALSE), error = function(e) NULL)
if (is.null(json)) {
oldrel_4 <- "3.6"
} else {
version <- jsonlite::fromJSON(json)$version
oldrel_4 <- re_match(version, "[0-9]+[.][0-9]+")$.match
}
numeric_version(oldrel_4)
}
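# Illustrative sketch (not run): requires internet access to the r-hub API and
# falls back to "3.6" when the lookup fails.
if (FALSE) {
  tidy_minimum_r_version()
  #> a numeric_version tracking R oldrel-4, e.g. numeric_version("3.6")
}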
lowercase_r <- function() {
path <- proj_path(c("R", "tests"))
path <- path[fs::dir_exists(path)]
any(fs::path_ext(fs::dir_ls(path, recurse = TRUE)) == "r")
}
has_coc <- function() {
path <- proj_path(c(".", ".github"), "CODE_OF_CONDUCT.md")
any(file_exists(path))
}
has_citation_file <- function() {
file_exists(proj_path("inst/CITATION"))
}
uses_old_testthat_edition <- function(current) {
if (!requireNamespace("testthat", quietly = TRUE)) {
return(FALSE)
}
uses_testthat() && testthat::edition_get() < current
}
upkeep_extra_bullets <- function(env = NULL) {
env <- env %||% safe_pkg_env()
if (env_has(env, "upkeep_bullets")) {
c(paste0("* [ ] ", env$upkeep_bullets()), "")
} else {
""
}
}
checklist_footer <- function(tidy) {
tidy_fun <- if (tidy) "tidy_" else ""
glue('<sup>\\
Created on {Sys.Date()} with `usethis::use_{tidy_fun}upkeep_issue()`, using \\
[usethis v{usethis_version()}](https://usethis.r-lib.org)\\
</sup>')
}
usethis_version <- function() {
utils::packageVersion("usethis")
}
has_old_cran_comments <- function() {
cc <- proj_path("cran-comments.md")
file_exists(cc) &&
any(grepl("# test environment", readLines(cc), ignore.case = TRUE))
}
# ---- end of source file: R/upkeep.R ----
# ---- source file R/use-compat-file.R is empty in this snapshot ----
#' Copy a file from any GitHub repo into the current project
#'
#' Gets the content of a file from GitHub, from any repo the user can read, and
#' writes it into the active project. This function wraps an endpoint of the
#' GitHub API which supports specifying a target reference (i.e. branch, tag,
#' or commit) and which follows symlinks.
#'
#' @param repo_spec A string identifying the GitHub repo or, alternatively, a
#' GitHub file URL. Acceptable forms:
#' * Plain `OWNER/REPO` spec
#' * A blob URL, such as `"https://github.com/OWNER/REPO/blob/REF/path/to/some/file"`
#' * A raw URL, such as `"https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file"`
#'
#' In the case of a URL, the `path`, `ref`, and `host` are extracted from it, in
#' addition to the `repo_spec`.
#' @param path Path of file to copy, relative to the GitHub repo it lives in.
#' This is extracted from `repo_spec` when user provides a URL.
#' @param save_as Path of file to create, relative to root of active project.
#' Defaults to the last part of `path`, in the sense of `basename(path)` or
#' `fs::path_file(path)`.
#' @param ref The name of a branch, tag, or commit. By default, the file at
#' `path` will be copied from its current state in the repo's default branch.
#' This is extracted from `repo_spec` when user provides a URL.
#' @inheritParams use_template
#' @inheritParams use_github
#' @inheritParams write_over
#'
#' @return A logical indicator of whether a file was written, invisibly.
#' @export
#'
#' @examples
#' \dontrun{
#' use_github_file(
#' "https://github.com/r-lib/actions/blob/v2/examples/check-standard.yaml"
#' )
#'
#' use_github_file(
#' "r-lib/actions",
#' path = "examples/check-standard.yaml",
#' ref = "v2",
#' save_as = ".github/workflows/R-CMD-check.yaml"
#' )
#' }
use_github_file <- function(repo_spec,
path = NULL,
save_as = NULL,
ref = NULL,
ignore = FALSE,
open = FALSE,
overwrite = FALSE,
host = NULL) {
check_name(repo_spec)
maybe_name(path)
maybe_name(save_as)
maybe_name(ref)
check_bool(ignore)
check_bool(open)
check_bool(overwrite)
maybe_name(host)
dat <- parse_file_url(repo_spec)
if (dat$parsed) {
repo_spec <- dat$repo_spec
path <- dat$path
ref <- dat$ref
host <- dat$host
}
save_as <- save_as %||% path_file(path)
ref_string <- if (is.null(ref)) "" else glue("@{ref}")
github_string <- glue("{repo_spec}/{path}{ref_string}")
ui_done("Saving {ui_path(github_string)} to {ui_path(save_as)}")
lines <- read_github_file(
repo_spec = repo_spec,
path = path,
ref = ref,
host = host
)
new <- write_over(
proj_path(save_as),
lines,
quiet = TRUE,
overwrite = overwrite
)
if (ignore) {
use_build_ignore(save_as)
}
if (open && new) {
edit_file(proj_path(save_as))
}
invisible(new)
}
read_github_file <- function(repo_spec, path, ref = NULL, host = NULL) {
# https://docs.github.com/en/rest/reference/repos#contents
# https://docs.github.com/en/rest/reference/repos#if-the-content-is-a-symlink
# If the requested {path} points to a symlink, and the symlink's target is a
# normal file in the repository, then the API responds with the content of the
# file....
tf <- withr::local_tempfile()
gh::gh(
"/repos/{repo_spec}/contents/{path}",
repo_spec = repo_spec,
path = path,
ref = ref,
.api_url = host,
.destfile = tf,
.accept = "application/vnd.github.v3.raw"
)
read_utf8(tf)
}
# https://github.com/OWNER/REPO/blob/REF/path/to/some/file
# https://raw.githubusercontent.com/OWNER/REPO/REF/path/to/some/file
# https://github.acme.com/OWNER/REPO/blob/REF/path/to/some/file
# https://raw.github.acme.com/OWNER/REPO/REF/path/to/some/file
parse_file_url <- function(x) {
out <- list(
parsed = FALSE,
repo_spec = x,
path = NULL,
ref = NULL,
host = NULL
)
dat <- re_match(x, github_remote_regex)
if (is.na(dat$.match)) {
return(out)
}
# TODO: generalize here for GHE hosts that don't include 'github'
if (!grepl("github", dat$host)) {
ui_stop("URL doesn't seem to be associated with GitHub.")
}
if (!grepl("^(raw[.])?github", dat$host) ||
!nzchar(dat$fragment) ||
(grepl("^github", dat$host) && !grepl("^/blob/", dat$fragment))) {
ui_stop("Can't parse the URL provided via {ui_code('repo_spec')}.")
}
out$parsed <- TRUE
dat$host <- sub("^raw[.]", "", dat$host)
dat$host <- sub("^githubusercontent", "github", dat$host)
dat$fragment <- sub("^/(blob/)?", "", dat$fragment)
dat_fragment <- re_match(dat$fragment, "^(?<ref>[^/]+)/(?<path>.+)$")
out$repo_spec <- make_spec(owner = dat$repo_owner, repo = dat$repo_name)
out$path <- dat_fragment$path
out$ref <- dat_fragment$ref
out$host <- glue_chr("https://{dat$host}")
out
}
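# Illustrative sketch (not run) of the list returned for a blob URL; the field
# values shown are what the extraction above yields for this example URL.
if (FALSE) {
  parse_file_url(
    "https://github.com/r-lib/actions/blob/v2/examples/check-standard.yaml"
  )
  #> list(
  #>   parsed = TRUE,
  #>   repo_spec = "r-lib/actions",
  #>   path = "examples/check-standard.yaml",
  #>   ref = "v2",
  #>   host = "https://github.com"
  #> )
}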
# ---- end of source file: R/use_github_file.R ----
#' Import a function from another package
#'
#' @description
#' `use_import_from()` imports a function from another package by adding the
#' roxygen2 `@importFrom` tag to the package-level documentation (which can be
#' created with [`use_package_doc()`]). Importing a function from another
#' package allows you to refer to it without a namespace (e.g., `fun()` instead
#' of `package::fun()`).
#'
#' `use_import_from()` also re-documents the `NAMESPACE` file and re-loads the
#' current package. This ensures that `fun` is immediately available in your
#' development session.
#'
#' @param package Package name
#' @param fun A vector of function names
#' @param load Logical. Re-load with [`pkgload::load_all()`]?
#' @return
#' Invisibly, `TRUE` if the package document has changed, `FALSE` if not.
#' @export
#' @examples
#' \dontrun{
#' use_import_from("usethis", "ui_todo")
#' }
use_import_from <- function(package, fun, load = is_interactive()) {
if (!is_string(package)) {
ui_stop("{ui_code('package')} must be a single string")
}
check_is_package("use_import_from()")
check_uses_roxygen("use_import_from()")
check_installed(package)
check_has_package_doc("use_import_from()")
check_functions_exist(package, fun)
use_dependency(package, "Imports")
changed <- roxygen_ns_append(glue("@importFrom {package} {fun}"))
if (changed) {
roxygen_update_ns(load)
}
invisible(changed)
}
check_functions_exist <- function(package, fun) {
purrr::walk2(package, fun, check_fun_exists)
}
check_fun_exists <- function(package, fun) {
if (exists(fun, envir = asNamespace(package))) {
return()
}
name <- paste0(package, "::", fun, "()")
ui_stop("Can't find {ui_code(name)}")
}
check_has_package_doc <- function(whos_asking) {
if (has_package_doc()) {
return(invisible(TRUE))
}
msg <- c(
"{ui_code(whos_asking)} requires package-level documentation.",
"Would you like to add it now?"
)
if (is_interactive() && ui_yeah(msg)) {
use_package_doc()
} else {
ui_stop(c(
"{ui_code(whos_asking)} requires package docs",
"You can add it by running {ui_code('use_package_doc()')}"
))
}
invisible(TRUE)
}
# ---- end of source file: R/use_import_from.R ----
#' Use a standalone file from another repo
#'
#' @description
#' A "standalone" file implements a minimum set of functionality in such a way
#' that it can be copied into another package. `use_standalone()` makes it easy
#' to get such a file into your own repo.
#'
#' It always overwrites an existing standalone file of the same name, making
#' it easy to update previously imported code.
#'
#' @section Supported fields:
#'
#' A standalone file has YAML frontmatter that provides additional information,
#' such as where the file originates from and when it was last updated. Here is
#' an example:
#'
#' ```
#' ---
#' repo: r-lib/rlang
#' file: standalone-types-check.R
#' last-updated: 2023-03-07
#' license: https://unlicense.org
#' dependencies: standalone-obj-type.R
#' imports: rlang (>= 1.1.0)
#' ---
#' ```
#'
#' Two of these fields are consulted by `use_standalone()`:
#'
#' - `dependencies`: A file or a list of files in the same repo that
#' the standalone file depends on. These files are retrieved
#' automatically by `use_standalone()`.
#'
#' - `imports`: A package or list of packages that the standalone file
#' depends on. A minimal version may be specified in parentheses,
#' e.g. `rlang (>= 1.0.0)`. These dependencies are passed to
#' [use_package()] to ensure they are included in the `Imports:`
#' field of the `DESCRIPTION` file.
#'
#' Note that lists are specified with standard YAML syntax, using
#' square brackets, for example: `imports: [rlang (>= 1.0.0), purrr]`.
#'
#' @inheritParams create_from_github
#' @inheritParams use_github_file
#' @param file Name of standalone file. The `standalone-` prefix and file
#' extension are optional. If omitted, will allow you to choose from the
#' standalone files offered by that repo.
#' @export
#' @examples
#' \dontrun{
#' use_standalone("r-lib/rlang", file = "types-check")
#' use_standalone("r-lib/rlang", file = "types-check", ref = "standalone-dep")
#' }
use_standalone <- function(repo_spec, file = NULL, ref = NULL, host = NULL) {
check_is_project()
maybe_name(file)
maybe_name(host)
maybe_name(ref)
parsed_repo_spec <- parse_repo_url(repo_spec)
if (!is.null(parsed_repo_spec$host)) {
repo_spec <- parsed_repo_spec$repo_spec
host <- parsed_repo_spec$host
}
if (is.null(file)) {
file <- standalone_choose(repo_spec, ref = ref, host = host)
} else {
file <- as_standalone_file(file)
}
src_path <- path("R", file)
dest_path <- path("R", as_standalone_dest_file(file))
lines <- read_github_file(repo_spec, path = src_path, ref = ref, host = host)
lines <- c(standalone_header(repo_spec, src_path), lines)
write_over(proj_path(dest_path), lines, overwrite = TRUE)
  dependencies <- standalone_dependencies(lines, src_path)
for (dependency in dependencies$deps) {
use_standalone(repo_spec, dependency, ref = ref, host = host)
}
imports <- dependencies$imports
for (i in seq_len(nrow(imports))) {
import <- imports[i, , drop = FALSE]
if (is.na(import$ver)) {
ver <- NULL
} else {
ver <- import$ver
}
ui_silence(
use_package(import$pkg, min_version = ver)
)
}
invisible()
}
standalone_choose <- function(repo_spec, ref = NULL, host = NULL, error_call = caller_env()) {
json <- gh::gh(
"/repos/{repo_spec}/contents/{path}",
repo_spec = repo_spec,
ref = ref,
.api_url = host,
path = "R/"
)
names <- map_chr(json, "name")
names <- names[grepl("^standalone-", names)]
choices <- gsub("^standalone-|.[Rr]$", "", names)
if (length(choices) == 0) {
cli::cli_abort(
"No standalone files found in {repo_spec}.",
call = error_call
)
}
if (!is_interactive()) {
cli::cli_abort(
c(
"`file` is absent, but must be supplied.",
i = "Possible options are {.or {choices}}."
),
call = error_call
)
}
choice <- utils::menu(
choices = choices,
title = "Which standalone file do you want to use (0 to exit)?"
)
if (choice == 0) {
cli::cli_abort("Selection cancelled", call = error_call)
}
names[[choice]]
}
as_standalone_file <- function(file) {
if (path_ext(file) == "") {
file <- unclass(path_ext_set(file, "R"))
}
if (!grepl("standalone-", file)) {
file <- paste0("standalone-", file)
}
file
}
as_standalone_dest_file <- function(file) {
gsub("standalone-", "import-standalone-", file)
}
standalone_header <- function(repo_spec, path) {
c(
"# Standalone file: do not edit by hand",
glue("# Source: <https://github.com/{repo_spec}/blob/main/{path}>"),
paste0("# ", strrep("-", 72 - 2)),
"#"
)
}
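# Illustrative sketch (not run): the header prepended to an imported standalone
# file. The repo and path here are just examples.
if (FALSE) {
  cat(
    standalone_header("r-lib/rlang", "R/standalone-types-check.R"),
    sep = "\n"
  )
  #> # Standalone file: do not edit by hand
  #> # Source: <https://github.com/r-lib/rlang/blob/main/R/standalone-types-check.R>
  #> # ---- ... ---- (a 70-character divider)
  #> #
}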
standalone_dependencies <- function(lines, path, error_call = caller_env()) {
dividers <- which(lines == "# ---")
if (length(dividers) != 2) {
cli::cli_abort(
"Can't find yaml metadata in {.path {path}}.",
call = error_call
)
}
header <- lines[dividers[[1]]:dividers[[2]]]
header <- gsub("^# ", "", header)
temp <- withr::local_tempfile(lines = header)
yaml <- rmarkdown::yaml_front_matter(temp)
as_chr_field <- function(field) {
if (!is.null(field) && !is.character(field)) {
cli::cli_abort(
"Invalid dependencies specification in {.path {path}}.",
call = error_call
)
}
field %||% character()
}
deps <- as_chr_field(yaml$dependencies)
imports <- as_chr_field(yaml$imports)
imports <- as_version_info(imports, error_call = error_call)
if (any(stats::na.omit(imports$cmp) != ">=")) {
cli::cli_abort(
"Version specification must use {.code >=}.",
call = error_call
)
}
list(deps = deps, imports = imports)
}
as_version_info <- function(fields, error_call = caller_env()) {
if (!length(fields)) {
return(version_info_df())
}
if (any(grepl(",", fields))) {
msg <- c(
"Version field can't contain comma.",
"i" = "Do you need to wrap in a list?"
)
cli::cli_abort(msg, call = error_call)
}
info <- lapply(fields, as_version_info_row, error_call = error_call)
inject(rbind(!!!info))
}
as_version_info_row <- function(field, error_call = caller_env()) {
version_regex <- "(.*) \\((.*)\\)$"
has_ver <- grepl(version_regex, field)
if (!has_ver) {
return(version_info_df(field, NA, NA))
}
pkg <- sub(version_regex, "\\1", field)
ver <- sub(version_regex, "\\2", field)
ver <- strsplit(ver, " ")[[1]]
if (!is_character(ver, n = 2) || any(is.na(ver)) || !all(nzchar(ver))) {
cli::cli_abort(
c(
"Can't parse version `{field}` in `imports:` field.",
"i" = "Example of expected version format: `rlang (>= 1.0.0)`."
),
call = error_call
)
}
version_info_df(pkg, ver[[1]], ver[[2]])
}
version_info_df <- function(pkg = chr(), cmp = chr(), ver = chr()) {
df <- data.frame(
pkg = as.character(pkg),
cmp = as.character(cmp),
ver = as.character(ver)
)
structure(df, class = c("tbl", "data.frame"))
}
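# Illustrative sketch (not run): how `imports:` entries from the YAML
# frontmatter are parsed into the version info data frame.
if (FALSE) {
  as_version_info(c("rlang (>= 1.1.0)", "purrr"))
  #>     pkg  cmp   ver
  #> 1 rlang   >= 1.1.0
  #> 2 purrr <NA>  <NA>
}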
# ---- end of source file: R/use_standalone.R ----
#' Defunct PR functions
#'
#' @description
#' `r lifecycle::badge("defunct")`
#'
#' * `pr_pull_upstream()` has been replaced by [pr_merge_main()].
#' * `pr_sync()` has been replaced by [pr_pull()] + [pr_merge_main()] + [pr_push()]
#'
#' @keywords internal
#' @export
pr_pull_upstream <- function() {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "pr_pull_upstream()",
with = "pr_merge_main()",
)
}
#' @rdname pr_pull_upstream
#' @export
pr_sync <- function() {
details <- glue("
Sync a PR with:
* {ui_code('pr_pull()')}
* {ui_code('pr_merge_main()')}
* {ui_code('pr_push()')}")
lifecycle::deprecate_stop(
when = "2.0.0",
what = "pr_sync()",
details = details
)
}
#' Defunct GitHub functions
#'
#' @description
#' `r lifecycle::badge("defunct")`
#'
#' * `browse_github_token()` and `browse_github_pat()` have been replaced by
#' [create_github_token()].
#' * `github_token()` has been replaced by [gh::gh_token()]
#' * `git_branch_default()` has been replaced by [git_default_branch()].
#'
#' @keywords internal
#' @export
browse_github_token <- function(...) {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "browse_github_token()",
with = "create_github_token()"
)
}
#' @rdname browse_github_token
#' @export
browse_github_pat <- function(...) {
lifecycle::deprecate_stop(
"2.0.0",
what = "browse_github_pat()",
with = "create_github_token()"
)
}
#' @rdname browse_github_token
#' @export
github_token <- function() {
details <- glue("
Call {ui_code('gh::gh_token()')} to retrieve a GitHub personal access token
Call {ui_code('gh_token_help()')} if you need help getting or configuring \\
your token")
lifecycle::deprecate_stop(
"2.0.0",
what = "github_token()",
details = details
)
}
#' @rdname browse_github_token
#' @export
git_branch_default <- function() {
lifecycle::deprecate_soft("2.1.0", "git_branch_default()", "git_default_branch()")
git_default_branch()
}
#' Defunct tidyverse functions
#'
#' @description
#' `r lifecycle::badge("defunct")`
#'
#' * `use_tidy_labels()` has been replaced by [use_tidy_github_labels()].
#' * `use_tidy_ci()` has been replaced by [use_tidy_github_actions()].
#' * `use_tidy_eval()` is defunct because there's no longer a need to
#' systematically import and re-export a large number of functions in order
#' to use tidy evaluation. Instead, use [use_import_from()] to tactically
#' import functions as you need them.
#'
#' @keywords internal
#' @export
use_tidy_labels <- function() {
lifecycle::deprecate_stop("2.1.0", "use_tidy_labels()", "use_tidy_github_labels()")
}
#' @rdname use_tidy_labels
#' @export
use_tidy_ci <- function(...) {
lifecycle::deprecate_stop("2.1.0", "use_tidy_ci()", "use_tidy_github_actions()")
}
#' @rdname use_tidy_labels
#' @keywords internal
#' @export
use_tidy_eval <- function() {
lifecycle::deprecate_stop(
"2.2.0",
"use_tidy_eval()",
details = c(
"There is no longer a need to systematically import and/or re-export functions",
"Instead import functions as needed, with e.g.:",
'usethis::use_import_from("rlang", c(".data", ".env"))'
)
)
}
#' Defunct git2r functions
#'
#' @description
#' `r lifecycle::badge("defunct")`
#'
#' In usethis v2.0.0, usethis switched from git2r to gert (+ credentials) for
#' all Git operations. This pair of packages (gert + credentials) is designed to
#' discover and use the same credentials as command line Git. As a result, a
#' great deal of credential-handling assistance has been removed from usethis,
#' primarily around SSH keys.
#'
#' If you have credential problems, focus your troubleshooting on getting the
#' credentials package to find your credentials. The [introductory
#' vignette](https://docs.ropensci.org/credentials/articles/intro.html)
#' is a good place to start.
#'
#' If you use the HTTPS protocol (which we recommend), a GitHub personal access
#' token will satisfy all auth needs, for both Git and the GitHub API, and is
#' therefore the easiest approach to get working. See [gh_token_help()] for
#' more.
#'
#' @param protocol Deprecated.
#' @param auth_token Deprecated.
#' @param credentials Deprecated.
#'
#' @return These functions are defunct and now raise an error.
#' @export
git_credentials <- function(protocol = deprecated(),
auth_token = deprecated()) {
lifecycle::deprecate_stop(
"2.0.0",
"git_credentials()",
details = git2r_explanation
)
invisible()
}
#' @rdname git_credentials
#' @export
use_git_credentials <- function(credentials = deprecated()) {
lifecycle::deprecate_stop(
"2.0.0",
"use_git_credentials()",
details = git2r_explanation
)
invisible()
}
git2r_explanation <- glue("
usethis now uses the gert package for Git operations, instead of git2r, and
gert relies on the credentials package for auth. Therefore git2r credentials
are no longer accepted.")
deprecate_warn_credentials <- function(whos_asking, details = NULL) {
whos_asking <- sub("[()]+$", "", whos_asking)
what <- glue("{whos_asking}(credentials = )")
lifecycle::deprecate_warn(
"2.0.0",
what,
details = details %||% git2r_explanation
)
}
deprecate_warn_host <- function(whos_asking, details = NULL) {
whos_asking <- sub("[()]+$", "", whos_asking)
what <- glue("{whos_asking}(host = )")
host_explanation <- glue("
usethis now determines the {ui_code('host')} from the current project's \\
Git remotes.
The {ui_code('host')} argument is ignored and will eventually be removed.")
lifecycle::deprecate_warn(
"2.0.0",
what,
details = details %||% host_explanation
)
}
deprecate_warn_auth_token <- function(whos_asking, details = NULL) {
whos_asking <- sub("[()]+$", "", whos_asking)
what <- glue("{whos_asking}(auth_token = )")
auth_token_explanation <- glue("
usethis now delegates token lookup to the gh package, which retrieves \\
credentials based on the targeted host URL.
This URL is determined by the current project's Git remotes.
The {ui_code('auth_token')} argument is ignored and will eventually be \\
removed.")
lifecycle::deprecate_warn(
"2.0.0",
what,
details = details %||% auth_token_explanation
)
}
deprecate_warn_repo_spec <- function(whos_asking, details = NULL) {
whos_asking <- sub("[()]+$", "", whos_asking)
what <- glue("{whos_asking}(repo_spec = )")
repo_spec_explanation <- glue("
usethis now consults the current project's Git remotes to determine the \\
target repo.
The {ui_code('repo_spec')} argument is ignored and will eventually be \\
removed.")
lifecycle::deprecate_warn(
"2.0.0",
what,
details = details %||% repo_spec_explanation
)
}
# ci ----------------------------------------------------------------------
#' Defunct Travis and Appveyor functions
#'
#' @description
#' `r lifecycle::badge("defunct")`
#'
#' These functions which formally supported CI on Appveyor and Travis are
#' now defunct as we no longer recommend using these services. We now
#' recommend using GitHub actions, e.g. with [use_github_action()].
#'
#' @export
#' @keywords internal
use_travis <- function(browse = rlang::is_interactive(),
ext = c("com", "org")) {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "use_travis()",
with = "use_github_action()"
)
}
#' @export
#' @rdname use_travis
use_travis_badge <- function(ext = c("com", "org"), repo_spec = NULL) {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "use_travis_badge()"
)
}
#' @export
#' @rdname use_travis
use_pkgdown_travis <- function() {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "use_pkgdown_travis()",
with = "use_pkgdown_github_pages()"
)
}
#' @export
#' @rdname use_travis
use_appveyor <- function(browse = rlang::is_interactive()) {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "use_appveyor()",
with = "use_github_action()"
)
}
#' @export
#' @rdname use_travis
use_appveyor_badge <- function(repo_spec = NULL) {
lifecycle::deprecate_stop(
when = "2.0.0",
what = "use_appveyor_badge()",
)
}
#' @export
#' @rdname use_travis
browse_travis <- function(package = NULL, ext = c("com", "org")) {
lifecycle::deprecate_stop(
when = "2.2.0",
what = "browse_travis()",
)
}
# GitHub actions --------------------------------------------------------------
#' Defunct GitHub Actions workflows
#'
#' @description
#' `r lifecycle::badge("deprecated")`
#'
#' * `use_github_actions()` is deprecated because it was just an alias
#' for [use_github_action_check_release()].
#'
#' * `use_github_action_check_full()` is overkill for most packages and is
#' not recommended.
#'
#' * `use_github_action_check_release()`, `use_github_action_check_standard()`,
#' and `use_github_action_pr_commands()` are deprecated in favor of
#' [use_github_action()], which can now suggest specific workflows to use.
#'
#' @export
#' @keywords internal
use_github_actions <- function() {
lifecycle::deprecate_warn(
when = "2.2.0",
what = "use_github_actions()",
with = "use_github_action('check-release')"
)
use_github_action('check-release')
}
#' @rdname use_github_actions
#' @export
use_github_action_check_release <- function(save_as = "R-CMD-check.yaml",
ref = NULL,
ignore = TRUE,
open = FALSE) {
lifecycle::deprecate_warn(
when = "2.2.0",
what = "use_github_action_check_release()",
with = "use_github_action('check-release')"
)
use_github_action(
"check-release.yaml",
ref = ref,
save_as = save_as,
ignore = ignore,
open = open
)
use_github_actions_badge(save_as)
}
#' @rdname use_github_actions
#' @export
use_github_action_check_standard <- function(save_as = "R-CMD-check.yaml",
ref = NULL,
ignore = TRUE,
open = FALSE) {
lifecycle::deprecate_warn(
when = "2.2.0",
what = "use_github_action_check_standard()",
with = "use_github_action('check-standard')"
)
use_github_action(
"check-standard.yaml",
ref = ref,
save_as = save_as,
ignore = ignore,
open = open
)
use_github_actions_badge(save_as)
}
#' @rdname use_github_actions
#' @export
use_github_action_pr_commands <- function(save_as = "pr-commands.yaml",
ref = NULL,
ignore = TRUE,
open = FALSE) {
lifecycle::deprecate_warn(
when = "2.2.0",
what = "use_github_action_pr_commands()",
with = "use_github_action('pr-commands')"
)
use_github_action(
"pr-commands.yaml",
ref = ref,
save_as = save_as,
ignore = ignore,
open = open
)
}
#' @rdname use_github_actions
#' @export
use_github_action_check_full <- function(save_as = "R-CMD-check.yaml",
ignore = TRUE,
open = FALSE,
repo_spec = NULL) {
details <- glue("
It is overkill for the vast majority of R packages.
The \"check-full\" workflow is among those configured by \\
{ui_code('use_tidy_github_actions()')}.
If you really want it, request it by name with \\
{ui_code('use_github_action()')}.")
lifecycle::deprecate_stop(
"2.1.0",
"use_github_action_check_full()",
details = details
)
}
# ---- end of source file: R/usethis-defunct.R ----
#' @keywords internal
"_PACKAGE"
## usethis namespace: start
#' @import fs
#' @import rlang
#' @importFrom glue glue glue_collapse glue_data
#' @importFrom lifecycle deprecated
#' @importFrom purrr map map_chr map_lgl map_int
#' @importFrom utils available.packages
## usethis namespace: end
NULL
#' Options consulted by usethis
#'
#' @description
#' User-configurable options consulted by usethis, which provide a mechanism
#' for setting default behaviors for various functions.
#'
#' If the built-in defaults don't suit you, set one or more of these options.
#' Typically, this is done in the `.Rprofile` startup file, which you can open
#' for editing with [edit_r_profile()] - this will set the specified options for
#' all future R sessions. Your code will look something like:
#'
#' ```
#' options(
#' usethis.description = list(
#' "Authors@R" = utils::person(
#' "Jane", "Doe",
#' email = "[email protected]",
#' role = c("aut", "cre"),
#' comment = c(ORCID = "YOUR-ORCID-ID")
#' ),
#' License = "MIT + file LICENSE"
#' ),
#' usethis.destdir = "/path/to/folder/", # for use_course(), create_from_github()
#' usethis.protocol = "ssh", # Use ssh git protocol
#' usethis.overwrite = TRUE # overwrite files in Git repos without confirmation
#' )
#' ```
#'
#' @section Options for the usethis package:
#'
#' - `usethis.description`: customize the default content of new `DESCRIPTION`
#' files by setting this option to a named list.
#' If you are a frequent package developer, it is worthwhile to pre-configure
#' your preferred name, email, license, etc. See the example above and the
#' [article on usethis setup](https://usethis.r-lib.org/articles/articles/usethis-setup.html)
#' for more details.
#'
#' - `usethis.destdir`: Default directory in which to place new projects
#' downloaded by [use_course()] and [create_from_github()].
#' If this option is unset, the user's Desktop or similarly conspicuous place
#' will be used.
#'
#' - `usethis.protocol`: specifies your preferred transport protocol for Git.
#' Either "https" (default) or "ssh":
#' * `usethis.protocol = "https"` implies `https://github.com/<OWNER>/<REPO>.git`
#' * `usethis.protocol = "ssh"` implies `git@@github.com:<OWNER>/<REPO>.git`
#'
#' You can also change this for the duration of your R session with
#' [use_git_protocol()].
#'
#' - `usethis.overwrite`: If `TRUE`, usethis overwrites an existing file without
#' asking for user confirmation if the file is inside a Git repo. The
#' rationale is that the normal Git workflow makes it easy to see and
#' selectively accept/discard any proposed changes.
#'
#' - `usethis.quiet`: Set to `TRUE` to suppress user-facing messages. Default
#' `FALSE`.
#'
#' - `usethis.allow_nested_project`: Whether or not to allow
#' you to create a project inside another project. This is rarely a good idea,
#' so this option defaults to `FALSE`.
#'
#' @name usethis_options
NULL
release_bullets <- function() {
c(
"Check that `use_code_of_conduct()` is shipping the latest version of the Contributor Covenant (<https://www.contributor-covenant.org>)."
)
}
# ---- end of source file: R/usethis-package.R ----
# Functions that are in a grey area between usethis and gh
gh_tr <- function(tr) {
force(tr)
function(endpoint, ...) {
gh::gh(
endpoint,
...,
owner = tr$repo_owner, repo = tr$repo_name, .api_url = tr$api_url
)
}
}
# Functions inlined from gh ----
get_baseurl <- function(url) { # https://github.uni.edu/api/v3/
if (!any(grepl("^https?://", url))) {
stop("Only works with HTTP(S) protocols")
}
prot <- sub("^(https?://).*$", "\\1", url) # https://
rest <- sub("^https?://(.*)$", "\\1", url) # github.uni.edu/api/v3/
host <- sub("/.*$", "", rest) # github.uni.edu
paste0(prot, host) # https://github.uni.edu
}
# https://api.github.com --> https://github.com
# api.github.com --> github.com
normalize_host <- function(x) {
sub("api[.]github[.]com", "github.com", x)
}
get_hosturl <- function(url) {
url <- get_baseurl(url)
normalize_host(url)
}
# (almost) the inverse of get_hosturl()
# https://github.com --> https://api.github.com
# https://github.uni.edu --> https://github.uni.edu/api/v3
get_apiurl <- function(url) {
host_url <- get_hosturl(url)
prot_host <- strsplit(host_url, "://", fixed = TRUE)[[1]]
if (is_github_dot_com(host_url)) {
paste0(prot_host[[1]], "://api.github.com")
} else {
paste0(host_url, "/api/v3")
}
}
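# Illustrative sketch (not run) of the host/API URL normalization above:
if (FALSE) {
  get_hosturl("https://api.github.com/repos/OWNER/REPO") # "https://github.com"
  get_apiurl("https://github.com/OWNER/REPO")            # "https://api.github.com"
  get_apiurl("https://github.uni.edu/OWNER/REPO")        # "https://github.uni.edu/api/v3"
}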
is_github_dot_com <- function(url) {
url <- get_baseurl(url)
url <- normalize_host(url)
grepl("^https?://github.com", url)
}
default_api_url <- function() {
Sys.getenv("GITHUB_API_URL", unset = "https://api.github.com")
}
# ---- end of source file: R/utils-gh.R ----
# gert -------------------------------------------------------------------------
gert_shush <- function(expr, regexp) {
check_character(regexp)
withCallingHandlers(
gertMessage = function(cnd) {
m <- map_lgl(regexp, ~ grepl(.x, cnd_message(cnd), perl = TRUE))
if (any(m)) {
cnd_muffle(cnd)
}
},
expr
)
}
# Repository -------------------------------------------------------------------
git_repo <- function() {
check_uses_git()
proj_get()
}
uses_git <- function() {
repo <- tryCatch(
gert::git_find(proj_get()),
error = function(e) NULL
)
!is.null(repo)
}
check_uses_git <- function() {
if (uses_git()) {
return(invisible())
}
ui_stop(c(
"Cannot detect that project is already a Git repository.",
"Do you need to run {ui_code('use_git()')}?"
))
}
git_init <- function() {
gert::git_init(proj_get())
}
# Config -----------------------------------------------------------------------
# `where = "de_facto"` means look at the values that are "in force", i.e. where
# local repo variables override global user-level variables, when both are
# defined
#
# `where = "local"` is strict, i.e. it only returns a value that is in the local
# config
git_cfg_get <- function(name, where = c("de_facto", "local", "global")) {
where <- match.arg(where)
if (where == "de_facto") {
return(git_cfg_get(name, "local") %||% git_cfg_get(name, "global"))
}
if (where == "global" || !uses_git()) {
dat <- gert::git_config_global()
} else {
dat <- gert::git_config(repo = git_repo())
}
if (where == "local") {
dat <- dat[dat$level == "local", ]
}
out <- dat$value[tolower(dat$name) == tolower(name)]
if (length(out) > 0) out else NULL
}
# more-specific case for user-name and -email
git_user_get <- function(where = c("de_facto", "local", "global")) {
where <- match.arg(where)
list(
name = git_cfg_get("user.name", where),
email = git_cfg_get("user.email", where)
)
}
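# Illustrative sketch (not run): "de_facto" reports whichever value is in
# force, i.e. a repo-local setting wins over the global one when both exist.
if (FALSE) {
  git_cfg_get("user.email", where = "de_facto")
  git_user_get("global")
  #> list(name = ..., email = ...) from the global Git config
}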
# translate from "usethis" terminology to "git" terminology
where_from_scope <- function(scope = c("user", "project")) {
  scope <- match.arg(scope)
where_scope <- c(user = "global", project = "de_facto")
where_scope[scope]
}
# ensures that core.excludesFile is configured
# if configured, leave well enough alone
# if not, check for existence of one of the Usual Suspects; if found, configure
# otherwise, configure as path_home(".gitignore")
ensure_core_excludesFile <- function() {
path <- git_ignore_path(scope = "user")
if (!is.null(path)) {
return(invisible())
}
# .gitignore is most common, but .gitignore_global appears in prominent
# places --> so we allow the latter, but prefer the former
path <-
path_first_existing(path_home(c(".gitignore", ".gitignore_global"))) %||%
path_home(".gitignore")
if (!is_windows()) {
# express path relative to user's home directory, except on Windows
path <- path("~", path_rel(path, path_home()))
}
ui_done("Configuring {ui_field('core.excludesFile')}: {ui_path(path)}")
gert::git_config_global_set("core.excludesFile", path)
invisible()
}
# Status------------------------------------------------------------------------
git_status <- function(untracked) {
check_bool(untracked)
st <- gert::git_status(repo = git_repo())
if (!untracked) {
st <- st[st$status != "new", ]
}
st
}
# Commit -----------------------------------------------------------------------
git_ask_commit <- function(message, untracked, push = FALSE, paths = NULL) {
if (!is_interactive() || !uses_git()) {
return(invisible())
}
# this is defined here to encourage all commits to route through this function
git_commit <- function(paths, message) {
repo <- git_repo()
ui_done("Adding files")
gert::git_add(paths, repo = repo)
ui_done("Making a commit with message {ui_value(message)}")
gert::git_commit(message, repo = repo)
}
uncommitted <- git_status(untracked)$file
if (is.null(paths)) {
paths <- uncommitted
} else {
paths <- intersect(paths, uncommitted)
}
n <- length(paths)
if (n == 0) {
return(invisible())
}
paths <- sort(paths)
ui_paths <- map_chr(paths, ui_path)
if (n > 10) {
ui_paths <- c(ui_paths[1:10], "...")
}
if (n == 1) {
file_hint <- "There is 1 uncommitted file:"
} else {
file_hint <- "There are {n} uncommitted files:"
}
ui_line(c(
file_hint,
paste0("* ", ui_paths)
))
  # Only push if there's nothing to pull and at most one unpushed local change
push <- push && git_can_push(max_local = 1)
msg <- paste0(
"Is it ok to commit ",
if (push) "and push ",
if (n == 1) 'it' else 'them',
"?"
)
if (ui_yeah(msg)) {
git_commit(paths, message)
if (push) {
git_push()
}
}
invisible()
}
git_uncommitted <- function(untracked = FALSE) {
nrow(git_status(untracked)) > 0
}
challenge_uncommitted_changes <- function(untracked = FALSE, msg = NULL) {
if (!uses_git()) {
return(invisible())
}
if (rstudioapi::hasFun("documentSaveAll")) {
rstudioapi::documentSaveAll()
}
default_msg <- "
There are uncommitted changes, which may cause problems or be lost when \\
we push, pull, switch, or compare branches"
msg <- glue(msg %||% default_msg)
if (git_uncommitted(untracked = untracked)) {
if (ui_yeah("{msg}\nDo you want to proceed anyway?")) {
return(invisible())
} else {
ui_stop("Uncommitted changes. Please commit before continuing.")
}
}
}
git_conflict_report <- function() {
st <- git_status(untracked = FALSE)
conflicted <- st$file[st$status == "conflicted"]
n <- length(conflicted)
if (n == 0) {
return(invisible())
}
conflicted_paths <- map_chr(conflicted, ui_path)
ui_line(c(
"There are {n} conflicted files:",
paste0("* ", conflicted_paths)
))
msg <- glue("
Are you ready to sort this out?
If so, we will open the conflicted files for you to edit.")
yes <- "Yes, I'm ready to resolve the merge conflicts."
no <- "No, I want to abort this merge."
if (ui_yeah(msg, yes = yes, no = no, shuffle = FALSE)) {
ui_silence(purrr::walk(conflicted, edit_file))
ui_stop("
Please fix each conflict, save, stage, and commit.
To back out of this merge, run {ui_code('gert::git_merge_abort()')} \\
(in R) or {ui_code('git merge --abort')} (in the shell).")
} else {
gert::git_merge_abort(repo = git_repo())
ui_stop("Abandoning the merge, since it will cause merge conflicts")
}
}
# Remotes ----------------------------------------------------------------------
## remref --> remote, branch
git_parse_remref <- function(remref) {
regex <- paste0("^", names(git_remotes()), collapse = "|")
regex <- glue("({regex})/(.*)")
list(remote = sub(regex, "\\1", remref), branch = sub(regex, "\\2", remref))
}
remref_remote <- function(remref) git_parse_remref(remref)$remote
remref_branch <- function(remref) git_parse_remref(remref)$branch
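# Illustrative sketch (not run): splitting a remref into its parts. The split
# is based on the remote names configured for the current repo, so "origin"
# must actually exist as a remote here.
if (FALSE) {
  remref_remote("origin/main") # "origin"
  remref_branch("origin/main") # "main"
}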
# Pull -------------------------------------------------------------------------
# Pull from remref or upstream tracking. If neither given/exists, do nothing.
# Therefore, this does less than `git pull`.
git_pull <- function(remref = NULL, verbose = TRUE) {
check_string(remref, allow_na = TRUE, allow_null = TRUE)
repo <- git_repo()
branch <- git_branch()
remref <- remref %||% git_branch_tracking(branch)
if (is.na(remref)) {
if (verbose) {
ui_done("No remote branch to pull from for {ui_value(branch)}.")
}
return(invisible())
}
if (verbose) {
ui_done("Pulling from {ui_value(remref)}.")
}
gert::git_fetch(
remote = remref_remote(remref),
refspec = remref_branch(remref),
repo = repo,
verbose = FALSE
)
# this is pretty brittle, because I've hard-wired these messages
# https://github.com/r-lib/gert/blob/main/R/merge.R
# but at time of writing, git_merge() offers no verbosity control
gert_shush(
regexp = c(
"Already up to date, nothing to merge",
"Performing fast-forward merge, no commit needed"
),
gert::git_merge(remref, repo = repo)
)
st <- git_status(untracked = TRUE)
if (any(st$status == "conflicted")) {
git_conflict_report()
}
invisible()
}
# Branch ------------------------------------------------------------------
git_branch <- function() {
info <- gert::git_info(repo = git_repo())
branch <- info$shorthand
if (identical(branch, "HEAD")) {
ui_stop("Detached head; can't continue")
}
if (is.na(branch)) {
ui_stop("On an unborn branch -- do you need to make an initial commit?")
}
branch
}
git_branch_tracking <- function(branch = git_branch()) {
repo <- git_repo()
if (!gert::git_branch_exists(branch, local = TRUE, repo = repo)) {
ui_stop("There is no local branch named {ui_value(branch)}")
}
gbl <- gert::git_branch_list(local = TRUE, repo = repo)
sub("^refs/remotes/", "", gbl$upstream[gbl$name == branch])
}
git_branch_compare <- function(branch = git_branch(), remref = NULL) {
remref <- remref %||% git_branch_tracking(branch)
gert::git_fetch(
remote = remref_remote(remref),
refspec = remref_branch(remref),
repo = git_repo(),
verbose = FALSE
)
out <- gert::git_ahead_behind(upstream = remref, ref = branch, repo = git_repo())
list(local_only = out$ahead, remote_only = out$behind)
}
git_can_push <- function(max_local = Inf, branch = git_branch(), remref = NULL) {
remref <- remref %||% git_branch_tracking(branch)
if (is.null(remref) || is.na(remref)) { # no remote branch to compare against
return(FALSE)
}
comp <- git_branch_compare(branch, remref)
comp$remote_only == 0 && comp$local_only <= max_local
}
git_push <- function(branch = git_branch(), remref = NULL, verbose = TRUE) {
remref <- remref %||% git_branch_tracking(branch)
if (verbose) {
ui_done("Pushing local {ui_value(branch)} branch to {ui_value(remref)}.")
}
gert::git_push(
remote = remref_remote(remref),
refspec = glue("refs/heads/{branch}:refs/heads/{remref_branch(remref)}"),
verbose = FALSE,
repo = git_repo()
)
}
git_push_first <- function(branch = git_branch(), remote = "origin", verbose = TRUE) {
if (verbose) {
remref <- glue("{remote}/{branch}")
ui_done("
Pushing {ui_value(branch)} branch to GitHub and setting \\
{ui_value(remref)} as upstream branch"
)
}
gert::git_push(
remote = remote,
set_upstream = TRUE,
verbose = FALSE,
repo = git_repo()
)
}
# Checks ------------------------------------------------------------------
check_current_branch <- function(is = NULL, is_not = NULL,
message = NULL) {
gb <- git_branch()
if (!is.null(is)) {
check_string(is)
if (gb == is) {
return(invisible())
} else {
msg <- message %||%
"Must be on branch {ui_value(is)}, not {ui_value(gb)}."
ui_stop(msg)
}
}
if (!is.null(is_not)) {
check_string(is_not)
if (gb != is_not) {
return(invisible())
} else {
msg <- message %||%
"Can't be on branch {ui_value(gb)}."
ui_stop(msg)
}
}
invisible()
}
# examples of remref: upstream/main, origin/foofy
check_branch_up_to_date <- function(direction = c("pull", "push"),
remref = NULL,
use = NULL) {
direction <- match.arg(direction)
branch <- git_branch()
remref <- remref %||% git_branch_tracking(branch)
use <- use %||% switch(direction, pull = "git pull", push = "git push")
if (is.na(remref)) {
ui_done("Local branch {ui_value(branch)} is not tracking a remote branch.")
return(invisible())
}
if (direction == "pull") {
ui_done("
Checking that local branch {ui_value(branch)} has the changes \\
in {ui_value(remref)}")
} else {
ui_done("
Checking that remote branch {ui_value(remref)} has the changes \\
in {ui_value(branch)}")
}
comparison <- git_branch_compare(branch, remref)
# TODO: properly pluralize "commit(s)" when I switch to cli
if (direction == "pull") {
if (comparison$remote_only == 0) {
return(invisible())
} else {
ui_stop("
Local branch {ui_value(branch)} is behind {ui_value(remref)} by \\
{comparison$remote_only} commit(s).
Please use {ui_code(use)} to update.")
}
} else {
if (comparison$local_only == 0) {
return(invisible())
} else {
# TODO: consider offering to push for them?
ui_stop("
Local branch {ui_value(branch)} is ahead of {ui_value(remref)} by \\
{comparison$local_only} commit(s).
Please use {ui_code(use)} to update.")
}
}
}
check_branch_pulled <- function(remref = NULL, use = NULL) {
check_branch_up_to_date(direction = "pull", remref = remref, use = use)
}
check_branch_pushed <- function(remref = NULL, use = NULL) {
check_branch_up_to_date(direction = "push", remref = remref, use = use)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-git.R
|
# OWNER/REPO --> OWNER, REPO
parse_repo_spec <- function(repo_spec) {
repo_split <- strsplit(repo_spec, "/")[[1]]
if (length(repo_split) != 2) {
ui_stop("{ui_code('repo_spec')} must be of form {ui_value('owner/repo')}.")
}
list(owner = repo_split[[1]], repo = repo_split[[2]])
}
spec_owner <- function(repo_spec) parse_repo_spec(repo_spec)$owner
spec_repo <- function(repo_spec) parse_repo_spec(repo_spec)$repo
# OWNER, REPO --> OWNER/REPO
make_spec <- function(owner = NA, repo = NA) {
no_spec <- is.na(owner) | is.na(repo)
as.character(ifelse(no_spec, NA, glue("{owner}/{repo}")))
}
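# Editor's sketch (illustrative, not in the original source):
# parse_repo_spec("r-lib/usethis")   # list(owner = "r-lib", repo = "usethis")
# spec_owner("r-lib/usethis")        # "r-lib"
# make_spec("r-lib", "usethis")      # "r-lib/usethis"
# make_spec()                        # NA_character_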
# named vector or list of GitHub URLs --> data frame of URL parts
# more general than the name suggests
# definitely designed for GitHub URLs but not overtly GitHub-specific
# https://stackoverflow.com/questions/2514859/regular-expression-for-git-repository
# https://git-scm.com/docs/git-clone#_git_urls
# https://stackoverflow.com/questions/27745/getting-parts-of-a-url-regex
github_remote_regex <- paste0(
"^",
"(?<protocol>\\w+://)?",
"(?<user>.+@)?",
"(?<host>[^/:]+)",
"[/:]",
"(?<repo_owner>[^/]+)",
"/",
"(?<repo_name>[^/#]+)",
"(?<fragment>.*)",
"$"
)
parse_github_remotes <- function(x) {
# https://github.com/r-lib/usethis
# --> https, github.com, r-lib, usethis
# https://github.com/r-lib/usethis.git
# --> https, github.com, r-lib, usethis
# https://github.com/r-lib/usethis#readme
# --> https, github.com, r-lib, usethis
# https://github.com/r-lib/usethis/issues/1169
# --> https, github.com, r-lib, usethis
# https://github.acme.com/r-lib/devtools.git
# --> https, github.acme.com, r-lib, devtools
# [email protected]:r-lib/usethis.git
# --> ssh, github.com, r-lib, usethis
# ssh://[email protected]/rstudio/packrat.git
# --> ssh, github.com, rstudio, packrat
dat <- re_match(x, github_remote_regex)
dat$protocol <- sub("://$", "", dat$protocol)
dat$user <- sub("@$", "", dat$user)
dat$repo_name <- sub("[.]git$", "", dat$repo_name)
dat$url <- dat$.text
# as.character() necessary for edge case of length-0 input
dat$protocol <- as.character(ifelse(dat$protocol == "https", "https", "ssh"))
dat$name <- if (rlang::is_named(x)) {
names(x)
} else {
rep_len(NA_character_, length.out = nrow(dat))
}
dat[c("name", "url", "host", "repo_owner", "repo_name", "protocol")]
}
parse_repo_url <- function(x) {
check_name(x)
dat <- re_match(x, github_remote_regex)
if (is.na(dat$.match)) {
list(repo_spec = x, host = NULL)
} else {
dat <- parse_github_remotes(x)
# TODO: generalize here for GHE hosts that don't include 'github'
if (!grepl("github", dat$host)) {
ui_stop("URL doesn't seem to be associated with GitHub: {ui_value(x)}")
}
list(
repo_spec = make_spec(owner = dat$repo_owner, repo = dat$repo_name),
host = glue("https://{dat$host}")
)
}
}
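# Editor's sketch (illustrative, not in the original source): a bare spec is
# passed through, while a full URL is parsed into spec + host.
# parse_repo_url("r-lib/usethis")
# #> list(repo_spec = "r-lib/usethis", host = NULL)
# parse_repo_url("https://github.com/r-lib/usethis")
# #> list(repo_spec = "r-lib/usethis", host = "https://github.com")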
github_url_from_git_remotes <- function() {
tr <- tryCatch(target_repo(github_get = NA), error = function(e) NULL)
if (is.null(tr)) {
return()
}
parsed <- parse_github_remotes(tr$url)
glue_data_chr(parsed, "https://{host}/{repo_owner}/{repo_name}")
}
#' Gather LOCAL data on GitHub-associated remotes
#'
#' Creates a data frame where each row represents a GitHub-associated remote.
#' The data frame is initialized via `gert::git_remote_list()`, possibly
#' filtered for specific remote names. The remote URLs are parsed into parts,
#' like `host` and `repo_owner`. This is filtered again for rows where the
#' `host` appears to be a GitHub deployment (currently a crude search for
#' "github"). Some of these parts are recombined or embellished to get new
#' columns (`host_url`, `api_url`, `repo_spec`). All operations are entirely
#' mechanical and local.
#'
#' @param these Intersect the list of remotes with `these` remote names. To keep
#' all remotes, use `these = NULL` or `these = character()`.
#' @param x Data frame with character columns `name` and `url`. Exposed as an
#' argument for internal reasons. It's so we can call the functions that
#' marshal info about GitHub remotes with 0-row input to obtain a properly
#' typed template without needing a Git repo or calling GitHub. We just want
#' to get a data frame with zero rows, but with the column names and types
#' implicit in our logic.
#' @keywords internal
#' @noRd
github_remote_list <- function(these = c("origin", "upstream"), x = NULL) {
x <- x %||% gert::git_remote_list(repo = git_repo())
check_character(these, allow_null = TRUE)
check_data_frame(x)
check_character(x$name)
check_character(x$url)
if (length(these) > 0) {
x <- x[x$name %in% these, ]
}
parsed <- parse_github_remotes(set_names(x$url, x$name))
# TODO: generalize here for GHE hosts that don't include 'github'
is_github <- grepl("github", parsed$host)
parsed <- parsed[is_github, ]
parsed$remote <- parsed$name
parsed$host_url <- glue_chr("https://{parsed$host}")
parsed$api_url <- map_chr(parsed$host_url, get_apiurl)
parsed$repo_spec <- make_spec(parsed$repo_owner, parsed$repo_name)
parsed[c(
"remote",
"url", "host_url", "api_url", "host", "protocol",
"repo_owner", "repo_name", "repo_spec"
)]
}
#' Gather LOCAL and (maybe) REMOTE data on GitHub-associated remotes
#'
#' Creates a data frame where each row represents a GitHub-associated remote,
#' starting with the output of `github_remote_list()` (local data). This
#' function's job is to (maybe) add information we can only get from the GitHub
#' API. If `github_get = FALSE`, we don't even attempt to call the API.
#' Otherwise, we try and will succeed if gh discovers a suitable token. The
#' resulting data, even if the API data is absent, is massaged into a data
#' frame.
#'
#' @inheritParams github_remote_list
#' @param github_get Whether to attempt to get repo info from the GitHub API. We
#' try for `NA` (the default) and `TRUE`. If we aren't successful, we proceed
#' anyway for `NA` but error for `TRUE`. When `FALSE`, no attempt is made to
#' call the API.
#' @keywords internal
#' @noRd
github_remotes <- function(these = c("origin", "upstream"),
github_get = NA,
x = NULL) {
grl <- github_remote_list(these = these, x = x)
get_gh_repo <- function(repo_owner, repo_name,
api_url = "https://api.github.com") {
if (isFALSE(github_get)) {
f <- function(...) list()
} else {
f <- purrr::possibly(gh::gh, otherwise = list())
}
f(
"GET /repos/{owner}/{repo}",
owner = repo_owner, repo = repo_name, .api_url = api_url
)
}
repo_info <- purrr::pmap(
grl[c("repo_owner", "repo_name", "api_url")],
get_gh_repo
)
# NOTE: these can be two separate matters:
# 1. Did we call the GitHub API? Means we know `is_fork` and the parent repo.
# 2. If so, did we call it with auth? Means we know if we can push.
grl$github_got <- map_lgl(repo_info, ~ length(.x) > 0)
if (isTRUE(github_get) && any(!grl$github_got)) {
oops <- which(!grl$github_got)
oops_remotes <- grl$remote[oops]
oops_hosts <- unique(grl$host[oops])
ui_stop("
Unable to get GitHub info for these remotes: {ui_value(oops_remotes)}
Are we offline? Is GitHub down? Has the repo been deleted?
Otherwise, you probably need to configure a personal access token (PAT) \\
for {ui_value(oops_hosts)}
See {ui_code('?gh_token_help')} for advice")
}
grl$default_branch <- map_chr(repo_info, "default_branch", .default = NA)
grl$is_fork <- map_lgl(repo_info, "fork", .default = NA)
# `permissions` is an example of data that is not present if the request
# did not include a PAT
grl$can_push <- map_lgl(repo_info, c("permissions", "push"), .default = NA)
grl$can_admin <- map_lgl(repo_info, c("permissions", "admin"), .default = NA)
grl$perm_known <- !is.na(grl$can_push)
grl$parent_repo_owner <-
map_chr(repo_info, c("parent", "owner", "login"), .default = NA)
grl$parent_repo_name <-
map_chr(repo_info, c("parent", "name"), .default = NA)
grl$parent_repo_spec <- make_spec(grl$parent_repo_owner, grl$parent_repo_name)
parent_info <- purrr::pmap(
set_names(
grl[c("parent_repo_owner", "parent_repo_name", "api_url")],
~ sub("parent_", "", .x)
),
get_gh_repo
)
grl$can_push_to_parent <-
map_lgl(parent_info, c("permissions", "push"), .default = NA)
grl
}
#' Classify the GitHub remote configuration
#'
#' @description
#' Classify the active project's GitHub remote situation, so diagnostic and
#' other downstream functions can decide whether to proceed / abort / complain &
#' offer to fix.
#' We only consider the remotes where:
#' * Name is `origin` or `upstream` and the remote URL "looks like github"
#' (github.com or a GHE deployment)
#'
#' We have to call the GitHub API to fully characterize the GitHub remote
#' situation. That's the only way to learn if the user can push to a remote,
#' whether a remote is a fork, and which repo is the parent of a fork.
#' `github_get` controls whether we make these API calls.
#'
#' Some functions can get by with the information that's available locally, i.e.
#' we can use simple logic to decide whether to target `origin` or `upstream` or
#' present the user with a choice. We can set `github_get = FALSE` in this case.
#' Other functions, like the `pr_*()` functions, are more demanding and we'll
#' always determine the config with `github_get = TRUE`.
#'
#' Most usethis functions should call the higher-level functions `target_repo()`
#' or `target_repo_spec()`.
#'
#' Only functions that really need full access to the GitHub remote config
#' should call this directly. Ways to work with a config:
#' * `cfg <- github_remote_config(github_get = )`
#' * `check_for_bad_config(cfg)` errors for obviously bad configs (by default)
#' or you can specify the configs considered to be bad
#' * Emit a custom message then call `stop_bad_github_remote_config()` directly
#' * If the config is suboptimal-but-supported, use
#' `ui_github_remote_config_wat()` to educate the user and give them a chance
#' to back out.
#'
#' Fields in an instance of `github_remote_config`:
#' * `type`: explained below
#' * `pr_ready`: Logical. Do the `pr_*()` functions support it?
#' * `desc`: A description used in messages and menus.
#' * `origin`: Information about the `origin` GitHub remote.
#' * `upstream`: Information about the `upstream` GitHub remote.
#'
#' Possible GitHub remote configurations, the common cases:
#' * no_github: No `origin`, no `upstream`.
#' * ours: `origin` exists, is not a fork, and we can push to it. Owner of
#' `origin` could be current user, another user, or an org. No `upstream`.
#' - Less common variant: `upstream` exists, `origin` does not, and we can
#' push to `upstream`. The fork-ness of `upstream` is not consulted.
#' * fork: `origin` exists and we can push to it. `origin` is a fork of the repo
#' configured as `upstream`. We may or may not be able to push to `upstream`.
#' * theirs: Exactly one of `origin` and `upstream` exist and we can't push to
#' it. The fork-ness of this remote repo is not consulted.
#'
#' Possible GitHub remote configurations, the peculiar ones:
#' * fork_upstream_is_not_origin_parent: `origin` exists, it's a fork, but its
#' parent repo is not configured as `upstream`. Either there's no `upstream`
#' or `upstream` exists but it's not the parent of `origin`.
#' * fork_cannot_push_origin: `origin` is a fork and its parent is configured
#' as `upstream`. But we can't push to `origin`.
#' * upstream_but_origin_is_not_fork: `origin` and `upstream` both exist, but
#' `origin` is not a fork of anything and, specifically, it's not a fork of
#' `upstream`.
#'
#' Remote configuration "guesses" we apply when `github_get = FALSE` or when
#' we make unauthorized requests (no PAT found) and therefore have no info on
#' permissions:
#' * maybe_ours_or_theirs: Exactly one of `origin` and `upstream` exists.
#' * maybe_fork: Both `origin` and `upstream` exist.
#'
#' @inheritParams github_remotes
#' @keywords internal
#' @noRd
new_github_remote_config <- function() {
ptype <- github_remotes(
x = data.frame(name = character(), url = character(), stringsAsFactors = FALSE)
)
# 0-row df --> a well-named list of properly typed NAs
ptype <- map(ptype, ~ c(NA, .x))
structure(
list(
type = NA_character_,
host_url = NA_character_,
pr_ready = FALSE,
desc = "Unexpected remote configuration.",
origin = c(name = "origin", is_configured = FALSE, ptype),
upstream = c(name = "upstream", is_configured = FALSE, ptype)
),
class = "github_remote_config"
)
}
github_remote_config <- function(github_get = NA) {
cfg <- new_github_remote_config()
grl <- github_remotes(github_get = github_get)
if (nrow(grl) == 0) {
return(cfg_no_github(cfg))
}
cfg$origin$is_configured <- "origin" %in% grl$remote
cfg$upstream$is_configured <- "upstream" %in% grl$remote
single_remote <- xor(cfg$origin$is_configured, cfg$upstream$is_configured)
if (!single_remote) {
if (length(unique(grl$host)) != 1) {
ui_stop("
Internal error: Multiple GitHub hosts
{ui_value(grl$host)}")
}
if (length(unique(grl$github_got)) != 1) {
ui_stop("
Internal error: Got GitHub API info for some remotes, but not all
Do all the remotes still exist? Do you still have access?")
}
if (length(unique(grl$perm_known)) != 1) {
ui_stop("
Internal error: Know GitHub permissions for some remotes, but not all")
}
}
cfg$host_url <- unique(grl$host_url)
github_got <- any(grl$github_got)
perm_known <- any(grl$perm_known)
if (cfg$origin$is_configured) {
cfg$origin <-
utils::modifyList(cfg$origin, grl[grl$remote == "origin", ])
}
if (cfg$upstream$is_configured) {
cfg$upstream <-
utils::modifyList(cfg$upstream, grl[grl$remote == "upstream", ])
}
if (github_got && !single_remote) {
cfg$origin$parent_is_upstream <-
identical(cfg$origin$parent_repo_spec, cfg$upstream$repo_spec)
}
if (!github_got || !perm_known) {
if (single_remote) {
return(cfg_maybe_ours_or_theirs(cfg))
} else {
return(cfg_maybe_fork(cfg))
}
}
# `github_got` must be TRUE
# `perm_known` must be TRUE
# origin only
if (single_remote && cfg$origin$is_configured) {
if (cfg$origin$is_fork) {
if (cfg$origin$can_push) {
return(cfg_fork_upstream_is_not_origin_parent(cfg))
} else {
return(cfg_theirs(cfg))
}
} else {
if (cfg$origin$can_push) {
return(cfg_ours(cfg))
} else {
return(cfg_theirs(cfg))
}
}
}
# upstream only
if (single_remote && cfg$upstream$is_configured) {
if (cfg$upstream$can_push) {
return(cfg_ours(cfg))
} else {
return(cfg_theirs(cfg))
}
}
# origin and upstream
if (cfg$origin$is_fork) {
if (cfg$origin$parent_is_upstream) {
if (cfg$origin$can_push) {
return(cfg_fork(cfg))
} else {
return(cfg_fork_cannot_push_origin(cfg))
}
} else {
return(cfg_fork_upstream_is_not_origin_parent(cfg))
}
} else {
return(cfg_upstream_but_origin_is_not_fork(cfg))
}
}
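# Editor's sketch (illustrative, not in the original source): typical
# downstream use of the config object built above.
# cfg <- github_remote_config(github_get = NA)
# cfg$type       # e.g. "ours", "fork", "theirs", "maybe_fork", ...
# cfg$pr_ready   # TRUE, FALSE, or NA for the "maybe" configs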
#' Select a target (GitHub) repo
#'
#' @description
#' Returns information about ONE GitHub repository. Used when we need to
#' designate which repo we will, e.g., open an issue on or activate a CI service
#' for. This information might be used in a GitHub API request or to form URLs.
#'
#' Examples:
#' * Badge URLs
#' * URLs where you can activate a CI service
#' * URLs for DESCRIPTION fields such as URL and BugReports
#' `target_repo()` passes `github_get` along to `github_remote_config()`. If
#' `github_get = TRUE`, `target_repo()` will error for configs other than
#' `"ours"` or `"fork"`. `target_repo()` always errors for bad configs. If
#' `github_get = NA` or `FALSE`, the "maybe" configs are tolerated.
#'
#' `target_repo_spec()` is a less capable function for when you just need an
#' `OWNER/REPO` spec. Currently, it does not set or offer control over
#' `github_get`, although I've considered explicitly setting `github_get =
#' FALSE` or adding this argument, defaulting to `FALSE`.
#'
#' @inheritParams github_remotes
#' @param cfg An optional GitHub remote configuration. Used to get the target
#' repo when the function had some need for the full config.
#' @param role We use "source" to mean the principal repo where a project's
#' development happens. We use "primary" to mean the principal repo this
#' particular user interacts with or has the greatest power over. They can be
#' the same or different. Examples:
#' * For a personal project you own, "source" and "primary" are the same.
#' Presumably the `origin` remote.
#' * For a collaboratively developed project, an outside contributor must create
#' a fork in order to make a PR. For such a person, their fork is "primary"
#' (presumably `origin`) and the original repo that they forked is "source"
#' (presumably `upstream`).
#' This is *almost* consistent with terminology used by the GitHub API. A fork
#' has a "source repo" and a "parent repo", which are usually the same. They
#' only differ when working with a fork of a repo that is itself a fork. In this
#' rare case, the parent is the immediate fork parent and the source is the
#' ur-parent, i.e. the root of this particular tree. The source repo is not a
#' fork.
#' @param ask In some configurations, if `ask = TRUE` and we're in an
#' interactive session, user gets a choice between `origin` and `upstream`.
#' @keywords internal
#' @noRd
target_repo <- function(cfg = NULL,
github_get = NA,
role = c("source", "primary"),
ask = is_interactive(),
ok_configs = c("ours", "fork", "theirs")) {
cfg <- cfg %||% github_remote_config(github_get = github_get)
stopifnot(inherits(cfg, "github_remote_config"))
role <- match.arg(role)
check_for_bad_config(cfg)
if (isTRUE(github_get)) {
check_for_config(cfg, ok_configs = ok_configs)
}
# upstream only
if (cfg$upstream$is_configured && !cfg$origin$is_configured) {
return(cfg$upstream)
}
# origin only
if (cfg$origin$is_configured && !cfg$upstream$is_configured) {
return(cfg$origin)
}
if (!ask || !is_interactive()) {
return(switch(
role,
source = cfg$upstream,
primary = cfg$origin
))
}
choices <- c(
origin = glue("{cfg$origin$repo_spec} = {ui_value('origin')}"),
upstream = glue("{cfg$upstream$repo_spec} = {ui_value('upstream')}")
)
title <- glue("Which repo should we target?")
choice <- utils::menu(choices, graphics = FALSE, title = title)
cfg[[names(choices)[choice]]]
}
target_repo_spec <- function(role = c("source", "primary"),
ask = is_interactive()) {
tr <- target_repo(role = match.arg(role), ask = ask)
tr$repo_spec
}
# formatting github remote configurations for humans ---------------------------
format_remote <- function(remote) {
effective_spec <- function(remote) {
if (remote$is_configured) {
ui_value(remote$repo_spec)
} else {
ui_unset("not configured")
}
}
push_clause <- function(remote) {
if (!remote$is_configured || is.na(remote$can_push)) {
return()
}
if (remote$can_push) " (can push)" else " (can not push)"
}
out <- c(
glue("{remote$name} = {effective_spec(remote)}"),
push_clause(remote),
if (isTRUE(remote$is_fork)) {
glue(" = fork of {ui_value(remote$parent_repo_spec)}")
}
)
glue_collapse(out)
}
format_fields <- function(cfg) {
list(
type = glue("Type = {ui_value(cfg$type)}"),
host_url = glue("Host = {ui_value(cfg$host_url)}"),
pr_ready = glue("Config supports a pull request = {ui_value(cfg$pr_ready)}"),
origin = format_remote(cfg$origin),
upstream = format_remote(cfg$upstream),
desc = if (is.na(cfg$desc)) {
glue("Desc = {ui_unset('no description')}")
} else {
glue("Desc = {cfg$desc}")
}
)
}
#' @export
format.github_remote_config <- function(x, ...) {
glue::as_glue(format_fields(x))
}
#' @export
print.github_remote_config <- function(x, ...) {
cat(format(x, ...), sep = "\n")
invisible(x)
}
# refines output of format_fields() to create input better suited to
# ui_github_remote_config_wat() and stop_bad_github_remote_config()
github_remote_config_wat <- function(cfg, context = c("menu", "abort")) {
context <- match.arg(context)
adjective <- switch(context, menu = "Unexpected", abort = "Unsupported")
out <- format_fields(cfg)
out$pr_ready <- NULL
out$type <- glue("{adjective} GitHub remote configuration: {ui_value(cfg$type)}")
out$desc <- if (is.na(cfg$desc)) NULL else cfg$desc
out
}
# returns TRUE if user selects "no" --> exit the calling function
# returns FALSE if user selects "yes" --> keep going, they've been warned
ui_github_remote_config_wat <- function(cfg) {
ui_nope(
github_remote_config_wat(cfg, context = "menu"),
yes = "Yes, I want to proceed. I know what I'm doing.",
no = "No, I want to stop and straighten out my GitHub remotes first.",
shuffle = FALSE
)
}
stop_bad_github_remote_config <- function(cfg) {
abort(
message = unname(unlist(github_remote_config_wat(cfg, context = "abort"))),
class = c("usethis_error_bad_github_remote_config", "usethis_error"),
cfg = cfg
)
}
stop_maybe_github_remote_config <- function(cfg) {
msg <- github_remote_config_wat(cfg)
msg$type <- glue("
Pull request functions can't work with GitHub remote configuration: \\
{ui_value(cfg$type)}
The most likely problem is that we aren't discovering your GitHub \\
personal access token
Call {ui_code('gh_token_help()')} for help")
abort(
message = unname(unlist(msg)),
class = c("usethis_error_invalid_pr_config", "usethis_error"),
cfg = cfg
)
}
check_for_bad_config <- function(cfg,
bad_configs = c(
"no_github",
"fork_upstream_is_not_origin_parent",
"fork_cannot_push_origin",
"upstream_but_origin_is_not_fork"
)) {
if (cfg$type %in% bad_configs) {
stop_bad_github_remote_config(cfg)
}
invisible()
}
check_for_maybe_config <- function(cfg) {
maybe_configs <- grep("^maybe_", all_configs(), value = TRUE)
if (cfg$type %in% maybe_configs) {
stop_maybe_github_remote_config(cfg)
}
invisible()
}
check_for_config <- function(cfg = NULL,
ok_configs = c("ours", "fork", "theirs")) {
cfg <- cfg %||% github_remote_config(github_get = TRUE)
stopifnot(inherits(cfg, "github_remote_config"))
if (cfg$type %in% ok_configs) {
return(invisible(cfg))
}
check_for_maybe_config(cfg)
bad_configs <- grep("^maybe_", all_configs(), invert = TRUE, value = TRUE)
bad_configs <- setdiff(bad_configs, ok_configs)
check_for_bad_config(cfg, bad_configs = bad_configs)
ui_stop("
Internal error: Unexpected GitHub remote configuration: {ui_value(cfg$type)}")
}
check_can_push <- function(tr = target_repo(github_get = TRUE),
objective = "for this operation") {
if (isTRUE(tr$can_push)) {
return(invisible())
}
ui_stop("
You don't seem to have push access for {ui_value(tr$repo_spec)}, which \\
is required {objective}.")
}
# github remote configurations -------------------------------------------------
all_configs <- function() {
c(
"no_github",
"ours",
"theirs",
"maybe_ours_or_theirs",
"fork",
"maybe_fork",
"fork_cannot_push_origin",
"fork_upstream_is_not_origin_parent",
"upstream_but_origin_is_not_fork"
)
}
read_more <- function() {
glue("
Read more about the GitHub remote configurations that usethis supports at:
{ui_value('https://happygitwithr.com/common-remote-setups.html')}")
}
read_more_maybe <- function() {
glue("
Read more about what this GitHub remote configuration means at:
{ui_value('https://happygitwithr.com/common-remote-setups.html')}")
}
cfg_no_github <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "no_github",
pr_ready = FALSE,
desc = glue("
Neither {ui_value('origin')} nor {ui_value('upstream')} is a GitHub \\
repo.
{read_more()}")
)
)
}
cfg_ours <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "ours",
pr_ready = TRUE,
desc = glue("
{ui_value('origin')} is both the source and primary repo.
{read_more()}")
)
)
}
cfg_theirs <- function(cfg) {
configured <- if (cfg$origin$is_configured) "origin" else "upstream"
utils::modifyList(
cfg,
list(
type = "theirs",
pr_ready = FALSE,
desc = glue("
The only configured GitHub remote is {ui_value(configured)}, which
you cannot push to.
If your goal is to make a pull request, you must fork-and-clone.
{ui_code('usethis::create_from_github()')} can do this.
{read_more()}")
)
)
}
cfg_maybe_ours_or_theirs <- function(cfg) {
if (cfg$origin$is_configured) {
configured <- "origin"
not_configured <- "upstream"
} else {
configured <- "upstream"
not_configured <- "origin"
}
utils::modifyList(
cfg,
list(
type = "maybe_ours_or_theirs",
pr_ready = NA,
desc = glue("
{ui_value(configured)} is a GitHub repo and {ui_value(not_configured)} \\
is either not configured or is not a GitHub repo.
We may be offline or you may need to configure a GitHub personal access
token. {ui_code('gh_token_help()')} can help with that.
{read_more_maybe()}")
)
)
}
cfg_fork <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "fork",
pr_ready = TRUE,
desc = glue("
{ui_value('origin')} is a fork of {ui_value(cfg$upstream$repo_spec)}, \\
which is configured as the {ui_value('upstream')} remote.
{read_more()}")
)
)
}
cfg_maybe_fork <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "maybe_fork",
pr_ready = NA,
desc = glue("
Both {ui_value('origin')} and {ui_value('upstream')} appear to be \\
GitHub repos. However, we can't confirm their relationship to each \\
other (e.g., fork and fork parent) or your permissions (e.g. push \\
access).
We may be offline or you may need to configure a GitHub personal access
token. {ui_code('gh_token_help()')} can help with that.
{read_more_maybe()}")
)
)
}
cfg_fork_cannot_push_origin <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "fork_cannot_push_origin",
pr_ready = FALSE,
desc = glue("
The {ui_value('origin')} remote is a fork, but you can't push to it.
{read_more()}")
)
)
}
cfg_fork_upstream_is_not_origin_parent <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "fork_upstream_is_not_origin_parent",
pr_ready = FALSE,
desc = glue("
The {ui_value('origin')} GitHub remote is a fork, but its parent is \\
not configured as the {ui_value('upstream')} remote.
{read_more()}")
)
)
}
cfg_upstream_but_origin_is_not_fork <- function(cfg) {
utils::modifyList(
cfg,
list(
type = "upstream_but_origin_is_not_fork",
pr_ready = FALSE,
desc = glue("
Both {ui_value('origin')} and {ui_value('upstream')} are GitHub \\
remotes, but {ui_value('origin')} is not a fork and, in particular, \\
is not a fork of {ui_value('upstream')}.
{read_more()}")
)
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-github.R
|
# wrappers that apply as.character() to glue functions
glue_chr <- function(...) {
as.character(glue(..., .envir = parent.frame(1)))
}
glue_data_chr <- function(.x, ...) {
as.character(glue_data(.x = .x, ..., .envir = parent.frame(1)))
}
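# Editor's sketch (illustrative, not in the original source): these wrappers
# return plain character vectors rather than "glue" objects.
# x <- "world"
# glue_chr("hello {x}")                                        # "hello world"
# glue_data_chr(list(host = "github.com"), "https://{host}")   # "https://github.com"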
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-glue.R
|
# inlined from
# https://github.com/r-lib/rematch2/commit/aab858e3411810fa107d20db6f936c6b10cbdf3f
# EXCEPT I don't return a tibble
re_match <- function(text, pattern, perl = TRUE, ...) {
check_string(pattern)
text <- as.character(text)
match <- regexpr(pattern, text, perl = perl, ...)
start <- as.vector(match)
length <- attr(match, "match.length")
end <- start + length - 1L
matchstr <- substring(text, start, end)
matchstr[ start == -1 ] <- NA_character_
res <- data.frame(
stringsAsFactors = FALSE,
.text = text,
.match = matchstr
)
if (!is.null(attr(match, "capture.start"))) {
gstart <- attr(match, "capture.start")
glength <- attr(match, "capture.length")
gend <- gstart + glength - 1L
groupstr <- substring(text, gstart, gend)
groupstr[ gstart == -1 ] <- NA_character_
dim(groupstr) <- dim(gstart)
res <- cbind(groupstr, res, stringsAsFactors = FALSE)
}
names(res) <- c(attr(match, "capture.names"), ".text", ".match")
#class(res) <- c("tbl_df", "tbl", class(res))
res
}
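# Editor's sketch (illustrative, not in the original source): named capture
# groups become columns, alongside `.text` (the input) and `.match`.
# re_match("r-lib/usethis", "(?<owner>[^/]+)/(?<repo>[^/]+)")
# #>   owner    repo         .text        .match
# #> 1 r-lib usethis r-lib/usethis r-lib/usethis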
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-rematch2.R
|
# functions to help reduce duplication and increase consistency in the docs
# repo_spec ----
param_repo_spec <- function(...) {
template <- glue("
@param repo_spec \\
Optional GitHub repo specification in this form: `owner/repo`. \\
This can usually be inferred from the GitHub remotes of active \\
project.
")
dots <- list2(...)
if (length(dots) > 0) {
template <- c(template, dots)
}
glue_collapse(template, sep = " ")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-roxygen.R
|
# opening act of an eventual transition away from the ui_*() functions and towards
# the cli-mediated UI we're using in other packages
usethis_abort <- function(message, ..., class = NULL, .envir = parent.frame()) {
#cli::cli_div(theme = usethis_theme())
cli::cli_abort(
message,
class = c(class, "usethis_error"),
.envir = .envir,
...
)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils-ui.R
|
can_overwrite <- function(path) {
if (!file_exists(path)) {
return(TRUE)
}
if (getOption("usethis.overwrite", FALSE)) {
# don't activate a project
# don't assume `path` is in the active project
if (is_in_proj(path) && uses_git()) { # path is in active project
return(TRUE)
}
if (possibly_in_proj(path) && # path is some other project
with_project(proj_find(path), uses_git(), quiet = TRUE)) {
return(TRUE)
}
}
if (is_interactive()) {
ui_yeah("Overwrite pre-existing file {ui_path(path)}?")
} else {
FALSE
}
}
check_is_named_list <- function(x, nm = deparse(substitute(x))) {
if (!is_list(x)) {
bad_class <- paste(class(x), collapse = "/")
ui_stop("{ui_code(nm)} must be a list, not {ui_value(bad_class)}.")
}
if (!is_dictionaryish(x)) {
ui_stop(
"Names of {ui_code(nm)} must be non-missing, non-empty, and non-duplicated."
)
}
x
}
dots <- function(...) {
eval(substitute(alist(...)))
}
asciify <- function(x) {
check_character(x)
gsub("[^a-zA-Z0-9_-]+", "-", x)
}
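# Editor's sketch (illustrative, not in the original source):
# asciify("my example file.R")   # "my-example-file-R"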
compact <- function(x) {
is_empty <- vapply(x, function(x) length(x) == 0, logical(1))
x[!is_empty]
}
# Needed for mocking
is_installed <- function(pkg) {
rlang::is_installed(pkg)
}
isFALSE <- function(x) {
identical(x, FALSE)
}
isNA <- function(x) {
length(x) == 1 && is.na(x)
}
path_first_existing <- function(paths) {
# manual loop with explicit use of `[[` to retain "fs" class
for (i in seq_along(paths)) {
path <- paths[[i]]
if (file_exists(path)) {
return(path)
}
}
NULL
}
is_online <- function(host) {
bare_host <- sub("^https?://(.*)$", "\\1", host)
!is.null(curl::nslookup(bare_host, error = FALSE))
}
year <- function() format(Sys.Date(), "%Y")
pluck_lgl <- function(.x, ...) {
as.logical(purrr::pluck(.x, ..., .default = NA))
}
pluck_chr <- function(.x, ...) {
as.character(purrr::pluck(.x, ..., .default = NA))
}
pluck_int <- function(.x, ...) {
as.integer(purrr::pluck(.x, ..., .default = NA))
}
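# Editor's sketch (illustrative, not in the original source): typed pluck()
# wrappers that fall back to NA when the element is absent.
# pluck_chr(list(a = list(b = "x")), "a", "b")   # "x"
# pluck_int(list(), "missing")                   # NA_integer_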
is_windows <- function() {
.Platform$OS.type == "windows"
}
# For stability of `stringsAsFactors` across versions
data.frame <- function(..., stringsAsFactors = FALSE) {
base::data.frame(..., stringsAsFactors = stringsAsFactors)
}
# wrapper around check_name() from import-standalone-types-check.R
# for the common case when NULL is allowed (often default)
maybe_name <- function(x, ..., arg = caller_arg(x),
call = caller_env()) {
check_name(x, ..., allow_null = TRUE,
arg = arg, call = call)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/utils.R
|
#' Increment package version
#'
#' @description
#'
#' usethis supports semantic versioning, which is described in more detail in
#' the [version
#' section](https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number) of [R
#' Packages](https://r-pkgs.org). A version number breaks down like so:
#'
#' ```
#' <major>.<minor>.<patch> (released version)
#' <major>.<minor>.<patch>.<dev> (dev version)
#' ```
#' `use_version()` increments the "Version" field in `DESCRIPTION`, adds a new
#' heading to `NEWS.md` (if it exists), commits those changes (if package uses
#' Git), and optionally pushes (if safe to do so). It makes the same update to a
#' line like `PKG_version = "x.y.z";` in `src/version.c` (if it exists).
#'
#' `use_dev_version()` increments to a development version, e.g. from 1.0.0 to
#' 1.0.0.9000. If the existing version is already a development version with
#' four components, it does nothing. Thin wrapper around `use_version()`.
#'
#' @param which A string specifying which level to increment, one of: "major",
#' "minor", "patch", "dev". If `NULL`, user can choose interactively.
#'
#' @seealso The [version
#' section](https://r-pkgs.org/lifecycle.html#sec-lifecycle-version-number) of [R
#' Packages](https://r-pkgs.org).
#'
#' @examples
#' \dontrun{
#' ## for interactive selection, do this:
#' use_version()
#'
#' ## request a specific type of increment
#' use_version("minor")
#' use_dev_version()
#' }
#'
#' @name use_version
NULL
#' @rdname use_version
#' @param push If `TRUE`, also attempts to push the commits to the remote
#' branch.
#' @export
use_version <- function(which = NULL, push = FALSE) {
if (is.null(which) && !is_interactive()) {
return(invisible(FALSE))
}
check_is_package("use_version()")
challenge_uncommitted_changes(
msg = "There are uncommitted changes and you're about to bump version"
)
new_ver <- choose_version("What should the new version be?", which)
if (is.null(new_ver)) {
return(invisible(FALSE))
}
proj_desc_field_update("Version", new_ver, overwrite = TRUE)
if (names(new_ver) == "dev") {
use_news_heading("(development version)")
} else {
use_news_heading(new_ver)
}
use_c_version(new_ver)
git_ask_commit(
glue("Increment version number to {new_ver}"),
untracked = TRUE,
push = push,
paths = c("DESCRIPTION", "NEWS.md", path("src", "version.c"))
)
invisible(TRUE)
}
#' @rdname use_version
#' @export
use_dev_version <- function(push = FALSE) {
check_is_package("use_dev_version()")
if (is_dev_version()) {
return(invisible())
}
use_version(which = "dev", push = push)
}
choose_version <- function(message, which = NULL) {
versions <- bump_version()
rtypes <- names(versions)
which <- which %||% rtypes
which <- arg_match(which, values = rtypes, multiple = TRUE)
versions <- versions[which]
if (length(versions) == 1) {
return(versions)
}
choice <- utils::menu(
choices = glue(
"{format(names(versions), justify = 'right')} --> {versions}"
),
title = glue(
"Current version is {proj_version()}.\n",
"{message} (0 to exit)"
)
)
if (choice == 0) {
invisible()
} else {
# Not using `[[` even though there is only 1 `choice`,
# because that removes the names from `versions`
versions[choice]
}
}
bump_version <- function(ver = proj_version()) {
bumps <- c("major", "minor", "patch", "dev")
vapply(bumps, bump_, character(1), ver = ver)
}
bump_ <- function(x, ver) {
d <- desc::desc(text = paste0("Version: ", ver))
suppressMessages(d$bump_version(x)$get("Version")[[1]])
}
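# Editor's sketch (illustrative, not in the original source): the candidate
# next versions offered for a released version such as 1.2.3.
# bump_version("1.2.3")
# #> major: "2.0.0"  minor: "1.3.0"  patch: "1.2.4"  dev: "1.2.3.9000"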
use_c_version <- function(ver) {
version_path <- proj_path("src", "version.c")
if (!file_exists(version_path)) {
return()
}
hint <- glue("{project_name()}_version")
ui_done("
Setting {ui_field(hint)} to {ui_value(ver)} in {ui_path(version_path)}")
lines <- read_utf8(version_path)
re <- glue("(^.*{project_name()}_version = \")([0-9.]+)(\";$)")
lines <- gsub(re, glue("\\1{ver}\\3"), lines)
write_utf8(version_path, lines)
}
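# Editor's sketch (illustrative; "mypkg" is a hypothetical package name). The
# regex above only rewrites the quoted version in a src/version.c line such as
#   const char *mypkg_version = "1.2.3";
# leaving everything except the digits-and-dots string untouched.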
is_dev_version <- function(version = proj_version()) {
ver <- package_version(version)
length(unlist(ver)) > 3
}
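# Editor's sketch (illustrative, not in the original source):
# is_dev_version("1.2.3")        # FALSE
# is_dev_version("1.2.3.9000")   # TRUE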
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/version.R
|
#' Create a vignette or article
#'
#' Creates a new vignette or article in `vignettes/`. Articles are a special
#' type of vignette that appear on pkgdown websites, but are not included
#' in the package itself (because they are added to `.Rbuildignore`
#' automatically).
#'
#' @section General setup:
#' * Adds needed packages to `DESCRIPTION`.
#' * Adds `inst/doc` to `.gitignore` so built vignettes aren't tracked.
#' * Adds `vignettes/*.html` and `vignettes/*.R` to `.gitignore` so
#' you never accidentally track rendered vignettes.
#' @param name Base for file name to use for new vignette. Should consist only
#' of numbers, letters, `_` and `-`. Lower case is recommended.
#' @param title The title of the vignette.
#' @seealso The [vignettes chapter](https://r-pkgs.org/vignettes.html) of
#' [R Packages](https://r-pkgs.org).
#' @export
#' @examples
#' \dontrun{
#' use_vignette("how-to-do-stuff", "How to do stuff")
#' }
use_vignette <- function(name, title = name) {
check_is_package("use_vignette()")
check_required(name)
check_vignette_name(name)
use_dependency("knitr", "Suggests")
use_dependency("rmarkdown", "Suggests")
proj_desc_field_update("VignetteBuilder", "knitr", overwrite = TRUE)
use_git_ignore("inst/doc")
use_vignette_template("vignette.Rmd", name, title)
invisible()
}
#' @export
#' @rdname use_vignette
use_article <- function(name, title = name) {
check_is_package("use_article()")
deps <- proj_deps()
if (!"rmarkdown" %in% deps$package) {
proj_desc_field_update("Config/Needs/website", "rmarkdown", append = TRUE)
}
use_vignette_template("article.Rmd", name, title, subdir = "articles")
use_build_ignore("vignettes/articles")
invisible()
}
use_vignette_template <- function(template, name, title, subdir = NULL) {
check_name(template)
check_name(name)
check_name(title)
maybe_name(subdir)
use_directory("vignettes")
if (!is.null(subdir)) {
use_directory(path("vignettes", subdir))
}
use_git_ignore(c("*.html", "*.R"), directory = "vignettes")
if (is.null(subdir)) {
path <- path("vignettes", asciify(name), ext = "Rmd")
} else {
path <- path("vignettes", subdir, asciify(name), ext = "Rmd")
}
data <- list(
Package = project_name(),
vignette_title = title,
braced_vignette_title = glue("{{{title}}}")
)
use_template(template,
save_as = path,
data = data,
open = TRUE
)
path
}
check_vignette_name <- function(name) {
if (!valid_vignette_name(name)) {
ui_stop(c(
"{ui_value(name)} is not a valid filename for a vignette. It must:",
"* Start with a letter.",
"* Contain only letters, numbers, '_', and '-'."
))
}
}
# https://cran.r-project.org/doc/manuals/r-release/R-exts.html#Writing-package-vignettes
# "To ensure that they can be accessed from a browser (as an HTML index is
# provided), the file names should start with an ASCII letter and be comprised
# entirely of ASCII letters or digits or hyphen or underscore."
valid_vignette_name <- function(x) {
grepl("^[[:alpha:]][[:alnum:]_-]*$", x)
}
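# Editor's sketch (illustrative, not in the original source):
# valid_vignette_name("getting-started")   # TRUE
# valid_vignette_name("2fast2furious")     # FALSE (starts with a digit)
# valid_vignette_name("my vignette")       # FALSE (contains a space)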
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/vignette.R
|
# unexported function we are experimenting with
use_vscode_debug <- function(open = rlang::is_interactive()) {
usethis::use_directory(".vscode", ignore = TRUE)
deps <- proj_deps()
lt_pkgs <- deps$package[deps$type == "LinkingTo"]
possibly_path_package <- purrr::possibly(path_package, otherwise = NA)
lt_paths <- map_chr(lt_pkgs, ~ possibly_path_package(.x, "include"))
lt_paths <- purrr::discard(lt_paths, is.na)
# this is a bit fiddly, but it produces the desired JSON when lt_paths has
# length 0 or > 0
# I should probably come back and use jsonlite here instead of use_template()
lt_paths <- encodeString(lt_paths, quote = '"')
lt_paths <- glue(" {lt_paths},")
lt_paths <- glue_collapse(lt_paths, sep = "\n")
if (length(lt_paths) > 0) {
lt_paths <- paste0("\n", lt_paths)
}
use_template(
"vscode-c_cpp_properties.json",
save_as = path(".vscode", "c_cpp_properties.json"),
data = list(linking_to_includes = lt_paths),
ignore = FALSE, # the .vscode directory is already ignored
open = open
)
use_template(
"vscode-launch.json",
save_as = path(".vscode", "launch.json"),
ignore = FALSE, # the .vscode directory is already ignored
open = open
)
usethis::use_directory("debug", ignore = TRUE)
use_template(
"vscode-debug.R",
save_as = path("debug", "debug.R"),
ignore = FALSE, # the debug directory is already ignored
open = open
)
invisible(TRUE)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/vscode.R
|
#' Write into or over a file
#'
#' Helpers to write into or over a new or pre-existing file. Designed mostly for
#' internal use. File is written with UTF-8 encoding.
#'
#' @name write-this
#' @param path Path to target file. It is created if it does not exist, but the
#' parent directory must exist.
#' @param lines Character vector of lines. For `write_union()`, these are lines
#' to add to the target file, if not already present. For `write_over()`,
#' these are the exact lines desired in the target file.
#' @param quiet Logical. Whether to message about what is happening.
#' @return Logical indicating whether a write occurred, invisibly.
#' @keywords internal
#'
#' @examples
#' \dontshow{
#' .old_wd <- setwd(tempdir())
#' }
#' write_union("a_file", letters[1:3])
#' readLines("a_file")
#' write_union("a_file", letters[1:5])
#' readLines("a_file")
#'
#' write_over("another_file", letters[1:3])
#' readLines("another_file")
#' write_over("another_file", letters[1:3])
#' \dontrun{
#' ## will error if user isn't present to approve the overwrite
#' write_over("another_file", letters[3:1])
#' }
#'
#' ## clean up
#' file.remove("a_file", "another_file")
#' \dontshow{
#' setwd(.old_wd)
#' }
NULL
#' @describeIn write-this writes lines to a file, taking the union of what's
#' already there, if anything, and some new lines. Note, there is no explicit
#' promise about the line order. Designed to modify simple config files like
#' `.Rbuildignore` and `.gitignore`.
#' @export
write_union <- function(path, lines, quiet = FALSE) {
check_name(path)
check_character(lines)
check_bool(quiet)
path <- user_path_prep(path)
if (file_exists(path)) {
existing_lines <- read_utf8(path)
} else {
existing_lines <- character()
}
new <- setdiff(lines, existing_lines)
if (length(new) == 0) {
return(invisible(FALSE))
}
if (!quiet) {
ui_done("Adding {ui_value(new)} to {ui_path(path)}")
}
all <- c(existing_lines, new)
write_utf8(path, all)
}
#' @describeIn write-this writes a file with specific lines, creating it if
#' necessary or overwriting existing, if proposed contents are not identical
#' and user is available to give permission.
#' @param overwrite Force overwrite of existing file?
#' @export
write_over <- function(path, lines, quiet = FALSE, overwrite = FALSE) {
check_name(path)
check_character(lines)
stopifnot(length(lines) > 0)
check_bool(quiet)
check_bool(overwrite)
path <- user_path_prep(path)
if (same_contents(path, lines)) {
return(invisible(FALSE))
}
if (overwrite || can_overwrite(path)) {
if (!quiet) {
ui_done("Writing {ui_path(path)}")
}
write_utf8(path, lines)
} else {
if (!quiet) {
ui_done("Leaving {ui_path(path)} unchanged")
}
invisible(FALSE)
}
}
read_utf8 <- function(path, n = -1L) {
base::readLines(path, n = n, encoding = "UTF-8", warn = FALSE)
}
write_utf8 <- function(path, lines, append = FALSE, line_ending = NULL) {
check_name(path)
check_character(lines)
file_mode <- if (append) "ab" else "wb"
con <- file(path, open = file_mode, encoding = "utf-8")
withr::defer(close(con))
if (is.null(line_ending)) {
if (is_in_proj(path)) { # path is in active project
line_ending <- proj_line_ending()
} else if (possibly_in_proj(path)) { # path is some other project
line_ending <-
with_project(proj_find(path), proj_line_ending(), quiet = TRUE)
} else {
line_ending <- platform_line_ending()
}
}
# convert embedded newlines
lines <- gsub("\r?\n", line_ending, lines)
base::writeLines(enc2utf8(lines), con, sep = line_ending, useBytes = TRUE)
invisible(TRUE)
}
same_contents <- function(path, contents) {
if (!file_exists(path)) {
return(FALSE)
}
identical(read_utf8(path), contents)
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/R/write.R
|
---
title: "{{{ vignette_title }}}"
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
comment = "#>"
)
```
```{r setup}
library({{Package}})
```
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/article.Rmd
|
bibentry(
bibtype = "Article",
title = ,
author = ,
journal = ,
year = ,
volume = ,
number = ,
pages = ,
doi =
)
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/citation-template.R
|
library(testthat)
library({{{ name }}})
if (requireNamespace("xml2")) {
test_check("{{{ name }}}", reporter = MultiReporter$new(reporters = list(JunitReporter$new(file = "test-results.xml"), CheckReporter$new())))
} else {
test_check("{{{ name }}}")
}
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/junit-testthat.R
|
## code to prepare `{{{name}}}` dataset goes here
usethis::use_data({{{name}}}, overwrite = TRUE)
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/packagename-data-prep.R
|
#' @keywords internal
"_PACKAGE"
## usethis namespace: start
## usethis namespace: end
NULL
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/packagename-package.R
|
#' Pipe operator
#'
#' See \code{magrittr::\link[magrittr:pipe]{\%>\%}} for details.
#'
#' @name %>%
#' @rdname pipe
#' @keywords internal
#' @export
#' @importFrom magrittr %>%
#' @usage lhs \%>\% rhs
#' @param lhs A value or the magrittr placeholder.
#' @param rhs A function call using the magrittr semantics.
#' @return The result of calling `rhs(lhs)`.
NULL
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/pipe.R
|
---
title: "Template Title"
author: "Your Name"
date: "The Date"
output: output_format
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
```
## Adding an RMarkdown Template
This file is what a user will see when they select your template. Make sure that you update the fields in the yaml header. In particular you will want to update the `output` field to whatever format your template requires.
This is a good place to demonstrate special features that your template provides. Ideally it should knit out-of-the-box, or at least contain clear instructions as to what needs changing.
Finally, be sure to remove this message!
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/rmarkdown-template.Rmd
|
test_that("multiplication works", {
expect_equal(2 * 2, 4)
})
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/test-example-2.1.R
|
# This file is part of the standard setup for testthat.
# It is recommended that you do not modify it.
#
# Where should you do additional test configuration?
# Learn more about the roles of various files in:
# * https://r-pkgs.org/testing-design.html#sec-tests-files-overview
# * https://testthat.r-lib.org/articles/special-files.html
library(testthat)
library({{{ name }}})
test_check("{{{ name }}}")
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/testthat.R
|
---
title: "{{{ tutorial_title }}}"
output: learnr::tutorial
runtime: shiny_prerendered
---
```{r setup, include=FALSE}
library(learnr)
knitr::opts_chunk$set(echo = FALSE)
```
## Topic 1
### Exercise
*Here's a simple exercise with an empty code chunk provided for entering the answer.*
Write the R code required to add two plus two:
```{r two-plus-two, exercise=TRUE}
```
### Exercise with Code
*Here's an exercise with some prepopulated code as well as `exercise.lines = 5` to provide a bit more initial room to work.*
Now write a function that adds any two numbers and then call it:
```{r add-function, exercise=TRUE, exercise.lines = 5}
add <- function() {
}
```
## Topic 2
### Exercise with Hint
*Here's an exercise where the chunk is pre-evaluated via the `exercise.eval` option (so the user can see the default output we'd like them to customize). We also add a "hint" to the correct solution via the chunk immediate below labeled `print-limit-hint`.*
Modify the following code to limit the number of rows printed to 5:
```{r print-limit, exercise=TRUE, exercise.eval=TRUE}
mtcars
```
```{r print-limit-hint}
head(mtcars)
```
### Quiz
*You can include any number of single or multiple choice questions as a quiz. Use the `question` function to define a question and the `quiz` function for grouping multiple questions together.*
Some questions to verify that you understand the purposes of various base and recommended R packages:
```{r quiz}
quiz(
question("Which package contains functions for installing other R packages?",
answer("base"),
answer("tools"),
answer("utils", correct = TRUE),
answer("codetools")
),
question("Which of the R packages listed below are used to create plots?",
answer("lattice", correct = TRUE),
answer("tools"),
answer("stats"),
answer("grid", correct = TRUE)
)
)
```
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/tutorial-template.Rmd
|
---
title: "{{{ vignette_title }}}"
output: rmarkdown::html_vignette
vignette: >
%\VignetteIndexEntry{{{ braced_vignette_title }}}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
comment = "#>"
)
```
```{r setup}
library({{Package}})
```
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/vignette.Rmd
|
devtools::clean_dll()
devtools::load_all()
1 + 1
|
/scratch/gouwar.j/cran-all/cranData/usethis/inst/templates/vscode-debug.R
|