Commit 5f36b1a

Michael Chirico authored and committed
revert usage of gettextf
1 parent 06ae5a7 commit 5f36b1a

File tree

6 files changed: +28 lines, -53 lines

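
Every hunk in this commit makes the same mechanical change: a sprintf-style gettextf() call tagged for the "R-SparkR" translation domain goes back to plain argument concatenation in stop(), warning(), or message(). A minimal sketch of the two styles, using a made-up config key rather than code from the diff:

# Style being removed: gettextf() fills %s placeholders and marks the string
# for the "R-SparkR" translation catalog; domain = NA on stop() keeps the
# already-translated message from being translated a second time.
key <- "spark.illustrative.option"  # illustrative key, not from the commit
tryCatch(
  stop(gettextf("Config '%s' is not set", key, domain = "R-SparkR"), domain = NA),
  error = conditionMessage
)

# Style being restored: stop() concatenates its ... arguments with no
# separator, so the message is assembled directly from the pieces.
tryCatch(
  stop("Config '", key, "' is not set"),
  error = conditionMessage
)
# Both yield: "Config 'spark.illustrative.option' is not set"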

R/pkg/R/DataFrame.R

Lines changed: 6 additions & 10 deletions
@@ -691,13 +691,13 @@ setMethod("storageLevel",
 
 #' Coalesce
 #'
-#' Returns a new SparkDataFrame that has exactly \code{numPartitions} partitions.
+#' Returns a new SparkDataFrame that has exactly \code{numPartitions} partitions.
 #' This operation results in a narrow dependency, e.g. if you go from 1000 partitions to 100
 #' partitions, there will not be a shuffle, instead each of the 100 new partitions will claim 10 of
 #' the current partitions. If a larger number of partitions is requested, it will stay at the
 #' current number of partitions.
 #'
-#' However, if you're doing a drastic coalesce on a SparkDataFrame, e.g. to numPartitions = 1,
+#' However, if you're doing a drastic coalesce on a SparkDataFrame, e.g. to numPartitions = 1,
 #' this may result in your computation taking place on fewer nodes than
 #' you like (e.g. one node in the case of numPartitions = 1). To avoid this,
 #' call \code{repartition}. This will add a shuffle step, but means the
@@ -829,11 +829,8 @@ setMethod("repartitionByRange",
       jcol <- lapply(cols, function(c) { c@jc })
       sdf <- callJMethod(x@sdf, "repartitionByRange", numToInt(numPartitions), jcol)
     } else {
-      stop(gettextf(
-        "numPartitions and col must be numeric and Column; however, got %s and %s",
-        class(numPartitions), class(col), domain = "R-SparkR"),
-        domain = NA
-      )
+      stop("numPartitions and col must be numeric and Column; however, got ",
+           class(numPartitions), " and ", class(col))
     }
   } else if (!is.null(col)) {
     # only columns are specified
@@ -2620,9 +2617,8 @@ setMethod("join",
       joinType <- gsub("_", "", joinType, fixed = TRUE)
       sdf <- callJMethod(x@sdf, "join", y@sdf, joinExpr@jc, joinType)
     } else {
-      stop(gettextf("joinType must be one of the following types: '%s'",
-        paste(valid_join_types, collapse = "', '"), domain = "R-SparkR"),
-        domain = NA)
+      stop("joinType must be one of the following types: ",
+           "'", paste(valid_join_types, collapse = "', '"), "'")
     }
   }
 }
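
The restored join error assembles its quoted list with paste(collapse = "', '") plus hand-placed outer quotes. A quick sketch of how those pieces compose, with a stand-in vector rather than SparkR's actual valid_join_types:

# Stand-in vector; the real valid_join_types in SparkR is longer.
valid_join_types <- c("inner", "left", "right")
paste0("joinType must be one of the following types: ",
       "'", paste(valid_join_types, collapse = "', '"), "'")
# [1] "joinType must be one of the following types: 'inner', 'left', 'right'"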

R/pkg/R/SQLContext.R

Lines changed: 2 additions & 3 deletions
@@ -112,7 +112,7 @@ sparkR.conf <- function(key, defaultValue) {
   error = function(e) {
     estr <- as.character(e)
     if (any(grepl("java.util.NoSuchElementException", estr, fixed = TRUE))) {
-      stop(gettextf("Config '%s' is not set", key, domain = "R-SparkR"), domain = NA)
+      stop("Config '", key, "' is not set")
     } else {
       stop("Unknown error: ", estr)
     }
@@ -208,8 +208,7 @@ getSchema <- function(schema, firstRow = NULL, rdd = NULL) {
   names <- lapply(names, function(n) {
     nn <- gsub(".", "_", n, fixed = TRUE)
     if (nn != n) {
-      warning(gettextf("Use %s instead of %s as column name",
-        nn, n, domain = "R-SparkR"), domain = NA)
+      warning("Use ", nn, " instead of ", n, " as column name")
     }
     nn
   })
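
A side note on the getSchema() hunk: the gsub() call keeps fixed = TRUE, which is what makes the dot literal; without it, "." is a regex metacharacter matching any character:

gsub(".", "_", "col.name", fixed = TRUE)  # "col_name" - replaces only literal dots
gsub(".", "_", "col.name")                # "________" - regex "." matches every character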

R/pkg/R/client.R

Lines changed: 3 additions & 11 deletions
@@ -102,17 +102,9 @@ checkJavaVersion <- function() {
     javaVersionNum <- as.integer(versions[1])
   }
   if (javaVersionNum < minJavaVersion || javaVersionNum >= maxJavaVersion) {
-    stop(
-      gettextf(
-        "Java version, greater than or equal to %s and less than %s, ",
-        minJavaVersion, maxJavaVersion, domain = "R-SparkR"
-      ),
-      gettextf(
-        "is required for this package; found version: %s",
-        javaVersionStr, domain = "R-SparkR"
-      ),
-      domain = NA
-    )
+    stop("Java version, greater than or equal to ", minJavaVersion,
+         " and less than ", maxJavaVersion, ", is required for this ",
+         "package; found version: ", javaVersionStr)
   }
   return(javaVersionNum)
 }
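
For context on the versions[1] indexing kept above the changed stop(): a hedged sketch of reducing a Java version string to its major number. The example string, the strsplit() call, and the 8/17 bounds here are assumptions for illustration, not checkJavaVersion's actual values:

javaVersionStr <- "11.0.2"                            # assumed example value
versions <- strsplit(javaVersionStr, ".", fixed = TRUE)[[1]]
javaVersionNum <- as.integer(versions[1])             # 11
javaVersionNum < 8 || javaVersionNum >= 17            # FALSE - inside the accepted range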

R/pkg/R/install.R

Lines changed: 11 additions & 17 deletions
@@ -103,14 +103,11 @@ install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
   # can use dir.exists(packageLocalDir) under R 3.2.0 or later
   if (!is.na(file.info(packageLocalDir)$isdir) && !overwrite) {
     if (releaseUrl != "") {
-      message(gettextf("%s found, setting SPARK_HOME to %s",
-        packageName, packageLocalDir, domain = "R-SparkR"),
-        domain = NA)
+      message(packageName, " found, setting SPARK_HOME to ", packageLocalDir)
     } else {
-      message(gettextf("%s for Hadoop %s found, setting SPARK_HOME to %s",
-        version, if (hadoopVersion == "without") "Free build" else hadoopVersion,
-        packageLocalDir, domain = "R-SparkR"),
-        domain = NA)
+      message(version, " for Hadoop ",
+              if (hadoopVersion == "without") "Free build" else hadoopVersion,
+              " found, setting SPARK_HOME to ", packageLocalDir)
     }
     Sys.setenv(SPARK_HOME = packageLocalDir)
     return(invisible(packageLocalDir))
@@ -200,11 +197,9 @@ robustDownloadTar <- function(mirrorUrl, version, hadoopVersion, packageName, pa
     # remove any partially downloaded file
     unlink(packageLocalPath)
     message("Unable to download from default mirror site: ", mirrorUrl)
-    stop(gettextf("Unable to download Spark %s for Hadoop %s. ",
-      version, if (hadoopVersion == "without") "Free build" else hadoopVersion,
-      domain = "R-SparkR"),
-      "Please check network connection, Hadoop version, or provide other mirror sites.",
-      domain = NA)
+    stop("Unable to download Spark ", version,
+         " for Hadoop ", if (hadoopVersion == "without") "Free build" else hadoopVersion,
+         ". Please check network connection, Hadoop version, or provide other mirror sites.")
   }
 }
 
@@ -230,10 +225,9 @@ getPreferredMirror <- function(version, packageName) {
 
 directDownloadTar <- function(mirrorUrl, version, hadoopVersion, packageName, packageLocalPath) {
   packageRemotePath <- paste0(file.path(mirrorUrl, version, packageName), ".tgz")
-  message(gettextf("Downloading %s for Hadoop %s from:\n- %s",
-    version, if (hadoopVersion == "without") "Free build" else hadoopVersion,
-    packageRemotePath, domain = "R-SparkR"),
-    domain = NA)
+  message("Downloading ", version, " for Hadoop ",
+          if (hadoopVersion == "without") "Free build" else hadoopVersion,
+          " from:\n- ", packageRemotePath)
   downloadUrl(packageRemotePath, packageLocalPath)
 }
 
@@ -289,7 +283,7 @@ sparkCachePath <- function() {
       Sys.getenv("XDG_CACHE_HOME", file.path(Sys.getenv("HOME"), ".cache")), "spark")
     }
   } else {
-    stop(gettextf("Unknown OS: %s", .Platform$OS.type, domain = "R-SparkR"), domain = NA)
+    stop("Unknown OS: ", .Platform$OS.type)
   }
   normalizePath(path, mustWork = FALSE)
 }
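
The rewritten message() calls in this file embed if/else directly in the argument list. That works because if is an expression in R and evaluates to the value of the chosen branch; a small demonstration with an illustrative value:

hadoopVersion <- "without"  # illustrative value
message("Hadoop build: ",
        if (hadoopVersion == "without") "Free build" else hadoopVersion)
# Prints: Hadoop build: Free build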

R/pkg/R/sparkR.R

Lines changed: 3 additions & 5 deletions
@@ -439,11 +439,9 @@ sparkR.session <- function(
   rPackageVersion <- paste0(packageVersion("SparkR"))
 
   if (jvmVersionStrip != rPackageVersion) {
-    warning(
-      "Version mismatch between Spark JVM and SparkR package. ",
-      gettextf("JVM version was %s, while R package version was %s",
-        jvmVersion, rPackageVersion, domain = "R-SparkR"),
-      domain = NA)
+    warning("Version mismatch between Spark JVM and SparkR package. ",
+            "JVM version was ", jvmVersion,
+            ", while R package version was ", rPackageVersion)
   }
 
   sparkSession
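
One subtlety preserved in the hunk above: packageVersion() returns a package_version object, and the paste0() wrapper coerces it to a plain string so the != comparison against the JVM-reported version compares text with text. For example (package name chosen only for illustration):

v <- packageVersion("stats")  # class "package_version", e.g. 4.3.1
class(paste0(v))              # "character" - now comparable to a version string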

R/pkg/R/utils.R

Lines changed: 3 additions & 7 deletions
@@ -354,10 +354,8 @@ varargsToStrEnv <- function(...) {
   } else {
     value <- pairs[[name]]
     if (!(is.logical(value) || is.numeric(value) || is.character(value) || is.null(value))) {
-      stop(gettextf("Unsupported type for %s : %s. ",
-        name, class(value), domain = "R-SparkR"),
-        "Supported types are logical, numeric, character and NULL.",
-        call. = FALSE, domain = NA)
+      stop("Unsupported type for ", name, " : ", toString(class(value)), ". ",
+           "Supported types are logical, numeric, character and NULL.")
     }
     if (is.logical(value)) {
       env[[name]] <- tolower(as.character(value))
@@ -371,9 +369,7 @@ varargsToStrEnv <- function(...) {
   }
 
   if (length(ignoredNames) != 0) {
-    warning(gettextf("Unnamed arguments ignored: %s.",
-      paste(ignoredNames, collapse = ", "), domain = "R-SparkR"),
-      call. = FALSE, domain = NA)
+    warning("Unnamed arguments ignored: ", toString(ignoredNames))
   }
   env
 }
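
Note that the restored stop() says toString(class(value)) where the gettextf() version had passed class(value) straight into a %s placeholder, and toString(ignoredNames) likewise replaces paste(ignoredNames, collapse = ", "). The difference matters because class() can return a vector; toString() collapses it into one ", "-separated string before it reaches the message. A sketch with a made-up two-class object:

value <- structure(list(), class = c("tbl_df", "data.frame"))  # illustrative S3 object
class(value)            # c("tbl_df", "data.frame")
toString(class(value))  # "tbl_df, data.frame" - one clean string for the message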
